# torch

| API名称 | 是否支持 | 限制与说明 |
| --- | --- | --- |
| torch.SymInt | 否 | |
| torch.SymFloat | 否 | |
| torch.SymBool | 否 | |
| torch.Tag | 否 | |
| torch.is_tensor | 是 | |
| torch.is_storage | 是 | |
| torch.is_complex | 是 | |
| torch.is_conj | 是 | |
| torch.is_floating_point | 是 | |
| torch.is_nonzero | 是 | |
| torch.set_default_dtype | 是 | |
| torch.get_default_dtype | 是 | |
| torch.set_default_device | 否 | |
| torch.set_default_tensor_type | 是 | |
| torch.numel | 是 | |
| torch.set_printoptions | 是 | |
| torch.set_flush_denormal | 是 | |
| torch.tensor | 是 | |
| torch.sparse_coo_tensor | 否 | |
| torch.sparse_csr_tensor | 否 | |
| torch.sparse_csc_tensor | 否 | |
| torch.sparse_bsr_tensor | 否 | |
| torch.sparse_bsc_tensor | 否 | |
| torch.asarray | 是 | |
| torch.as_tensor | 是 | |
| torch.as_strided | | |
| torch.from_numpy | 是 | |
| torch.from_dlpack | 否 | |
| torch.frombuffer | 是 | |
| torch.zeros | | |
| torch.zeros_like | | |
| torch.ones | | |
| torch.ones_like | | |
| torch.arange | | |
| torch.range | | |
| torch.linspace | | |
| torch.logspace | | |
| torch.eye | | |
| torch.empty | 是 | |
| torch.empty_like | | |
| torch.empty_strided | | |
| torch.full | | |
| torch.full_like | | |
| torch.quantize_per_tensor | | |
| torch.quantize_per_channel | | |
| torch.dequantize | | |
| torch.dequantize | | |
| torch.complex | 否 | |
| torch.polar | 否 | |
| torch.heaviside | 否 | |
| torch.adjoint | 否 | |
| torch.argwhere | 否 | |
| torch.cat | | |
| torch.concat | | |
| torch.concatenate | 否 | |
| torch.conj | 否 | |
| torch.chunk | | |
| torch.dsplit | | |
| torch.column_stack | | |
| torch.dstack | | |
| torch.gather | | |
| torch.hsplit | | |
| torch.hstack | | |
| torch.index_add | | |
| torch.index_copy | 否 | |
| torch.index_reduce | 否 | |
| torch.index_select | | |
| torch.masked_select | | |
| torch.movedim | | |
| torch.moveaxis | 是 | |
| torch.narrow | | |
| torch.narrow_copy | 否 | |
| torch.nonzero | | |
| torch.permute | | |
| torch.reshape | | |
| torch.row_stack | | |
| torch.select | | |
| torch.scatter | | |
| torch.diagonal_scatter | | |
| torch.select_scatter | | |
| torch.slice_scatter | | |
| torch.scatter_add | | |
| torch.scatter_reduce | 否 | |
| torch.split | | |
| torch.squeeze | 否 | |
| torch.stack | | |
| torch.swapaxes | | |
| torch.swapdims | | |
| torch.t | 是 | |
| torch.take | | |
| torch.take_along_dim | | |
| torch.tensor_split | | |
| torch.tile | | |
| torch.transpose | | |
| torch.unbind | | |
| torch.unsqueeze | | |
| torch.vsplit | | |
| torch.vstack | | |
| torch.where | 否 | |
| torch.where | | |
| torch.Generator | 是 | |
| torch.Generator.get_state | 是 | |
| torch.Generator.initial_seed | 是 | |
| torch.Generator.manual_seed | 是 | |
| torch.Generator.seed | 是 | |
| torch.Generator.set_state | 是 | |
| torch.seed | 是 | |
| torch.manual_seed | 是 | |
| torch.initial_seed | 是 | |
| torch.get_rng_state | 是 | |
| torch.set_rng_state | 是 | |
| torch.bernoulli | | |
| torch.multinomial | | |
| torch.normal | | |
| torch.normal | | |
| torch.normal | | |
| torch.normal | | |
| torch.poisson | | |
| torch.rand | 否 | |
| torch.rand_like | | |
| torch.randint | | |
| torch.randint_like | | |
| torch.randn | 否 | |
| torch.randn_like | | |
| torch.randperm | | |
| torch.quasirandom.SobolEngine | | |
| torch.quasirandom.SobolEngine.draw | | |
| torch.quasirandom.SobolEngine.draw_base2 | | |
| torch.quasirandom.SobolEngine.fast_forward | | |
| torch.quasirandom.SobolEngine.reset | | |
| torch.save | 是 | |
| torch.load | | |
| torch.get_num_threads | 是 | 只支持CPU,GPU/NPU不支持 |
| torch.set_num_threads | 是 | 只支持CPU,GPU/NPU不支持 |
| torch.get_num_interop_threads | 是 | |
| torch.set_num_interop_threads | 是 | 只支持CPU,GPU/NPU不支持 |
| torch.no_grad | 是 | |
| torch.enable_grad | 是 | |
| torch.set_grad_enabled | 是 | |
| torch.is_grad_enabled | 是 | |
| torch.inference_mode | 否 | |
| torch.is_inference_mode_enabled | 是 | |
| torch.abs | | |
| torch.absolute | | |
| torch.acos | | |
| torch.arccos | | |
| torch.acosh | | |
| torch.arccosh | | |
| torch.add | | |
| torch.addcdiv | | |
| torch.addcmul | | |
| torch.angle | | |
| torch.asin | | |
| torch.arcsin | | |
| torch.asinh | | |
| torch.arcsinh | | |
| torch.atan | | |
| torch.arctan | | |
| torch.atanh | | |
| torch.arctanh | | |
| torch.atan2 | | |
| torch.arctan2 | | |
| torch.bitwise_not | | |
| torch.bitwise_and | | |
| torch.bitwise_or | | |
| torch.bitwise_xor | | |
| torch.bitwise_left_shift | 否 | |
| torch.bitwise_right_shift | 否 | |
| torch.ceil | | |
| torch.clamp | | |
| torch.clip | | |
| torch.conj_physical | | |
| torch.copysign | | |
| torch.cos | | |
| torch.cosh | | |
| torch.deg2rad | | |
| torch.div | | |
| torch.divide | | |
| torch.digamma | | |
| torch.erf | | |
| torch.erfc | | |
| torch.erfinv | | |
| torch.exp | | |
| torch.exp2 | | |
| torch.expm1 | | |
| torch.fake_quantize_per_channel_affine | 否 | |
| torch.fake_quantize_per_tensor_affine | 否 | |
| torch.fix | | |
| torch.float_power | | |
| torch.floor | | |
| torch.floor_divide | | |
| torch.fmod | | |
| torch.frac | | |
| torch.frexp | 否 | |
| torch.gradient | | |
| torch.imag | 否 | |
| torch.ldexp | | |
| torch.lerp | | |
| torch.lgamma | | |
| torch.log | | |
| torch.log10 | | |
| torch.log1p | | |
| torch.log2 | | |
| torch.logaddexp | 否 | |
| torch.logaddexp2 | 否 | |
| torch.logical_and | | |
| torch.logical_not | | |
| torch.logical_or | | |
| torch.logical_xor | | |
| torch.logit | | |
| torch.hypot | 否 | |
| torch.i0 | 否 | |
| torch.igamma | 否 | |
| torch.igammac | 否 | |
| torch.mul | | |
| torch.multiply | | |
| torch.mvlgamma | | |
| torch.nan_to_num | 否 | |
| torch.neg | | |
| torch.negative | | |
| torch.nextafter | 否 | |
| torch.polygamma | 否 | |
| torch.positive | | |
| torch.pow | | |
| torch.pow | | |
| torch.quantized_batch_norm | 否 | |
| torch.quantized_max_pool1d | 否 | |
| torch.quantized_max_pool2d | 否 | |
| torch.rad2deg | | |
| torch.real | | |
| torch.reciprocal | | |
| torch.remainder | | |
| torch.round | | |
| torch.rsqrt | | |
| torch.sigmoid | | |
| torch.sign | | |
| torch.sgn | 否 | |
| torch.signbit | 否 | |
| torch.sin | | |
| torch.sinc | 否 | |
| torch.sinh | | |
| torch.softmax | 否 | |
| torch.sqrt | | |
| torch.square | | |
| torch.sub | | |
| torch.subtract | | |
| torch.tan | | |
| torch.tanh | | |
| torch.true_divide | | |
| torch.trunc | | |
| torch.xlogy | 否 | |
| torch.argmax | | |
| torch.argmax | | |
| torch.argmin | | |
| torch.amax | | |
| torch.amin | | |
| torch.aminmax | 否 | |
| torch.all | | |
| torch.all | | |
| torch.any | | |
| torch.any | | |
| torch.max | | |
| torch.max | | |
| torch.max | | |
| torch.min | | |
| torch.min | | |
| torch.min | | |
| torch.dist | | |
| torch.logsumexp | | |
| torch.mean | | |
| torch.mean | | |
| torch.nanmean | 否 | |
| torch.median | | |
| torch.median | | |
| torch.nanmedian | | |
| torch.nanmedian | | |
| torch.mode | | |
| torch.norm | | |
| torch.nansum | | |
| torch.nansum | | |
| torch.prod | | |
| torch.prod | | |
| torch.quantile | | |
| torch.nanquantile | 否 | |
| torch.std | 否 | |
| torch.std_mean | 否 | |
| torch.sum | | |
| torch.sum | | |
| torch.unique | | |
| torch.unique_consecutive | | |
| torch.var | 否 | |
| torch.var_mean | 否 | |
| torch.count_nonzero | | |
| torch.allclose | | |
| torch.argsort | 否 | |
| torch.eq | | |
| torch.equal | | |
| torch.ge | | |
| torch.greater_equal | | |
| torch.gt | | |
| torch.greater | | |
| torch.isclose | | |
| torch.isfinite | | |
| torch.isin | 否 | |
| torch.isinf | | |
| torch.isposinf | | |
| torch.isneginf | | |
| torch.isnan | | |
| torch.isreal | | |
| torch.kthvalue | 否 | |
| torch.le | 否 | |
| torch.less_equal | | |
| torch.lt | | |
| torch.less | | |
| torch.maximum | | |
| torch.minimum | | |
| torch.fmax | 否 | |
| torch.fmin | | |
| torch.ne | | |
| torch.not_equal | | |
| torch.sort | 否 | |
| torch.topk | 否 | |
| torch.msort | | |
| torch.stft | 否 | |
| torch.istft | 否 | |
| torch.bartlett_window | 否 | |
| torch.blackman_window | 否 | |
| torch.hamming_window | 否 | |
| torch.hann_window | 否 | |
| torch.kaiser_window | 否 | |
| torch.atleast_1d | | |
| torch.atleast_2d | | |
| torch.atleast_3d | | |
| torch.bincount | | |
| torch.block_diag | | |
| torch.broadcast_tensors | 否 | |
| torch.broadcast_to | | |
| torch.broadcast_shapes | | |
| torch.bucketize | | |
| torch.cartesian_prod | | |
| torch.cdist | | |
| torch.clone | | |
| torch.combinations | | |
| torch.corrcoef | | |
| torch.cov | | |
| torch.cross | | |
| torch.cummax | | |
| torch.cummin | | |
| torch.cumprod | | |
| torch.cumsum | | |
| torch.diag | | |
| torch.diag_embed | | |
| torch.diagflat | | |
| torch.diagonal | | |
| torch.diff | | |
| torch.einsum | | |
| torch.flatten | | |
| torch.flip | | |
| torch.fliplr | | |
| torch.flipud | | |
| torch.kron | | |
| torch.rot90 | 否 | |
| torch.gcd | | |
| torch.histc | 否 | |
| torch.histogram | 否 | |
| torch.histogramdd | 否 | |
| torch.meshgrid | | |
| torch.lcm | 否 | |
| torch.logcumsumexp | 否 | |
| torch.ravel | | |
| torch.renorm | | |
| torch.repeat_interleave | | |
| torch.repeat_interleave | | |
| torch.roll | | |
| torch.searchsorted | | |
| torch.tensordot | | |
| torch.trace | 否 | |
| torch.tril | | |
| torch.tril_indices | | |
| torch.triu | | |
| torch.triu_indices | | |
| torch.unflatten | 是 | |
| torch.vander | 否 | |
| torch.view_as_real | 否 | |
| torch.view_as_complex | 否 | |
| torch.resolve_conj | 否 | |
| torch.resolve_neg | 否 | |
| torch.addbmm | | |
| torch.addmm | | |
| torch.addmv | | |
| torch.addr | | |
| torch.baddbmm | | |
| torch.bmm | | |
| torch.chain_matmul | | |
| torch.cholesky | 否 | |
| torch.cholesky_inverse | 否 | |
| torch.cholesky_solve | 否 | |
| torch.dot | | |
| torch.geqrf | 否 | |
| torch.ger | | |
| torch.inner | | |
| torch.inverse | | |
| torch.det | | |
| torch.logdet | 否 | |
| torch.slogdet | | |
| torch.lu | 否 | |
| torch.lu_solve | 否 | |
| torch.lu_unpack | 否 | |
| torch.matmul | | |
| torch.matrix_power | | |
| torch.matrix_exp | 否 | |
| torch.mm | | |
| torch.mv | | |
| torch.orgqr | 否 | |
| torch.ormqr | 否 | |
| torch.outer | | |
| torch.pinverse | | |
| torch.qr | | |
| torch.svd | | |
| torch.svd_lowrank | | |
| torch.pca_lowrank | | |
| torch.lobpcg | 否 | |
| torch.trapz | | |
| torch.trapezoid | | |
| torch.cumulative_trapezoid | | |
| torch.triangular_solve | | |
| torch.vdot | | |
| torch._foreach_abs | 否 | |
| torch._foreach_abs_ | 否 | |
| torch._foreach_acos | 否 | |
| torch._foreach_acos_ | 否 | |
| torch._foreach_asin | 否 | |
| torch._foreach_asin_ | 否 | |
| torch._foreach_atan | 否 | |
| torch._foreach_atan_ | 否 | |
| torch._foreach_ceil | 否 | |
| torch._foreach_ceil_ | 否 | |
| torch._foreach_cos | 否 | |
| torch._foreach_cos_ | 否 | |
| torch._foreach_cosh | 否 | |
| torch._foreach_cosh_ | 否 | |
| torch._foreach_erf | 否 | |
| torch._foreach_erf_ | 否 | |
| torch._foreach_erfc | 否 | |
| torch._foreach_erfc_ | 否 | |
| torch._foreach_exp | 否 | |
| torch._foreach_exp_ | 否 | |
| torch._foreach_expm1 | 否 | |
| torch._foreach_expm1_ | 否 | |
| torch._foreach_floor | 否 | |
| torch._foreach_floor_ | 否 | |
| torch._foreach_log | 否 | |
| torch._foreach_log_ | 否 | |
| torch._foreach_log10 | 否 | |
| torch._foreach_log10_ | 否 | |
| torch._foreach_log1p | 否 | |
| torch._foreach_log1p_ | 否 | |
| torch._foreach_log2 | 否 | |
| torch._foreach_log2_ | 否 | |
| torch._foreach_neg | 否 | |
| torch._foreach_neg_ | 否 | |
| torch._foreach_tan | 否 | |
| torch._foreach_tan_ | 否 | |
| torch._foreach_sin | 否 | |
| torch._foreach_sin_ | 否 | |
| torch._foreach_sinh | 否 | |
| torch._foreach_sinh_ | 否 | |
| torch._foreach_round | 否 | |
| torch._foreach_round_ | 否 | |
| torch._foreach_sqrt | 否 | |
| torch._foreach_sqrt_ | 否 | |
| torch._foreach_lgamma | 否 | |
| torch._foreach_lgamma_ | 否 | |
| torch._foreach_frac | 否 | |
| torch._foreach_frac_ | 否 | |
| torch._foreach_reciprocal | 否 | |
| torch._foreach_reciprocal_ | 否 | |
| torch._foreach_sigmoid | 否 | |
| torch._foreach_sigmoid_ | 否 | |
| torch._foreach_trunc | 否 | |
| torch._foreach_trunc_ | 否 | |
| torch._foreach_zero_ | 否 | |
| torch.compiled_with_cxx11_abi | 是 | |
| torch.result_type | 是 | |
| torch.can_cast | 是 | |
| torch.promote_types | 是 | |
| torch.use_deterministic_algorithms | 是 | |
| torch.are_deterministic_algorithms_enabled | 是 | |
| torch.is_deterministic_algorithms_warn_only_enabled | 否 | |
| torch.set_deterministic_debug_mode | 否 | |
| torch.get_deterministic_debug_mode | 否 | |
| torch.set_float32_matmul_precision | 否 | |
| torch.get_float32_matmul_precision | 否 | |
| torch.set_warn_always | 否 | |
| torch.is_warn_always_enabled | 否 | |
| torch.vmap | 否 | |
| torch._assert | 是 | |
| torch.sym_float | 否 | |
| torch.sym_int | 否 | |
| torch.sym_max | 否 | |
| torch.sym_min | 否 | |
| torch.sym_not | 否 | |
| torch.compile | 否 | |
| torch.autograd.set_multithreading_enabled | 否 | |