Supported Aten Functions#
The following functions have a registered converter in
yobx.torch.interpreter. The list is generated programmatically
from the live converter registry.
Aten Functions#
Functions registered via yobx.torch.interpreter._aten_functions.
<<<
import yobx.torch.interpreter._aten_functions as _af

# Emit one reST bullet per registered aten converter, sorted by name.
rows = [
    f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`"
    for k, v in sorted(_af.__dict__.items())
    if k.startswith("aten_") and callable(v)
]
print("\n".join(rows))
>>>
aten_FunctionCtx→aten_FunctionCtxaten___and___Tensor→aten___and___Tensoraten___or___Tensor→aten___or___Tensoraten__adaptive_avg_pool2d→aten__adaptive_avg_pool2daten__assert_scalar→aten__assert_scalaraten__assert_tensor_metadata→aten__assert_tensor_metadataaten__embedding_bag→aten__embedding_bagaten__enter_autocast→aten__enter_autocastaten__exit_autocast→aten__exit_autocastaten__fft_r2c→aten__fft_r2caten__fftn_onnx→aten__fftn_onnxaten__fused_rms_norm→aten__fused_rms_normaten__grouped_mm→aten__grouped_mmaten__log_api_usage_once→aten__log_api_usage_onceaten__log_softmax→aten__log_softmaxaten__log_softmax_backward_data→aten__log_softmax_backward_dataaten__native_batch_norm→aten__native_batch_normaten__native_batch_norm_legit_no_stats→aten__native_batch_norm_legit_no_statsaten__native_batch_norm_legit_no_training→aten__native_batch_norm_legit_no_trainingaten__prelu_kernel→aten__prelu_kernelaten__prelu_kernel_backward→aten__prelu_kernel_backwardaten__set_grad_enabled→aten__set_grad_enabledaten__softmax→aten__softmaxaten__softmax_backward_data→aten__softmax_backward_dataaten__sym_sqrt→aten__sym_sqrtaten__to_copy→aten__to_copyaten__unsafe_index_Tensor→aten__unsafe_index_Tensoraten__unsafe_index_put→aten__unsafe_index_putaten__unsafe_view→aten__unsafe_viewaten_abs→aten_absaten_acos→aten_acosaten_acosh→aten_acoshaten_adaptive_avg_pool1d→aten_adaptive_avg_pool1daten_adaptive_avg_pool2d→aten_adaptive_avg_pool2daten_adaptive_avg_pool3d→aten_adaptive_avg_pool3daten_add→aten_addaten_add_Scalar→aten_add_Scalaraten_add_Tensor→aten_add_Tensoraten_add__Tensor→aten_add__Tensoraten_addcmul→aten_addcmulaten_addmm→aten_addmmaten_alias→aten_aliasaten_all→aten_allaten_all_dim→aten_all_dimaten_amax→aten_amaxaten_and→aten_andaten_and_→aten_and_aten_any→aten_anyaten_any_dim→aten_any_dimaten_arange→aten_arangeaten_arange_start→aten_arange_startaten_arange_start_step→aten_arange_start_stepaten_argmax→aten_argmaxaten_argmin→aten_argminaten_argsort→aten_argsortaten_as_strided→aten_as_stridedaten_a
sin→aten_asinaten_asinh→aten_asinhaten_atan→aten_atanaten_atanh→aten_atanhaten_auto_functionalized→aten_auto_functionalizedaten_avg_pool2d→aten_avg_pool2daten_avg_pool2d_backward→aten_avg_pool2d_backwardaten_avg_pool3d→aten_avg_pool3daten_baddbmm→aten_baddbmmaten_batch_norm→aten_batch_normaten_bitwise_and→aten_bitwise_andaten_bitwise_and_Tensor→aten_bitwise_and_Tensoraten_bitwise_not→aten_bitwise_notaten_bitwise_or→aten_bitwise_oraten_bitwise_or_Tensor→aten_bitwise_or_Tensoraten_bitwise_or__Tensor→aten_bitwise_or__Tensoraten_bmm→aten_bmmaten_broadcast_tensors→aten_broadcast_tensorsaten_bucketize_Tensor→aten_bucketize_Tensoraten_cat→aten_cataten_ceil→aten_ceilaten_chunk→aten_chunkaten_clamp→aten_clampaten_clamp_Tensor→aten_clamp_Tensoraten_clamp_max→aten_clamp_maxaten_clamp_min→aten_clamp_minaten_clip→aten_clipaten_clone→aten_cloneaten_col2im→aten_col2imaten_cond→aten_condaten_constant_pad_nd→aten_constant_pad_ndaten_contiguous→aten_contiguousaten_conv1d→aten_conv1daten_conv2d→aten_conv2daten_conv2d_padding→aten_conv2d_paddingaten_conv3d→aten_conv3daten_conv_transpose2d_input→aten_conv_transpose2d_inputaten_conv_transpose3d_input→aten_conv_transpose3d_inputaten_convolution→aten_convolutionaten_copy→aten_copyaten_copy_→aten_copy_aten_cos→aten_cosaten_cosh→aten_coshaten_cross_entropy_loss→aten_cross_entropy_lossaten_cudnn_batch_norm→aten_cudnn_batch_normaten_cumsum→aten_cumsumaten_detach→aten_detachaten_detach_→aten_detach_aten_diff→aten_diffaten_div→aten_divaten_div_Scalar→aten_div_Scalaraten_div_Tensor→aten_div_Tensoraten_div_Tensor_mode→aten_div_Tensor_modeaten_div__Tensor→aten_div__Tensoraten_dropout→aten_dropoutaten_dropout_→aten_dropout_aten_einsum→aten_einsumaten_elu→aten_eluaten_elu_→aten_elu_aten_embedding→aten_embeddingaten_embedding_bag_padding_idx→aten_embedding_bag_padding_idxaten_empty_like→aten_empty_likeaten_empty_memory_format→aten_empty_memory_formataten_empty_permuted→aten_empty_permutedaten_empty_strided→aten_empty_stridedaten_eq→aten_eqaten_eq_Scal
ar→aten_eq_Scalaraten_eq_Tensor→aten_eq_Tensoraten_erf→aten_erfaten_exp→aten_expaten_expand→aten_expandaten_expand_as→aten_expand_asaten_expm1→aten_expm1aten_feature_dropout→aten_feature_dropoutaten_fft_fft→aten_fft_fftaten_fft_fft2→aten_fft_fft2aten_fft_ifft→aten_fft_ifftaten_fft_ifft2→aten_fft_ifft2aten_fill_Scalar→aten_fill_Scalaraten_fill_Tensor→aten_fill_Tensoraten_flatten→aten_flattenaten_flatten_using_ints→aten_flatten_using_intsaten_flip→aten_flipaten_floor→aten_flooraten_floor_divide→aten_floor_divideaten_floordiv→aten_floordivaten_fmod_Scalar→aten_fmod_Scalaraten_full→aten_fullaten_full_like→aten_full_likeaten_gather→aten_gatheraten_ge→aten_geaten_ge_Scalar→aten_ge_Scalaraten_ge_Tensor→aten_ge_Tensoraten_gelu→aten_geluaten_getattr→aten_getattraten_glu→aten_gluaten_grid_sampler→aten_grid_sampleraten_grid_sampler_2d→aten_grid_sampler_2daten_group_norm→aten_group_normaten_gt→aten_gtaten_gt_Scalar→aten_gt_Scalaraten_gt_Tensor→aten_gt_Tensoraten_hardsigmoid→aten_hardsigmoidaten_hardswish→aten_hardswishaten_hardswish_→aten_hardswish_aten_hardtanh→aten_hardtanhaten_hardtanh_→aten_hardtanh_aten_hardtanh_backward→aten_hardtanh_backwardaten_histc→aten_histcaten_iadd→aten_iaddaten_im2col→aten_im2colaten_imul→aten_imulaten_index_Tensor→aten_index_Tensoraten_index_add→aten_index_addaten_index_copy→aten_index_copyaten_index_put→aten_index_putaten_index_put_→aten_index_put_aten_index_select→aten_index_selectaten_instance_norm→aten_instance_normaten_interpolate→aten_interpolateaten_isin→aten_isinaten_isin_Tensor_Tensor→aten_isin_Tensor_Tensoraten_isinf→aten_isinfaten_isnan→aten_isnanaten_item→aten_itematen_l1_loss→aten_l1_lossaten_layer_norm→aten_layer_normaten_le→aten_leaten_le_Scalar→aten_le_Scalaraten_le_Tensor→aten_le_Tensoraten_leaky_relu→aten_leaky_reluaten_leaky_relu_→aten_leaky_relu_aten_leaky_relu_backward→aten_leaky_relu_backwardaten_lift_fresh_copy→aten_lift_fresh_copyaten_linalg_vector_norm→aten_linalg_vector_normaten_linear→aten_linearaten_linspace→aten_linsp
aceaten_log→aten_logaten_log_softmax_int→aten_log_softmax_intaten_logical_and→aten_logical_andaten_logical_not→aten_logical_notaten_logical_or→aten_logical_oraten_logsumexp→aten_logsumexpaten_lt→aten_ltaten_lt_Scalar→aten_lt_Scalaraten_lt_Tensor→aten_lt_Tensoraten_masked_fill_Scalar→aten_masked_fill_Scalaraten_masked_fill_Tensor→aten_masked_fill_Tensoraten_masked_fill__Scalar→aten_masked_fill__Scalaraten_masked_scatter→aten_masked_scatteraten_matmul→aten_matmulaten_max→aten_maxaten_max_dim→aten_max_dimaten_max_other→aten_max_otheraten_max_pool1d→aten_max_pool1daten_max_pool2d→aten_max_pool2daten_max_pool2d_with_indices→aten_max_pool2d_with_indicesaten_max_pool3d→aten_max_pool3daten_max_pool3d_with_indices→aten_max_pool3d_with_indicesaten_maximum→aten_maximumaten_mean→aten_meanaten_mean_dim→aten_mean_dimaten_meshgrid→aten_meshgridaten_meshgrid_indexing→aten_meshgrid_indexingaten_min→aten_minaten_min_other→aten_min_otheraten_minimum→aten_minimumaten_mm→aten_mmaten_mod→aten_modaten_movedim_int→aten_movedim_intaten_movedim_intlist→aten_movedim_intlistaten_mse_loss→aten_mse_lossaten_mul→aten_mulaten_mul_Scalar→aten_mul_Scalaraten_mul_Tensor→aten_mul_Tensoraten_mul__Tensor→aten_mul__Tensoraten_multinomial→aten_multinomialaten_multiply_Tensor→aten_multiply_Tensoraten_nan_to_num→aten_nan_to_numaten_narrow→aten_narrowaten_native_dropout→aten_native_dropoutaten_native_group_norm→aten_native_group_normaten_native_layer_norm→aten_native_layer_normaten_ne→aten_neaten_ne_Scalar→aten_ne_Scalaraten_ne_Tensor→aten_ne_Tensoraten_neg→aten_negaten_new_ones→aten_new_onesaten_new_zeros→aten_new_zerosaten_nll_loss_forward→aten_nll_loss_forwardaten_nonzero→aten_nonzeroaten_nonzero_numpy→aten_nonzero_numpyaten_not→aten_notaten_not_→aten_not_aten_numpy_T→aten_numpy_Taten_one_hot→aten_one_hotaten_ones→aten_onesaten_ones_like→aten_ones_likeaten_or→aten_oraten_outer→aten_outeraten_pad→aten_padaten_permute→aten_permuteaten_polar→aten_polaraten_pow→aten_powaten_pow_Scalar→aten_pow_Scalaraten_pow_
Tensor_Scalar→aten_pow_Tensor_Scalaraten_pow_Tensor_Tensor→aten_pow_Tensor_Tensoraten_pow__Scalar→aten_pow__Scalaraten_prelu→aten_preluaten_prod→aten_prodaten_prod_dim_int→aten_prod_dim_intaten_randn→aten_randnaten_reciprocal→aten_reciprocalaten_reflection_pad2d→aten_reflection_pad2daten_relu→aten_reluaten_relu_→aten_relu_aten_remainder→aten_remainderaten_remainder_Scalar→aten_remainder_Scalaraten_remainder_Tensor→aten_remainder_Tensoraten_repeat→aten_repeataten_repeat_interleave→aten_repeat_interleaveaten_repeat_interleave_Tensor→aten_repeat_interleave_Tensoraten_repeat_interleave_self_Tensor→aten_repeat_interleave_self_Tensoraten_repeat_interleave_self_int→aten_repeat_interleave_self_intaten_reshape→aten_reshapeaten_reshape_as→aten_reshape_asaten_roll→aten_rollaten_round→aten_roundaten_rrelu_with_noise_backward→aten_rrelu_with_noise_backwardaten_rsqrt→aten_rsqrtaten_rsub→aten_rsubaten_rsub_Scalar→aten_rsub_Scalaraten_scalar_tensor→aten_scalar_tensoraten_scan→aten_scanaten_scatter__src→aten_scatter__srcaten_scatter_add→aten_scatter_addaten_scatter_add_→aten_scatter_add_aten_scatter_reduce__two→aten_scatter_reduce__twoaten_scatter_reduce_two→aten_scatter_reduce_twoaten_scatter_src→aten_scatter_srcaten_select_copy_int→aten_select_copy_intaten_select_int→aten_select_intaten_select_scatter→aten_select_scatteraten_selu→aten_seluaten_setitem→aten_setitematen_sigmoid→aten_sigmoidaten_sigmoid_→aten_sigmoid_aten_sigmoid_backward→aten_sigmoid_backwardaten_sign→aten_signaten_silu→aten_siluaten_silu_→aten_silu_aten_simple_loop_for→aten_simple_loop_foraten_sin→aten_sinaten_sinh→aten_sinhaten_slice_Tensor→aten_slice_Tensoraten_slice_backward→aten_slice_backwardaten_slice_scatter→aten_slice_scatteraten_softmax→aten_softmaxaten_softmax_int→aten_softmax_intaten_softplus→aten_softplusaten_split_Tensor→aten_split_Tensoraten_split_with_sizes→aten_split_with_sizesaten_sqrt→aten_sqrtaten_square→aten_squareaten_squeeze→aten_squeezeaten_squeeze_dim→aten_squeeze_dimaten_squeeze_dims→aten_s
queeze_dimsaten_stack→aten_stackaten_std_dim→aten_std_dimaten_sub→aten_subaten_sub_Tensor→aten_sub_Tensoraten_sub__Tensor→aten_sub__Tensoraten_sum→aten_sumaten_sum_dim_IntList→aten_sum_dim_IntListaten_sym_constrain_range_for_size→aten_sym_constrain_range_for_sizeaten_sym_float→aten_sym_floataten_sym_max→aten_sym_maxaten_sym_min→aten_sym_minaten_sym_not→aten_sym_notaten_sym_size_int→aten_sym_size_intaten_sym_sum→aten_sym_sumaten_t→aten_taten_take→aten_takeaten_tan→aten_tanaten_tanh→aten_tanhaten_tanh_backward→aten_tanh_backwardaten_tensor→aten_tensoraten_threshold_backward→aten_threshold_backwardaten_to→aten_toaten_to_device→aten_to_deviceaten_to_dtype→aten_to_dtypeaten_to_dtype_layout→aten_to_dtype_layoutaten_topk→aten_topkaten_transpose→aten_transposeaten_transpose_int→aten_transpose_intaten_tril→aten_trilaten_triu→aten_triuaten_truediv→aten_truedivaten_trunc→aten_truncaten_type_as→aten_type_asaten_unbind_int→aten_unbind_intaten_unflatten_int→aten_unflatten_intaten_unfold→aten_unfoldaten_unique_consecutive→aten_unique_consecutiveaten_unsqueeze→aten_unsqueezeaten_unsqueeze_→aten_unsqueeze_aten_upsample_bicubic2d→aten_upsample_bicubic2daten_upsample_bicubic2d_vec→aten_upsample_bicubic2d_vecaten_upsample_bilinear2d→aten_upsample_bilinear2daten_upsample_bilinear2d_vec→aten_upsample_bilinear2d_vecaten_upsample_linear1d_vec→aten_upsample_linear1d_vecaten_upsample_nearest2d→aten_upsample_nearest2daten_upsample_nearest2d_vec→aten_upsample_nearest2d_vecaten_upsample_nearest3d→aten_upsample_nearest3daten_upsample_nearest3d_vec→aten_upsample_nearest3d_vecaten_upsample_trilinear3d→aten_upsample_trilinear3daten_upsample_trilinear3d_vec→aten_upsample_trilinear3d_vecaten_view→aten_viewaten_where→aten_whereaten_where_Scalar→aten_where_Scalaraten_where_ScalarOther→aten_where_ScalarOtheraten_where_ScalarSelf→aten_where_ScalarSelfaten_where_self→aten_where_selfaten_wrap_with_autocast→aten_wrap_with_autocastaten_wrap_with_set_grad_enabled→aten_wrap_with_set_grad_enabledaten_zero→aten_
zeroaten_zeros→aten_zerosaten_zeros_like→aten_zeros_like
Attention Functions#
Functions registered via
yobx.torch.interpreter._aten_functions_attention.
<<<
import yobx.torch.interpreter._aten_functions_attention as _afa

# Emit one reST bullet per registered attention converter, sorted by name.
rows = [
    f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`"
    for k, v in sorted(_afa.__dict__.items())
    if k.startswith("aten_") and callable(v)
]
print("\n".join(rows))
>>>
* aten__scaled_dot_product_efficient_attention → aten__scaled_dot_product_efficient_attention
* aten__scaled_dot_product_flash_attention → aten__scaled_dot_product_flash_attention
* aten__scaled_dot_product_flash_attention_for_cpu → aten__scaled_dot_product_flash_attention_for_cpu
* aten_scaled_dot_product_attention → aten_scaled_dot_product_attention
Non-Aten Functions#
Functions registered via yobx.torch.interpreter._non_aten_functions.
These converters handle ONNX-specific symbolic operations that are not
part of the standard aten namespace.
<<<
import inspect

import yobx.torch.interpreter._non_aten_functions as _naf

# Emit one reST bullet per converter defined directly in the module,
# sorted by name. Only plain functions that live in this module are
# listed, so re-exported helpers from other modules are skipped.
rows = []
for k, v in sorted(_naf.__dict__.items()):
    # inspect.isfunction() implies callable(); the original extra
    # callable() check was redundant and has been dropped.
    if not inspect.isfunction(v):
        continue
    if v.__module__ != _naf.__name__:
        continue
    rows.append(f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`")
print("\n".join(rows))
>>>
* onnx_symbolic__symbolic_default → onnx_symbolic__symbolic_default
Prims Functions#
Functions registered via yobx.torch.interpreter._prims_functions.
<<<
import yobx.torch.interpreter._prims_functions as _pf

# Emit one reST bullet per registered prims converter, sorted by name.
rows = [
    f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`"
    for k, v in sorted(_pf.__dict__.items())
    if k.startswith("prims_") and callable(v)
]
print("\n".join(rows))
>>>
* prims_add → prims_add
* prims_amax → prims_amax
* prims_broadcast_in_dim → prims_broadcast_in_dim
* prims_cat → prims_cat
* prims_clone → prims_clone
* prims_collapse_view → prims_collapse_view
* prims_convert_element_type → prims_convert_element_type
* prims_cos → prims_cos
* prims_div → prims_div
* prims_empty_strided → prims_empty_strided
* prims_eq → prims_eq
* prims_exp → prims_exp
* prims_ge → prims_ge
* prims_gt → prims_gt
* prims_iota → prims_iota
* prims_lt → prims_lt
* prims_mul → prims_mul
* prims_neg → prims_neg
* prims_pow → prims_pow
* prims_rsqrt → prims_rsqrt
* prims_sin → prims_sin
* prims_split_dim → prims_split_dim
* prims_sub → prims_sub
* prims_sum → prims_sum
* prims_transpose → prims_transpose
* prims_view_of → prims_view_of
* prims_where → prims_where
Math Functions#
Functions registered via yobx.torch.interpreter._math_functions.
<<<
import yobx.torch.interpreter._math_functions as _mf

# Emit one reST bullet per registered math converter, sorted by name.
rows = [
    f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`"
    for k, v in sorted(_mf.__dict__.items())
    if k.startswith("math_") and callable(v)
]
print("\n".join(rows))
>>>
* math_ceil → math_ceil
* math_trunc → math_trunc
Aten Methods#
Tensor-method converters registered via
yobx.torch.interpreter._aten_methods.
<<<
import yobx.torch.interpreter._aten_methods as _am

# Emit one reST bullet per registered tensor-method converter,
# sorted by name.
rows = [
    f"* ``{k}`` → " f":func:`{v.__name__} <{v.__module__}.{v.__name__}>`"
    for k, v in sorted(_am.__dict__.items())
    if k.startswith("aten_meth_") and callable(v)
]
print("\n".join(rows))
>>>
* aten_meth___eq__ → aten_meth___eq__
* aten_meth_bool → aten_meth_bool
* aten_meth_clamp_max → aten_meth_clamp_max
* aten_meth_clamp_min → aten_meth_clamp_min
* aten_meth_clone → aten_meth_clone
* aten_meth_contiguous → aten_meth_contiguous
* aten_meth_cos → aten_meth_cos
* aten_meth_cpu → aten_meth_cpu
* aten_meth_detach → aten_meth_detach
* aten_meth_eq → aten_meth_eq
* aten_meth_expand → aten_meth_expand
* aten_meth_expand_as → aten_meth_expand_as
* aten_meth_float → aten_meth_float
* aten_meth_item → aten_meth_item
* aten_meth_masked_fill → aten_meth_masked_fill
* aten_meth_masked_fill_ → aten_meth_masked_fill_
* aten_meth_mean → aten_meth_mean
* aten_meth_numel → aten_meth_numel
* aten_meth_pow → aten_meth_pow
* aten_meth_repeat → aten_meth_repeat
* aten_meth_reshape → aten_meth_reshape
* aten_meth_sin → aten_meth_sin
* aten_meth_size → aten_meth_size
* aten_meth_sum → aten_meth_sum
* aten_meth_t → aten_meth_t
* aten_meth_to → aten_meth_to
* aten_meth_transpose → aten_meth_transpose
* aten_meth_unsqueeze → aten_meth_unsqueeze
* aten_meth_view → aten_meth_view