| operator name | used in model | args |
|---|---|---|
aten.mm.default | TIMM/twins_pcpvt_base | ((T([320, 6272], f16, stride=(1, 320)), T([6272, 1280], f16)), {}) |
aten.mm.default | TIMM/twins_pcpvt_base | ((T([320, 6272], f16, stride=(1, 320)), T([6272, 320], f16)), {}) |
aten.mm.default | TorchBench/tts_angular | ((T([3200, 256], f16), T([256, 768], f16)), {}) |
aten.mm.default | TorchBench/tts_angular | ((T([3200, 768], f16), T([768, 256], f16, stride=(1, 768))), {}) |
aten.mm.default | HuggingFace/XLNetLMHeadModel | ((T([32000, 2048], f16, stride=(1, 32000)), T([2048, 1024], f16)), {}) |
aten.mm.default | HuggingFace/GoogleFnet | ((T([32000, 512], f16, stride=(1, 32000)), T([512, 768], f16)), {}) |
aten.mm.default | HuggingFace/CamemBert | ((T([32005, 512], f16, stride=(1, 32005)), T([512, 768], f16)), {}) |
aten.mm.default | HuggingFace/ElectraForQuestionAnswering | ((T([32768, 1024], f16), T([1024, 256], f16)), {}) |
aten.mm.default | TIMM/botnet26t_256 | ((T([32768, 128], f16), T([128, 15], f16, stride=(1, 128))), {}) |
aten.mm.default | TIMM/botnet26t_256 | ((T([32768, 15], f16), T([15, 128], f16)), {}) |
aten.mm.default | TIMM/eca_botnext26ts_256 | ((T([32768, 15], f16), T([15, 16], f16)), {}) |
aten.mm.default | TIMM/eca_botnext26ts_256 | ((T([32768, 16], f16), T([16, 15], f16, stride=(1, 16))), {}) |
aten.mm.default | HuggingFace/ElectraForQuestionAnswering | ((T([32768, 256], f16), T([256, 1024], f16)), {}) |
aten.mm.default | HuggingFace/ElectraForQuestionAnswering | ((T([32768, 256], f16), T([256, 128], f16)), {}) |
aten.mm.default | HuggingFace/ElectraForQuestionAnswering | ((T([32768, 256], f16), T([256, 256], f16)), {}) |
aten.mm.default | HuggingFace/ElectraForQuestionAnswering | ((T([32768, 2], f16), T([2, 256], f16)), {}) |
aten.mm.default | TorchBench/vision_maskrcnn | ((T([364, 0], f16), T([0, 1024], f16)), {}) |
aten.mm.default | TIMM/coat_lite_mini | ((T([384, 100480], f16, stride=(1, 384)), T([100480, 128], f16)), {}) |
aten.mm.default | TIMM/volo_d1_224 | ((T([384, 12544], f16, stride=(1, 384)), T([12544, 1152], f16)), {}) |
aten.mm.default | TIMM/tnt_s_patch16_224 | ((T([384, 12544], f16, stride=(1, 384)), T([12544, 384], f16)), {}) |
aten.mm.default | TIMM/volo_d1_224 | ((T([384, 12544], f16, stride=(1, 384)), T([12544, 384], f16)), {}) |
aten.mm.default | TIMM/gmixer_24_224 | ((T([384, 12544], f16, stride=(1, 384)), T([12544, 768], f16)), {}) |
aten.mm.default | TIMM/tnt_s_patch16_224 | ((T([384, 12608], f16, stride=(1, 384)), T([12608, 1536], f16)), {}) |
aten.mm.default | TIMM/tnt_s_patch16_224 | ((T([384, 12608], f16, stride=(1, 384)), T([12608, 384], f16)), {}) |
aten.mm.default | TorchBench/timm_vision_transformer | ((T([384, 1576], f16, stride=(1, 384)), T([1576, 1536], f16)), {}) |
aten.mm.default | TorchBench/timm_vision_transformer | ((T([384, 1576], f16, stride=(1, 384)), T([1576, 384], f16)), {}) |
aten.mm.default | TIMM/mobilevit_s | ((T([384, 16384], f16, stride=(1, 384)), T([16384, 192], f16)), {}) |
aten.mm.default | TIMM/jx_nest_base | ((T([384, 200704], f16, stride=(1, 384)), T([200704, 128], f16)), {}) |
aten.mm.default | TIMM/swin_base_patch4_window7_224 | ((T([384, 200704], f16, stride=(1, 384)), T([200704, 128], f16)), {}) |
aten.mm.default | TIMM/levit_128 | ((T([384, 2048], f16, stride=(1, 384)), T([2048, 1024], f16)), {}) |
aten.mm.default | TIMM/levit_128 | ((T([384, 2048], f16, stride=(1, 384)), T([2048, 384], f16)), {}) |
aten.mm.default | TIMM/levit_128 | ((T([384, 2048], f16, stride=(1, 384)), T([2048, 768], f16)), {}) |
aten.mm.default | TIMM/resmlp_12_224 | ((T([384, 25088], f16, stride=(1, 384)), T([25088, 1536], f16)), {}) |
aten.mm.default | TIMM/crossvit_9_240 | ((T([384, 25664], f16, stride=(1, 384)), T([25664, 128], f16)), {}) |
aten.mm.default | HuggingFace/YituTechConvBert | ((T([384, 512], f16), T([512, 768], f16)), {}) |
aten.mm.default | HuggingFace/YituTechConvBert | ((T([384, 512], f16, stride=(1, 384)), T([512, 768], f16)), {}) |
aten.mm.default | TIMM/volo_d1_224 | ((T([384, 64], f16, stride=(1, 384)), T([64, 384], f16)), {}) |
aten.mm.default | TIMM/volo_d1_224 | ((T([384, 64], f16, stride=(1, 384)), T([64, 384], f16, stride=(75648, 1))), {}) |
aten.mm.default | TIMM/volo_d1_224 | ((T([384, 64], f16, stride=(1, 75648)), T([64, 1152], f16)), {}) |
aten.mm.default | TorchBench/densenet121 | ((T([4, 1000], f16, stride=(0, 0)), T([1000, 1024], f16)), {}) |
aten.mm.default | TorchBench/fambench_dlrm | ((T([4000, 1024], f16, stride=(1, 4000)), T([1024, 31068], f16)), {}) |
aten.mm.default | TorchBench/fambench_dlrm | ((T([4000, 1024], f16, stride=(1, 4000)), T([1024, 4000], f16)), {}) |
aten.mm.default | TIMM/coat_lite_mini | ((T([401536, 192], f16), T([192, 64], f16)), {}) |
aten.mm.default | TIMM/coat_lite_mini | ((T([401536, 512], f16), T([512, 64], f16)), {}) |
aten.mm.default | TIMM/coat_lite_mini | ((T([401536, 64], f16), T([64, 512], f16)), {}) |
aten.mm.default | TIMM/coat_lite_mini | ((T([401536, 64], f16), T([64, 64], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 1024], f16), T([1024, 1024], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 1024], f16), T([1024, 4096], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 1024], f16), T([1024, 50265], f16, stride=(1, 1024))), {}) |
aten.mm.default | HuggingFace/MBartForConditionalGeneration | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 1024], f16)), {}) |
aten.mm.default | HuggingFace/MegatronBertForQuestionAnswering | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 1024], f16)), {}) |
aten.mm.default | HuggingFace/PegasusForCausalLM | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 1024], f16)), {}) |
aten.mm.default | HuggingFace/TrOCRForCausalLM | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 1024], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForMaskedLM | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 128], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForQuestionAnswering | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 128], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForMaskedLM | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 16384], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForQuestionAnswering | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 16384], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForMaskedLM | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 4096], f16)), {}) |
aten.mm.default | HuggingFace/AlbertForQuestionAnswering | ((T([4096, 1024], f16, stride=(1, 4096)), T([1024, 4096], f16)), {}) |
aten.mm.default | HuggingFace/MobileBertForQuestionAnswering | ((T([4096, 128], f16), T([128, 128], f16)), {}) |
aten.mm.default | HuggingFace/MobileBertForQuestionAnswering | ((T([4096, 128], f16), T([128, 512], f16)), {}) |
aten.mm.default | TorchBench/hf_Albert | ((T([4096, 128], f16), T([128, 768], f16)), {}) |
aten.mm.default | TorchBench/alexnet | ((T([4096, 128], f16, stride=(1, 4096)), T([128, 4096], f16)), {}) |
aten.mm.default | TorchBench/alexnet | ((T([4096, 128], f16, stride=(1, 4096)), T([128, 9216], f16)), {}) |
aten.mm.default | TIMM/convnext_base | ((T([4096, 1568], f16, stride=(1, 4096)), T([1568, 1024], f16)), {}) |
aten.mm.default | HuggingFace/BartForConditionalGeneration | ((T([4096, 2048], f16, stride=(1, 4096)), T([2048, 1024], f16)), {}) |
aten.mm.default | HuggingFace/MBartForCausalLM | ((T([4096, 2048], f16, stride=(1, 4096)), T([2048, 1024], f16)), {}) |
aten.mm.default | HuggingFace/XLNetLMHeadModel | ((T([4096, 2048], f16, stride=(1, 4096)), T([2048, 1024], f16)), {}) |
aten.mm.default | HuggingFace/GPT2ForSequenceClassification | ((T([4096, 2304], f16), T([2304, 768], f16, stride=(1, 2304))), {}) |
aten.mm.default | TIMM/mobilevit_s | ((T([4096, 240], f16), T([240, 240], f16)), {}) |
aten.mm.default | TIMM/mobilevit_s | ((T([4096, 240], f16), T([240, 480], f16)), {}) |
aten.mm.default | HuggingFace/M2M100ForConditionalGeneration | ((T([4096, 256], f16, stride=(1, 4096)), T([256, 1024], f16)), {}) |
aten.mm.default | HuggingFace/MegatronBertForCausalLM | ((T([4096, 256], f16, stride=(1, 4096)), T([256, 1024], f16)), {}) |
aten.mm.default | HuggingFace/XGLMForCausalLM | ((T([4096, 256], f16, stride=(1, 4096)), T([256, 1024], f16)), {}) |
aten.mm.default | HuggingFace/MobileBertForQuestionAnswering | ((T([4096, 2], f16), T([2, 512], f16)), {}) |
aten.mm.default | HuggingFace/DistilBertForQuestionAnswering | ((T([4096, 2], f16), T([2, 768], f16)), {}) |
aten.mm.default | HuggingFace/GPT2ForSequenceClassification | ((T([4096, 2], f16), T([2, 768], f16)), {}) |
aten.mm.default | TorchBench/hf_Albert | ((T([4096, 30000], f16, stride=(0, 0)), T([30000, 128], f16)), {}) |
aten.mm.default | TorchBench/hf_DistilBert | ((T([4096, 30522], f16, stride=(0, 0)), T([30522, 768], f16)), {}) |
aten.mm.default | HuggingFace/DistilBertForQuestionAnswering | ((T([4096, 3072], f16), T([3072, 768], f16)), {}) |
aten.mm.default | TorchBench/hf_Albert | ((T([4096, 3072], f16), T([3072, 768], f16)), {}) |
aten.mm.default | TorchBench/hf_DistilBert | ((T([4096, 3072], f16), T([3072, 768], f16)), {}) |
aten.mm.default | HuggingFace/GPT2ForSequenceClassification | ((T([4096, 3072], f16), T([3072, 768], f16, stride=(1, 3072))), {}) |
aten.mm.default | TIMM/swin_base_patch4_window7_224 | ((T([4096, 3136], f16, stride=(1, 4096)), T([3136, 1024], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 4096], f16), T([4096, 1024], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 4096], f16, stride=(1, 4096)), T([4096, 1024], f16)), {}) |
aten.mm.default | TIMM/pit_b_224 | ((T([4096, 4160], f16, stride=(1, 4096)), T([4160, 1024], f16)), {}) |
aten.mm.default | TIMM/mobilevit_s | ((T([4096, 480], f16), T([480, 240], f16)), {}) |
aten.mm.default | HuggingFace/BartForCausalLM | ((T([4096, 50265], f16), T([50265, 1024], f16)), {}) |
aten.mm.default | HuggingFace/MobileBertForQuestionAnswering | ((T([4096, 512], f16), T([512, 128], f16)), {}) |
aten.mm.default | HuggingFace/MobileBertForQuestionAnswering | ((T([4096, 512], f16), T([512, 384], f16)), {}) |
aten.mm.default | HuggingFace/PegasusForConditionalGeneration | ((T([4096, 512], f16, stride=(1, 4096)), T([512, 1024], f16)), {}) |
aten.mm.default | TorchBench/vgg16 | ((T([4096, 64], f16, stride=(1, 4096)), T([64, 25088], f16)), {}) |
aten.mm.default | TorchBench/vgg16 | ((T([4096, 64], f16, stride=(1, 4096)), T([64, 4096], f16)), {}) |
aten.mm.default | TIMM/mobilevit_s | ((T([4096, 720], f16), T([720, 240], f16)), {}) |
aten.mm.default | TorchBench/hf_Albert | ((T([4096, 768], f16), T([768, 128], f16)), {}) |
aten.mm.default | HuggingFace/GPT2ForSequenceClassification | ((T([4096, 768], f16), T([768, 2], f16, stride=(1, 768))), {}) |
aten.mm.default | HuggingFace/DistilBertForQuestionAnswering | ((T([4096, 768], f16), T([768, 3072], f16)), {}) |
aten.mm.default | TorchBench/hf_Albert | ((T([4096, 768], f16), T([768, 3072], f16)), {}) |
aten.mm.default | TorchBench/hf_DistilBert | ((T([4096, 768], f16), T([768, 3072], f16)), {}) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.