Detected Pickle imports (130)
- "torch._utils._rebuild_tensor_v2",
- "collections.OrderedDict",
- "torch.DoubleStorage",
- "torch.LongStorage",
- "__torch__.torch.nn.modules.linear.___torch_mangle_14.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_42.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_92.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_2.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_100.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_57.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_101.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_96.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_44.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_52.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_41.Dropout",
- "__torch__.transformerJITnative.transformer",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_13.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.activation.___torch_mangle_75.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_24.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_97.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_112.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_77.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_81.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_90.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_67.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_0.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_15.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_66.Linear",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_103.TransformerEncoderLayer",
- "torch._utils._rebuild_tensor_v2",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_3.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_4.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_39.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_58.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_99.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_69.LayerNorm",
- "__torch__.torch.nn.modules.transformer.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_18.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_35.MultiheadAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_109.LayerNorm",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_33.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_53.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_82.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_74.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_89.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_25.MultiheadAttention",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_43.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.sparse.Embedding",
- "__torch__.torch.nn.modules.linear.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_8.Linear",
- "__torch__.torch.nn.modules.activation.___torch_mangle_45.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_78.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_108.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_61.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_34.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_107.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_10.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_54.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_11.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_98.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_70.LayerNorm",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_83.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.activation.___torch_mangle_105.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.MultiheadAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_1.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_20.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_12.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_87.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_86.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_94.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_106.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_104.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_21.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_31.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_102.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_49.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_110.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_27.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_16.Linear",
- "torch.FloatStorage",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_17.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_51.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_59.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_65.MultiheadAttention",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_111.Dropout",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_113.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_29.LayerNorm",
- "__torch__.torch.nn.modules.activation.___torch_mangle_55.MultiheadAttention",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_79.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_88.Linear",
- "collections.OrderedDict",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_40.LayerNorm",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_63.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_19.LayerNorm",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_73.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.activation.___torch_mangle_5.MultiheadAttention",
- "__torch__.torch.nn.modules.activation.___torch_mangle_85.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_6.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_38.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_114.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_46.Linear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_9.LayerNorm",
- "__torch__.torch.nn.modules.linear.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_72.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_76.Linear",
- "__torch__.torch.nn.modules.normalization.LayerNorm",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_30.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_47.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_60.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_7.Dropout",
- "__torch__.torch.nn.modules.linear.___torch_mangle_48.Linear",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_93.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_26.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_71.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_91.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_37.Dropout",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_50.LayerNorm",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_62.Dropout",
- "__torch__.torch.nn.modules.activation.___torch_mangle_95.MultiheadAttention",
- "__torch__.torch.nn.modules.linear.___torch_mangle_68.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_28.Linear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_64.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.normalization.___torch_mangle_80.LayerNorm",
- "__torch__.torch.nn.modules.linear.___torch_mangle_84.NonDynamicallyQuantizableLinear",
- "__torch__.torch.nn.modules.linear.___torch_mangle_56.Linear",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_32.Dropout",
- "__torch__.torch.nn.modules.dropout.___torch_mangle_22.Dropout",
- "__torch__.torch.nn.modules.transformer.___torch_mangle_23.TransformerEncoderLayer",
- "__torch__.torch.nn.modules.linear.___torch_mangle_36.Linear",
- "__torch__.torch.nn.modules.dropout.Dropout"
- Xet hash: 891d414e3104577ac0216efba40497f52db600c4787004b811b244566a847e98
- Size of remote file: 331 MB
- SHA256: ae807578824ea5c2093739e256e3f5d0680de4cce83bf6d076aff3f22b7a207f
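A downloaded copy can be checked against the SHA256 above before use. A minimal sketch, streaming the file in blocks so the 331 MB checkpoint never has to fit in memory (model.pt is again a placeholder path):

```python
import hashlib

def sha256_of(path, block_size=1 << 20):
    """Stream the file in 1 MiB blocks and return its hex SHA-256."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(block_size), b""):
            digest.update(block)
    return digest.hexdigest()

expected = "ae807578824ea5c2093739e256e3f5d0680de4cce83bf6d076aff3f22b7a207f"
assert sha256_of("model.pt") == expected, "checksum mismatch -- do not load"
```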
Xet efficiently stores large files inside Git, splitting them into unique chunks to accelerate uploads and downloads.
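The "unique chunks" are content-defined: cut points are derived from the bytes themselves rather than from fixed offsets, so unchanged regions of a re-uploaded file map to chunks the store already holds. Xet's real chunker is considerably more refined; the toy sketch below, with made-up parameters and a deliberately naive hash, only illustrates the principle:

```python
import hashlib
import os

def chunk(data, window=48, mask=(1 << 13) - 1):
    """Toy content-defined chunking: cut wherever a hash of the trailing
    `window` bytes matches a bit pattern, so identical byte runs chunk
    identically no matter where they sit in the file."""
    chunks, start = [], 0
    for i in range(window, len(data)):
        # Stand-in hash; a real rolling hash updates in O(1)
        # and is stable across runs, unlike Python's hash().
        h = hash(data[i - window:i])
        if (h & mask) == mask and i - start >= window:
            chunks.append(data[start:i])
            start = i
    chunks.append(data[start:])
    return chunks

data = os.urandom(200_000)           # stand-in for file bytes
chunks = chunk(data)
assert b"".join(chunks) == data      # chunking merely partitions the file
store = {hashlib.sha256(c).digest(): c for c in chunks}  # each unique chunk stored once
```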