{
"architectures": [
"BertForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": 0,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0,
"hidden_size": 768,
"id2label": {
"0": "T1574.006",
"1": "T1040",
"2": "T1120",
"3": "T1498.001",
"4": "T1550.001",
"5": "T1037",
"6": "T1574.002",
"7": "T1554",
"8": "T1543.001",
"9": "T1016",
"10": "T1552.002",
"11": "T1550.003",
"12": "T1018",
"13": "T1111",
"14": "T1217",
"15": "T1080",
"16": "T1491",
"17": "T1021",
"18": "T1134",
"19": "T1562.008",
"20": "T1542.002",
"21": "T1550.002",
"22": "T1565.002",
"23": "T1505.005",
"24": "T1082",
"25": "T1505.003",
"26": "T1546.001",
"27": "T1134.003",
"28": "T1548",
"29": "T1021.002",
"30": "T1185",
"31": "T1558.003",
"32": "T1133",
"33": "T1556.006",
"34": "T1027.006",
"35": "T1135",
"36": "T1574.007",
"37": "T1499.001",
"38": "T1611",
"39": "T1595",
"40": "T1562.002",
"41": "T1110.004",
"42": "T1134.002",
"43": "T1014",
"44": "T1499.003",
"45": "T1574.010",
"46": "T1574.005",
"47": "T1114.002",
"48": "T1543.004",
"49": "T1539",
"50": "T1005",
"51": "T1562.001",
"52": "T1553.004",
"53": "T1036.001",
"54": "T1558",
"55": "T1027",
"56": "T1557",
"57": "T1552.004",
"58": "T1110.003",
"59": "T1078",
"60": "T1012",
"61": "T1562.004",
"62": "T1528",
"63": "T1134.001",
"64": "T1547.009",
"65": "T1547.006",
"66": "T1584.002",
"67": "T1562.003",
"68": "T1574.008",
"69": "T1546.016",
"70": "T1556",
"71": "T1553.002",
"72": "T1542.003",
"73": "T1592",
"74": "T1562.007",
"75": "T1083",
"76": "T1007",
"77": "T1057",
"78": "T1547",
"79": "T1499",
"80": "T1124",
"81": "T1087",
"82": "T1110.001",
"83": "T1046",
"84": "T1574.011",
"85": "T1033",
"86": "T1550.004",
"87": "T1049",
"88": "T1606",
"89": "T1563",
"90": "T1069",
"91": "T1564.009",
"92": "T1543.003",
"93": "T1557.002",
"94": "T1546.004",
"95": "T1562.009",
"96": "T1027.009",
"97": "T1036.005",
"98": "T1211",
"99": "T1499.002",
"100": "T1543",
"101": "T1546.008",
"102": "T1615",
"103": "T1036",
"104": "T1590"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"T1005": 50,
"T1007": 76,
"T1012": 60,
"T1014": 43,
"T1016": 9,
"T1018": 12,
"T1021": 17,
"T1021.002": 29,
"T1027": 55,
"T1027.006": 34,
"T1027.009": 96,
"T1033": 85,
"T1036": 103,
"T1036.001": 53,
"T1036.005": 97,
"T1037": 5,
"T1040": 1,
"T1046": 83,
"T1049": 87,
"T1057": 77,
"T1069": 90,
"T1078": 59,
"T1080": 15,
"T1082": 24,
"T1083": 75,
"T1087": 81,
"T1110.001": 82,
"T1110.003": 58,
"T1110.004": 41,
"T1111": 13,
"T1114.002": 47,
"T1120": 2,
"T1124": 80,
"T1133": 32,
"T1134": 18,
"T1134.001": 63,
"T1134.002": 42,
"T1134.003": 27,
"T1135": 35,
"T1185": 30,
"T1211": 98,
"T1217": 14,
"T1491": 16,
"T1498.001": 3,
"T1499": 79,
"T1499.001": 37,
"T1499.002": 99,
"T1499.003": 44,
"T1505.003": 25,
"T1505.005": 23,
"T1528": 62,
"T1539": 49,
"T1542.002": 20,
"T1542.003": 72,
"T1543": 100,
"T1543.001": 8,
"T1543.003": 92,
"T1543.004": 48,
"T1546.001": 26,
"T1546.004": 94,
"T1546.008": 101,
"T1546.016": 69,
"T1547": 78,
"T1547.006": 65,
"T1547.009": 64,
"T1548": 28,
"T1550.001": 4,
"T1550.002": 21,
"T1550.003": 11,
"T1550.004": 86,
"T1552.002": 10,
"T1552.004": 57,
"T1553.002": 71,
"T1553.004": 52,
"T1554": 7,
"T1556": 70,
"T1556.006": 33,
"T1557": 56,
"T1557.002": 93,
"T1558": 54,
"T1558.003": 31,
"T1562.001": 51,
"T1562.002": 40,
"T1562.003": 67,
"T1562.004": 61,
"T1562.007": 74,
"T1562.008": 19,
"T1562.009": 95,
"T1563": 89,
"T1564.009": 91,
"T1565.002": 22,
"T1574.002": 6,
"T1574.005": 46,
"T1574.006": 0,
"T1574.007": 36,
"T1574.008": 68,
"T1574.010": 45,
"T1574.011": 84,
"T1584.002": 66,
"T1590": 104,
"T1592": 73,
"T1595": 39,
"T1606": 88,
"T1611": 38,
"T1615": 102
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"problem_type": "multi_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.51.3",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 30522
}
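
The config above describes a 12-layer BERT fine-tuned for multi-label classification over 105 MITRE ATT&CK technique IDs (`"problem_type": "multi_label_classification"`). Below is a minimal inference sketch, assuming the folder containing this config also holds the model weights and tokenizer files; the model path and the 0.5 decision threshold are placeholder assumptions, not values taken from the repository.

```python
# Minimal multi-label inference sketch for this config (assumptions noted below).
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_id = "path/to/this-model"  # placeholder: real repo id or local folder with this config.json
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

text = "The malware dumps process memory to harvest credentials."
inputs = tokenizer(text, truncation=True, max_length=512, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 105), one logit per technique label

# Because problem_type is multi_label_classification, each label gets an
# independent sigmoid score rather than a softmax over all 105 classes.
probs = torch.sigmoid(logits)[0]
threshold = 0.5  # assumption; tune on validation data
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p >= threshold]
print(predicted)  # list of MITRE ATT&CK technique IDs scoring above the threshold
```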