{
"_name_or_path": "FacebookAI/roberta-base",
"architectures": [
"RobertaForSequenceClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "T1001",
"1": "T1003",
"2": "T1005",
"3": "T1010",
"4": "T1012",
"5": "T1014",
"6": "T1016",
"7": "T1018",
"8": "T1020",
"9": "T1021",
"10": "T1027",
"11": "T1033",
"12": "T1036",
"13": "T1037",
"14": "T1041",
"15": "T1046",
"16": "T1047",
"17": "T1048",
"18": "T1049",
"19": "T1053",
"20": "T1055",
"21": "T1056",
"22": "T1057",
"23": "T1059",
"24": "T1068",
"25": "T1070",
"26": "T1071",
"27": "T1074",
"28": "T1078",
"29": "T1082",
"30": "T1083",
"31": "T1087",
"32": "T1090",
"33": "T1091",
"34": "T1098",
"35": "T1102",
"36": "T1104",
"37": "T1105",
"38": "T1106",
"39": "T1110",
"40": "T1112",
"41": "T1113",
"42": "T1114",
"43": "T1115",
"44": "T1119",
"45": "T1120",
"46": "T1123",
"47": "T1124",
"48": "T1125",
"49": "T1132",
"50": "T1134",
"51": "T1135",
"52": "T1136",
"53": "T1137",
"54": "T1140",
"55": "T1185",
"56": "T1189",
"57": "T1190",
"58": "T1195",
"59": "T1199",
"60": "T1203",
"61": "T1204",
"62": "T1210",
"63": "T1211",
"64": "T1217",
"65": "T1218",
"66": "T1219",
"67": "T1222",
"68": "T1437",
"69": "T1482",
"70": "T1484",
"71": "T1486",
"72": "T1489",
"73": "T1490",
"74": "T1491",
"75": "T1496",
"76": "T1497",
"77": "T1499",
"78": "T1505",
"79": "T1518",
"80": "T1528",
"81": "T1539",
"82": "T1542",
"83": "T1543",
"84": "T1547",
"85": "T1548",
"86": "T1550",
"87": "T1552",
"88": "T1553",
"89": "T1555",
"90": "T1556",
"91": "T1557",
"92": "T1559",
"93": "T1560",
"94": "T1562",
"95": "T1563",
"96": "T1564",
"97": "T1565",
"98": "T1566",
"99": "T1567",
"100": "T1568",
"101": "T1571",
"102": "T1572",
"103": "T1573",
"104": "T1574",
"105": "T1583",
"106": "T1586",
"107": "T1587",
"108": "T1588",
"109": "T1589",
"110": "T1592",
"111": "T1594",
"112": "T1595",
"113": "T1606",
"114": "T1608",
"115": "T1614",
"116": "T1620",
"117": "T1629"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"T1001": 0,
"T1003": 1,
"T1005": 2,
"T1010": 3,
"T1012": 4,
"T1014": 5,
"T1016": 6,
"T1018": 7,
"T1020": 8,
"T1021": 9,
"T1027": 10,
"T1033": 11,
"T1036": 12,
"T1037": 13,
"T1041": 14,
"T1046": 15,
"T1047": 16,
"T1048": 17,
"T1049": 18,
"T1053": 19,
"T1055": 20,
"T1056": 21,
"T1057": 22,
"T1059": 23,
"T1068": 24,
"T1070": 25,
"T1071": 26,
"T1074": 27,
"T1078": 28,
"T1082": 29,
"T1083": 30,
"T1087": 31,
"T1090": 32,
"T1091": 33,
"T1098": 34,
"T1102": 35,
"T1104": 36,
"T1105": 37,
"T1106": 38,
"T1110": 39,
"T1112": 40,
"T1113": 41,
"T1114": 42,
"T1115": 43,
"T1119": 44,
"T1120": 45,
"T1123": 46,
"T1124": 47,
"T1125": 48,
"T1132": 49,
"T1134": 50,
"T1135": 51,
"T1136": 52,
"T1137": 53,
"T1140": 54,
"T1185": 55,
"T1189": 56,
"T1190": 57,
"T1195": 58,
"T1199": 59,
"T1203": 60,
"T1204": 61,
"T1210": 62,
"T1211": 63,
"T1217": 64,
"T1218": 65,
"T1219": 66,
"T1222": 67,
"T1437": 68,
"T1482": 69,
"T1484": 70,
"T1486": 71,
"T1489": 72,
"T1490": 73,
"T1491": 74,
"T1496": 75,
"T1497": 76,
"T1499": 77,
"T1505": 78,
"T1518": 79,
"T1528": 80,
"T1539": 81,
"T1542": 82,
"T1543": 83,
"T1547": 84,
"T1548": 85,
"T1550": 86,
"T1552": 87,
"T1553": 88,
"T1555": 89,
"T1556": 90,
"T1557": 91,
"T1559": 92,
"T1560": 93,
"T1562": 94,
"T1563": 95,
"T1564": 96,
"T1565": 97,
"T1566": 98,
"T1567": 99,
"T1568": 100,
"T1571": 101,
"T1572": 102,
"T1573": 103,
"T1574": 104,
"T1583": 105,
"T1586": 106,
"T1587": 107,
"T1588": 108,
"T1589": 109,
"T1592": 110,
"T1594": 111,
"T1595": 112,
"T1606": 113,
"T1608": 114,
"T1614": 115,
"T1620": 116,
"T1629": 117
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"problem_type": "multi_label_classification",
"torch_dtype": "float32",
"transformers_version": "4.45.2",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}