{
  "_name_or_path": "FacebookAI/roberta-base",
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "T1001",
    "1": "T1012",
    "2": "T1014",
    "3": "T1018",
    "4": "T1020",
    "5": "T1021",
    "6": "T1027",
    "7": "T1033",
    "8": "T1037",
    "9": "T1041",
    "10": "T1047",
    "11": "T1048",
    "12": "T1055",
    "13": "T1056",
    "14": "T1057",
    "15": "T1059",
    "16": "T1070",
    "17": "T1071",
    "18": "T1078",
    "19": "T1083",
    "20": "T1090",
    "21": "T1110",
    "22": "T1115",
    "23": "T1120",
    "24": "T1132"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "T1001": 0,
    "T1012": 1,
    "T1014": 2,
    "T1018": 3,
    "T1020": 4,
    "T1021": 5,
    "T1027": 6,
    "T1033": 7,
    "T1037": 8,
    "T1041": 9,
    "T1047": 10,
    "T1048": 11,
    "T1055": 12,
    "T1056": 13,
    "T1057": 14,
    "T1059": 15,
    "T1070": 16,
    "T1071": 17,
    "T1078": 18,
    "T1083": 19,
    "T1090": 20,
    "T1110": 21,
    "T1115": 22,
    "T1120": 23,
    "T1132": 24
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.45.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
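
This config describes a RoBERTa-base sequence classifier with "problem_type" set to "multi_label_classification" over 25 labels whose names (T1001, T1012, ...) look like MITRE ATT&CK technique IDs. Below is a minimal inference sketch under two assumptions not stated in the config: the repository ID is a placeholder for wherever this checkpoint lives, and the 0.5 sigmoid threshold is illustrative rather than a value the model prescribes.

# Minimal multi-label inference sketch for this config.
# Assumptions: MODEL_ID is a placeholder, and the 0.5 decision
# threshold is illustrative (the config specifies no threshold).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

MODEL_ID = "path/to/this-checkpoint"  # placeholder: replace with the actual repo ID or local folder

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_ID)
model.eval()

text = "Example input text describing some observed behavior."
inputs = tokenizer(text, return_tensors="pt", truncation=True)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 25), one logit per label

# Because problem_type is multi_label_classification, each label gets an
# independent sigmoid probability instead of a softmax over all labels,
# so any number of labels can fire at once.
probs = torch.sigmoid(logits)[0].tolist()
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)

The same id2label mapping shipped in the config is what from_pretrained restores on model.config, so the printed names are exactly the "T...." strings defined above; no separate label file is needed.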