{
  "_name_or_path": "distilbert-base-uncased",
  "activation": "gelu",
  "architectures": [
    "DistilBertForSequenceClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "finetuning_task": "NICE Category Classification",
  "hidden_dim": 3072,
  "id2label": {
    "0": "NICE_1",
    "1": "NICE_2",
    "10": "NICE_11",
    "11": "NICE_12",
    "12": "NICE_13",
    "13": "NICE_14",
    "14": "NICE_15",
    "15": "NICE_16",
    "16": "NICE_17",
    "17": "NICE_18",
    "18": "NICE_19",
    "19": "NICE_20",
    "2": "NICE_3",
    "20": "NICE_21",
    "21": "NICE_22",
    "22": "NICE_23",
    "23": "NICE_24",
    "24": "NICE_25",
    "25": "NICE_26",
    "26": "NICE_27",
    "27": "NICE_28",
    "28": "NICE_29",
    "29": "NICE_30",
    "3": "NICE_4",
    "30": "NICE_31",
    "31": "NICE_32",
    "32": "NICE_33",
    "33": "NICE_34",
    "34": "NICE_35",
    "35": "NICE_36",
    "36": "NICE_37",
    "37": "NICE_38",
    "38": "NICE_39",
    "39": "NICE_40",
    "4": "NICE_5",
    "40": "NICE_41",
    "41": "NICE_42",
    "42": "NICE_43",
    "43": "NICE_44",
    "44": "NICE_45",
    "5": "NICE_6",
    "6": "NICE_7",
    "7": "NICE_8",
    "8": "NICE_9",
    "9": "NICE_10"
  },
  "initializer_range": 0.02,
  "label2id": {
    "NICE_1": 0,
    "NICE_10": 9,
    "NICE_11": 10,
    "NICE_12": 11,
    "NICE_13": 12,
    "NICE_14": 13,
    "NICE_15": 14,
    "NICE_16": 15,
    "NICE_17": 16,
    "NICE_18": 17,
    "NICE_19": 18,
    "NICE_2": 1,
    "NICE_20": 19,
    "NICE_21": 20,
    "NICE_22": 21,
    "NICE_23": 22,
    "NICE_24": 23,
    "NICE_25": 24,
    "NICE_26": 25,
    "NICE_27": 26,
    "NICE_28": 27,
    "NICE_29": 28,
    "NICE_3": 2,
    "NICE_30": 29,
    "NICE_31": 30,
    "NICE_32": 31,
    "NICE_33": 32,
    "NICE_34": 33,
    "NICE_35": 34,
    "NICE_36": 35,
    "NICE_37": 36,
    "NICE_38": 37,
    "NICE_39": 38,
    "NICE_4": 3,
    "NICE_40": 39,
    "NICE_41": 40,
    "NICE_42": 41,
    "NICE_43": 42,
    "NICE_44": 43,
    "NICE_45": 44,
    "NICE_5": 4,
    "NICE_6": 5,
    "NICE_7": 6,
    "NICE_8": 7,
    "NICE_9": 8
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "pad_token_id": 0,
  "problem_type": "multi_label_classification",
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.12.0",
  "vocab_size": 30522
}