{
  "_name_or_path": "bert-base-uncased",
  "architectures": [
    "BertForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "classifier_dropout": null,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "acknowledging",
    "1": "afraid",
    "2": "agreeing",
    "3": "angry",
    "4": "annoyed",
    "5": "anticipating",
    "6": "anxious",
    "7": "apprehensive",
    "8": "ashamed",
    "9": "caring",
    "10": "confident",
    "11": "consoling",
    "12": "content",
    "13": "devastated",
    "14": "disappointed",
    "15": "disgusted",
    "16": "embarrassed",
    "17": "encouraging",
    "18": "exciting",
    "19": "faithful",
    "20": "furious",
    "21": "grateful",
    "22": "guilty",
    "23": "hopeful",
    "24": "impressed",
    "25": "jealous",
    "26": "joyful",
    "27": "lonely",
    "28": "neutral",
    "29": "nostalgic",
    "30": "prepared",
    "31": "proud",
    "32": "questioning",
    "33": "sad",
    "34": "sentimental",
    "35": "suggesting",
    "36": "surprised",
    "37": "sympathizing",
    "38": "terrified",
    "39": "trusting",
    "40": "wishing"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "acknowledging": 0,
    "afraid": 1,
    "agreeing": 2,
    "angry": 3,
    "annoyed": 4,
    "anticipating": 5,
    "anxious": 6,
    "apprehensive": 7,
    "ashamed": 8,
    "caring": 9,
    "confident": 10,
    "consoling": 11,
    "content": 12,
    "devastated": 13,
    "disappointed": 14,
    "disgusted": 15,
    "embarrassed": 16,
    "encouraging": 17,
    "exciting": 18,
    "faithful": 19,
    "furious": 20,
    "grateful": 21,
    "guilty": 22,
    "hopeful": 23,
    "impressed": 24,
    "jealous": 25,
    "joyful": 26,
    "lonely": 27,
    "neutral": 28,
    "nostalgic": 29,
    "prepared": 30,
    "proud": 31,
    "questioning": 32,
    "sad": 33,
    "sentimental": 34,
    "suggesting": 35,
    "surprised": 36,
    "sympathizing": 37,
    "terrified": 38,
    "trusting": 39,
    "wishing": 40
  },
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "position_embedding_type": "absolute",
  "problem_type": "multi_label_classification",
  "torch_dtype": "float32",
  "transformers_version": "4.27.3",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 30522
}