boltuix committed on
Commit
109a1dd
·
verified ·
1 Parent(s): 24104b4

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +28 -28
config.json CHANGED
@@ -8,47 +8,47 @@
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 256,
10
  "id2label": {
11
- "0": "Sadness",
12
- "1": "Anger",
13
- "2": "Love",
14
- "3": "Surprise",
15
- "4": "Fear",
16
- "5": "Happiness",
17
- "6": "Neutral",
18
- "7": "Disgust",
19
- "8": "Shame",
20
- "9": "Guilt",
21
- "10": "Confusion",
22
- "11": "Desire",
23
- "12": "Sarcasm"
24
  },
25
  "initializer_range": 0.02,
26
  "intermediate_size": 1024,
27
  "label2id": {
28
- "Sadness": 0,
29
- "Anger": 1,
30
- "Love": 2,
31
- "Surprise": 3,
32
- "Fear": 4,
33
- "Happiness": 5,
34
- "Neutral": 6,
35
- "Disgust": 7,
36
- "Shame": 8,
37
- "Guilt": 9,
38
- "Confusion": 10,
39
- "Desire": 11,
40
- "Sarcasm": 12
41
  },
42
  "layer_norm_eps": 1e-12,
43
  "max_position_embeddings": 512,
44
  "model_type": "bert",
45
  "num_attention_heads": 4,
46
- "num_hidden_layers": 8,
47
  "pad_token_id": 0,
48
  "position_embedding_type": "absolute",
49
  "problem_type": "single_label_classification",
50
  "torch_dtype": "float32",
51
- "transformers_version": "4.51.3",
52
  "type_vocab_size": 2,
53
  "use_cache": true,
54
  "vocab_size": 30522
 
8
  "hidden_dropout_prob": 0.1,
9
  "hidden_size": 256,
10
  "id2label": {
11
+ "0": "sadness",
12
+ "1": "anger",
13
+ "2": "love",
14
+ "3": "surprise",
15
+ "4": "fear",
16
+ "5": "happiness",
17
+ "6": "neutral",
18
+ "7": "disgust",
19
+ "8": "shame",
20
+ "9": "guilt",
21
+ "10": "confusion",
22
+ "11": "desire",
23
+ "12": "sarcasm"
24
  },
25
  "initializer_range": 0.02,
26
  "intermediate_size": 1024,
27
  "label2id": {
28
+ "sadness": 0,
29
+ "anger": 1,
30
+ "love": 2,
31
+ "surprise": 3,
32
+ "fear": 4,
33
+ "happiness": 5,
34
+ "neutral": 6,
35
+ "disgust": 7,
36
+ "shame": 8,
37
+ "guilt": 9,
38
+ "confusion": 10,
39
+ "desire": 11,
40
+ "sarcasm": 12
41
  },
42
  "layer_norm_eps": 1e-12,
43
  "max_position_embeddings": 512,
44
  "model_type": "bert",
45
  "num_attention_heads": 4,
46
+ "num_hidden_layers": 4,
47
  "pad_token_id": 0,
48
  "position_embedding_type": "absolute",
49
  "problem_type": "single_label_classification",
50
  "torch_dtype": "float32",
51
+ "transformers_version": "4.50.1",
52
  "type_vocab_size": 2,
53
  "use_cache": true,
54
  "vocab_size": 30522