CLTL committed (verified)
Commit 49153db
Parent(s): 6a09de1

Upload 2 files


Fixed problem with classification head

Files changed (2)
  1. config.json +18 -18
  2. model.safetensors +2 -2
config.json CHANGED
@@ -2,29 +2,15 @@
   "architectures": [
     "RobertaForSequenceClassification"
   ],
-  "model_type": "xlm-roberta",
-  "problem_type": "multi_label_classification",
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
+  "classifier_dropout": null,
+  "dtype": "float32",
   "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
-  "initializer_range": 0.02,
-  "intermediate_size": 3072,
-  "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 514,
-  "model_type": "xlm-roberta",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
-  "output_past": true,
-  "pad_token_id": 1,
-  "position_embedding_type": "absolute",
-  "transformers_version": "4.17.0.dev0",
-  "type_vocab_size": 1,
-  "use_cache": true,
-  "vocab_size": 250002,
-  "id2label": {
+  "id2label": {
     "0": "open_question_factual",
     "1": "pos_answer",
     "2": "command",
@@ -73,5 +59,19 @@
     "other": 20,
     "opening": 21,
     "respond_to_apology": 22
-  }
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-05,
+  "max_position_embeddings": 514,
+  "model_type": "roberta",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "output_past": true,
+  "pad_token_id": 1,
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.57.0",
+  "type_vocab_size": 1,
+  "use_cache": true,
+  "vocab_size": 250002
 }
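
The substantive change: "model_type" is corrected from "xlm-roberta" to "roberta", matching the declared RobertaForSequenceClassification architecture, the unused "problem_type" is dropped, and the config is regenerated under transformers 4.57.0 (which also adds "classifier_dropout" and "dtype" and reorders the keys). As a rough sanity check, the fixed head can be inspected after loading, as sketched below; the repository id is a placeholder, since the repo name is not shown on this page.

    # Minimal sketch: load the updated files and inspect the classification
    # head. "CLTL/model-repo" is a placeholder, not the actual repository id.
    from transformers import AutoConfig, AutoModelForSequenceClassification

    model_id = "CLTL/model-repo"  # placeholder repo id

    config = AutoConfig.from_pretrained(model_id)
    print(config.model_type)     # "roberta" after this commit
    print(len(config.id2label))  # 23 labels, "0" through "22"

    # RobertaForSequenceClassification sizes its head from num_labels,
    # so the output projection should now be 23-way.
    model = AutoModelForSequenceClassification.from_pretrained(model_id)
    print(model.classifier.out_proj.out_features)  # expected: 23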
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bca28abc7637a08c9b038e79705ae5daec414f0f1e2bb37972039c38adfff8ec
-size 1112197064
+oid sha256:ee2a23e171286b3384363ec0c66aeb427e6ad7c850110576f0980c59c18c9ab8
+size 1112269604
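
The entries above are Git LFS pointer files (spec version, sha256 oid, byte size), not the weights themselves; the changed oid and size confirm the checkpoint was actually re-uploaded. A downloaded copy can be verified against the new pointer, roughly as below (the local path is an assumption):

    # Sketch: check a downloaded model.safetensors against the LFS pointer.
    import hashlib
    from pathlib import Path

    path = Path("model.safetensors")  # assumed local path of the download
    expected_oid = "ee2a23e171286b3384363ec0c66aeb427e6ad7c850110576f0980c59c18c9ab8"
    expected_size = 1112269604

    assert path.stat().st_size == expected_size, "size mismatch"

    sha = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            sha.update(chunk)
    assert sha.hexdigest() == expected_oid, "sha256 mismatch"
    print("download matches the LFS pointer")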