matthewleechen committed (verified)
Commit 81bdba6 · Parent: 321ba8a

Upload RobertaForSequenceClassification

Files changed (2):
  1. config.json +15 -5
  2. model.safetensors +2 -2
config.json CHANGED
@@ -9,18 +9,28 @@
   "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
+  "hidden_size": 1024,
+  "id2label": {
+    "0": "PRODUCT",
+    "1": "PROCESS",
+    "2": "UNKNOWN"
+  },
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
+  "label2id": {
+    "PROCESS": 1,
+    "PRODUCT": 0,
+    "UNKNOWN": 2
+  },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
   "model_type": "roberta",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
-  "transformers_version": "4.56.1",
+  "transformers_version": "4.57.3",
   "type_vocab_size": 1,
   "use_cache": true,
   "vocab_size": 50265
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c2d9b8c253627c6e0cb46f3e582dd71c4aba27d28213760c8e0793f1dfd195ec
-size 498612824
+oid sha256:2ad894246923e6ccfcc4249331048da1840ce412b4a566fd833b0d9183461b91
+size 1421499516
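
The updated config moves the backbone from roberta-base dimensions (12 layers, 12 heads, hidden size 768, intermediate size 3072) to roberta-large dimensions (24 layers, 16 heads, hidden size 1024, intermediate size 4096) and adds a three-class label mapping (PRODUCT, PROCESS, UNKNOWN), which is consistent with the safetensors weights growing from roughly 499 MB to roughly 1.42 GB. A minimal usage sketch follows; the repository id below is a placeholder assumption, not the actual Hub path of this model.

# Minimal sketch for loading the uploaded classifier with transformers.
# Assumption: "matthewleechen/<repo-name>" is a placeholder repo id; substitute the real one.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "matthewleechen/<repo-name>"  # placeholder, not the actual repository name

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# Classify a single example text (hypothetical input).
inputs = tokenizer("Example text to classify.", return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits

# id2label comes from the updated config: 0 -> PRODUCT, 1 -> PROCESS, 2 -> UNKNOWN
label = model.config.id2label[logits.argmax(dim=-1).item()]
print(label)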