qgallouedec (HF Staff) committed
Commit fab7b99 (verified) · 1 parent: cbfbdf6

Upload GPTNeoXForCausalLM
Files changed (3):
  1. config.json +4 -10
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -7,18 +7,12 @@
   "bos_token_id": 0,
   "classifier_dropout": 0.1,
   "dtype": "float32",
-  "eos_token_id": 0,
+  "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout": 0.0,
-  "hidden_size": 16,
-  "id2label": {
-    "0": "LABEL_0"
-  },
+  "hidden_size": 8,
   "initializer_range": 0.02,
-  "intermediate_size": 512,
-  "label2id": {
-    "LABEL_0": 0
-  },
+  "intermediate_size": 32,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "gpt_neox",
@@ -34,5 +28,5 @@
   "transformers_version": "4.57.0.dev0",
   "use_cache": true,
   "use_parallel_residual": true,
-  "vocab_size": 50304
+  "vocab_size": 50277
 }
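For orientation, a minimal sketch (not part of this commit) of how the updated config.json values translate into a tiny GPTNeoX model in transformers. Fields not visible in the diff, such as num_hidden_layers and num_attention_heads, are placeholder assumptions here.

from transformers import GPTNeoXConfig, GPTNeoXForCausalLM

config = GPTNeoXConfig(
    vocab_size=50277,        # was 50304 before this commit
    hidden_size=8,           # was 16
    intermediate_size=32,    # was 512
    num_hidden_layers=2,     # assumption: not shown in the diff
    num_attention_heads=2,   # assumption: must divide hidden_size
    hidden_act="gelu",
    max_position_embeddings=2048,
    initializer_range=0.02,
    layer_norm_eps=1e-5,
    use_cache=True,
    use_parallel_residual=True,
    bos_token_id=0,
    eos_token_id=2,          # was 0
)
model = GPTNeoXForCausalLM(config)
print(f"{model.num_parameters():,} parameters")

Configs this small are typical for fast CI test models: the architecture is real GPTNeoX, just scaled down so it instantiates in milliseconds.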
generation_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_from_model_config": true,
   "bos_token_id": 0,
-  "eos_token_id": 0,
+  "eos_token_id": 2,
   "transformers_version": "4.57.0.dev0"
 }
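A minimal sketch of what this one-line change means at generation time (an illustration, not code from the commit): generate() stops a sequence as soon as the eos token id is produced, so token id 2, rather than 0, now terminates generation.

from transformers import GenerationConfig

gen_config = GenerationConfig(bos_token_id=0, eos_token_id=2)
# model.generate(input_ids, generation_config=gen_config) would now
# treat token id 2, not 0, as end-of-sequence.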
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a9e52892fb7e5149fc2b2b3b23a415af074ae86fa4365f3a99a4df68095e3c34
-size 6586584
+oid sha256:986e0bc0ff0a98269e0ee271923a184f6b3c0304a8e49af23e130d214ad15043
+size 3227768
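A back-of-the-envelope check (an assumption about what dominates the checkpoint, not something stated in the commit): at this scale the two vocab_size × hidden_size matrices (input embedding and output head, untied in GPT-NeoX by default) account for nearly all of the parameters, and with float32 weights at 4 bytes each the arithmetic lines up with the file roughly halving.

# Rough sanity check (assumption: embedding and output head dominate;
# 4 bytes per float32 parameter, plus a small safetensors header).
old = 2 * 50304 * 16 * 4   # ≈ 6,438,912 bytes vs. old size 6,586,584
new = 2 * 50277 * 8 * 4    # ≈ 3,217,728 bytes vs. new size 3,227,768
print(old, new)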