Lhtie committed on
Commit 35ff140 · 1 Parent(s): 1417379

Upload folder using huggingface_hub
commongen_pythia-160m/checkpoint-4200/config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "_name_or_path": "/root/autodl-tmp/models/pythia-160m",
+   "architectures": [
+     "GPTNeoXForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 0,
+   "classifier_dropout": 0.1,
+   "eos_token_id": 0,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.0,
+   "hidden_size": 768,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "layer_norm_eps": 1e-05,
+   "max_position_embeddings": 2048,
+   "model_type": "gpt_neox",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "rope_scaling": null,
+   "rotary_emb_base": 10000,
+   "rotary_pct": 0.25,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.35.0",
+   "use_cache": true,
+   "use_parallel_residual": true,
+   "vocab_size": 50304
+ }
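
This checkpoint config describes a stock Pythia-160m / GPT-NeoX architecture (12 layers, 12 attention heads, hidden size 768, rotary embeddings over 25% of head dimensions), fine-tuned from a local copy of the base model. A minimal loading sketch, assuming the checkpoint directory has been downloaded locally (the path is illustrative, not part of this commit):

from transformers import AutoTokenizer, GPTNeoXForCausalLM

ckpt = "commongen_pythia-160m/checkpoint-4200"      # hypothetical local path to this checkpoint
model = GPTNeoXForCausalLM.from_pretrained(ckpt)    # reads config.json + model.safetensors
tokenizer = AutoTokenizer.from_pretrained(ckpt)     # reads tokenizer.json / tokenizer_config.json
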
commongen_pythia-160m/checkpoint-4200/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 0,
+   "transformers_version": "4.35.0"
+ }
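
The generation config only pins the BOS/EOS ids to token 0 (<|endoftext|>); all other generation parameters fall back to transformers defaults. A generation sketch under that assumption (the CommonGen-style concept-list prompt is illustrative and not taken from this repo):

from transformers import AutoTokenizer, GPTNeoXForCausalLM

ckpt = "commongen_pythia-160m/checkpoint-4200"      # hypothetical local path
model = GPTNeoXForCausalLM.from_pretrained(ckpt)
tokenizer = AutoTokenizer.from_pretrained(ckpt)

inputs = tokenizer("dog frisbee catch throw", return_tensors="pt")   # illustrative concept list
out = model.generate(**inputs, max_new_tokens=32, eos_token_id=0)
print(tokenizer.decode(out[0], skip_special_tokens=True))
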
commongen_pythia-160m/checkpoint-4200/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29e6e142d9bf8728dfa3e5bafb2d718c92665a669cdc941aa39d46b5a2770e2d
+ size 649308728
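
The diff shows only the Git LFS pointer; the ~649 MB safetensors file itself lives in LFS storage (the same applies to the other large binaries below). A download sketch with huggingface_hub, with the Hub repo id left as a placeholder since it is not stated in this commit:

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="<user>/<repo>",  # placeholder; substitute the actual Hub repo id
    filename="commongen_pythia-160m/checkpoint-4200/model.safetensors",
)
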
commongen_pythia-160m/checkpoint-4200/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5afe8304d98ead933b197c30826bf4674492ed13bcf3506964d27ed74c054a1c
+ size 1298707386
commongen_pythia-160m/checkpoint-4200/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa162e4b259528d57df6ef1186bb5134c28c62a8f151c30af24d16e51cc8792d
+ size 14244
commongen_pythia-160m/checkpoint-4200/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20a08daf663064db9d1d66accb2f5a92ff1bd9d1e7d1cae02985a5e856e4d228
+ size 1064
commongen_pythia-160m/checkpoint-4200/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
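
All four special tokens resolve to <|endoftext|> (id 0), so padding reuses the EOS token rather than the tokenizer's <|padding|> entry. A small sketch of what that implies when batching (checkpoint path is illustrative):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("commongen_pythia-160m/checkpoint-4200")  # hypothetical local path
assert tok.pad_token == tok.eos_token == "<|endoftext|>"
batch = tok(["a dog catches a frisbee", "a man throws"], padding=True, return_tensors="pt")
print(batch["attention_mask"])  # padded positions are masked out
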
commongen_pythia-160m/checkpoint-4200/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
commongen_pythia-160m/checkpoint-4200/tokenizer_config.json ADDED
@@ -0,0 +1,212 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|padding|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50254": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50255": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50256": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPTNeoXTokenizer",
+   "unk_token": "<|endoftext|>"
+ }
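
Ids 50254-50276 in added_tokens_decoder are the GPT-NeoX tokenizer's extra non-special tokens; they appear to be runs of whitespace whose content the diff viewer collapses to a single space above. A quick inspection sketch, assuming the checkpoint tokenizer is available locally:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("commongen_pythia-160m/checkpoint-4200")  # hypothetical local path
print(tok.convert_ids_to_tokens([0, 1, 50254, 50276]))
# expected: '<|endoftext|>', '<|padding|>', then two of the non-special added tokens
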
commongen_pythia-160m/checkpoint-4200/trainer_state.json ADDED
@@ -0,0 +1,235 @@
+ {
+   "best_metric": 38.17561721801758,
+   "best_model_checkpoint": "/root/autodl-tmp/commongen_pythia-160m/checkpoint-4200",
+   "epoch": 1.9943019943019942,
+   "eval_steps": 200,
+   "global_step": 4200,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.09,
+       "eval_loss": 40.878292083740234,
+       "eval_runtime": 31.6071,
+       "eval_samples_per_second": 127.124,
+       "eval_steps_per_second": 7.973,
+       "step": 200
+     },
+     {
+       "epoch": 0.19,
+       "eval_loss": 40.0030632019043,
+       "eval_runtime": 32.0957,
+       "eval_samples_per_second": 125.188,
+       "eval_steps_per_second": 7.852,
+       "step": 400
+     },
+     {
+       "epoch": 0.24,
+       "learning_rate": 2.857549857549858e-06,
+       "loss": 40.7376,
+       "step": 500
+     },
+     {
+       "epoch": 0.28,
+       "eval_loss": 39.902565002441406,
+       "eval_runtime": 31.7745,
+       "eval_samples_per_second": 126.454,
+       "eval_steps_per_second": 7.931,
+       "step": 600
+     },
+     {
+       "epoch": 0.38,
+       "eval_loss": 39.43976974487305,
+       "eval_runtime": 31.6834,
+       "eval_samples_per_second": 126.817,
+       "eval_steps_per_second": 7.954,
+       "step": 800
+     },
+     {
+       "epoch": 0.47,
+       "learning_rate": 2.7150997150997152e-06,
+       "loss": 37.1707,
+       "step": 1000
+     },
+     {
+       "epoch": 0.47,
+       "eval_loss": 39.4637336730957,
+       "eval_runtime": 31.4241,
+       "eval_samples_per_second": 127.864,
+       "eval_steps_per_second": 8.019,
+       "step": 1000
+     },
+     {
+       "epoch": 0.57,
+       "eval_loss": 38.90019226074219,
+       "eval_runtime": 31.576,
+       "eval_samples_per_second": 127.249,
+       "eval_steps_per_second": 7.981,
+       "step": 1200
+     },
+     {
+       "epoch": 0.66,
+       "eval_loss": 38.968284606933594,
+       "eval_runtime": 31.487,
+       "eval_samples_per_second": 127.608,
+       "eval_steps_per_second": 8.003,
+       "step": 1400
+     },
+     {
+       "epoch": 0.71,
+       "learning_rate": 2.5726495726495726e-06,
+       "loss": 35.44,
+       "step": 1500
+     },
+     {
+       "epoch": 0.76,
+       "eval_loss": 38.754234313964844,
+       "eval_runtime": 31.6366,
+       "eval_samples_per_second": 127.005,
+       "eval_steps_per_second": 7.965,
+       "step": 1600
+     },
+     {
+       "epoch": 0.85,
+       "eval_loss": 39.10488510131836,
+       "eval_runtime": 31.4847,
+       "eval_samples_per_second": 127.618,
+       "eval_steps_per_second": 8.004,
+       "step": 1800
+     },
+     {
+       "epoch": 0.95,
+       "learning_rate": 2.43019943019943e-06,
+       "loss": 34.4738,
+       "step": 2000
+     },
+     {
+       "epoch": 0.95,
+       "eval_loss": 38.68024444580078,
+       "eval_runtime": 31.5379,
+       "eval_samples_per_second": 127.402,
+       "eval_steps_per_second": 7.99,
+       "step": 2000
+     },
+     {
+       "epoch": 1.04,
+       "eval_loss": 38.74475860595703,
+       "eval_runtime": 31.4804,
+       "eval_samples_per_second": 127.635,
+       "eval_steps_per_second": 8.005,
+       "step": 2200
+     },
+     {
+       "epoch": 1.14,
+       "eval_loss": 38.3890266418457,
+       "eval_runtime": 31.473,
+       "eval_samples_per_second": 127.665,
+       "eval_steps_per_second": 8.007,
+       "step": 2400
+     },
+     {
+       "epoch": 1.19,
+       "learning_rate": 2.2877492877492878e-06,
+       "loss": 32.9663,
+       "step": 2500
+     },
+     {
+       "epoch": 1.23,
+       "eval_loss": 38.70350646972656,
+       "eval_runtime": 31.6528,
+       "eval_samples_per_second": 126.94,
+       "eval_steps_per_second": 7.961,
+       "step": 2600
+     },
+     {
+       "epoch": 1.33,
+       "eval_loss": 38.5815315246582,
+       "eval_runtime": 31.6616,
+       "eval_samples_per_second": 126.905,
+       "eval_steps_per_second": 7.959,
+       "step": 2800
+     },
+     {
+       "epoch": 1.42,
+       "learning_rate": 2.1452991452991456e-06,
+       "loss": 32.207,
+       "step": 3000
+     },
+     {
+       "epoch": 1.42,
+       "eval_loss": 39.05810546875,
+       "eval_runtime": 31.3896,
+       "eval_samples_per_second": 128.004,
+       "eval_steps_per_second": 8.028,
+       "step": 3000
+     },
+     {
+       "epoch": 1.52,
+       "eval_loss": 38.37156295776367,
+       "eval_runtime": 31.5129,
+       "eval_samples_per_second": 127.503,
+       "eval_steps_per_second": 7.997,
+       "step": 3200
+     },
+     {
+       "epoch": 1.61,
+       "eval_loss": 38.7572135925293,
+       "eval_runtime": 31.6149,
+       "eval_samples_per_second": 127.092,
+       "eval_steps_per_second": 7.971,
+       "step": 3400
+     },
+     {
+       "epoch": 1.66,
+       "learning_rate": 2.002849002849003e-06,
+       "loss": 31.688,
+       "step": 3500
+     },
+     {
+       "epoch": 1.71,
+       "eval_loss": 38.529903411865234,
+       "eval_runtime": 31.463,
+       "eval_samples_per_second": 127.706,
+       "eval_steps_per_second": 8.009,
+       "step": 3600
+     },
+     {
+       "epoch": 1.8,
+       "eval_loss": 38.76222610473633,
+       "eval_runtime": 31.7691,
+       "eval_samples_per_second": 126.475,
+       "eval_steps_per_second": 7.932,
+       "step": 3800
+     },
+     {
+       "epoch": 1.9,
+       "learning_rate": 1.8603988603988605e-06,
+       "loss": 31.6849,
+       "step": 4000
+     },
+     {
+       "epoch": 1.9,
+       "eval_loss": 38.42369079589844,
+       "eval_runtime": 31.5455,
+       "eval_samples_per_second": 127.372,
+       "eval_steps_per_second": 7.988,
+       "step": 4000
+     },
+     {
+       "epoch": 1.99,
+       "eval_loss": 38.17561721801758,
+       "eval_runtime": 31.6756,
+       "eval_samples_per_second": 126.848,
+       "eval_steps_per_second": 7.956,
+       "step": 4200
+     }
+   ],
+   "logging_steps": 500,
+   "max_steps": 10530,
+   "num_train_epochs": 5,
+   "save_steps": 200,
+   "total_flos": 5.106737347087565e+16,
+   "trial_name": null,
+   "trial_params": null
+ }
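
trainer_state.json records the schedule behind this checkpoint: max_steps 10530 over 5 epochs is roughly 2106 optimizer steps per epoch, so step 4200 lands at 4200 / 2106 ≈ 1.994 epochs, matching the "epoch" field, and the step-4200 eval_loss (38.1756) is the best value in log_history, which is why this directory is the best_model_checkpoint. A small sketch for pulling the eval curve out of the file (the local path is illustrative):

import json

with open("commongen_pythia-160m/checkpoint-4200/trainer_state.json") as f:
    state = json.load(f)

evals = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]
print(min(evals, key=lambda x: x[1]))  # (4200, 38.17561721801758)
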
commongen_pythia-160m/checkpoint-4200/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78a358522ba11ba088543bfe9410908b8a16515991e75e2a7dc5d3e32858aa69
+ size 4600
commongen_pythia-160m/config.json CHANGED
@@ -1,11 +1,14 @@
  {
-   "_name_or_path": "/root/autodl-tmp/models/pythia-160m-deduped",
+   "_name_or_path": "/root/autodl-tmp/models/pythia-160m",
    "architectures": [
      "GPTNeoXForCausalLM"
    ],
+   "attention_dropout": 0.0,
    "bos_token_id": 0,
+   "classifier_dropout": 0.1,
    "eos_token_id": 0,
    "hidden_act": "gelu",
+   "hidden_dropout": 0.0,
    "hidden_size": 768,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
@@ -15,11 +18,12 @@
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "pad_token_id": 0,
+   "rope_scaling": null,
    "rotary_emb_base": 10000,
    "rotary_pct": 0.25,
    "tie_word_embeddings": false,
    "torch_dtype": "float32",
-   "transformers_version": "4.24.0",
+   "transformers_version": "4.35.0",
    "use_cache": true,
    "use_parallel_residual": true,
    "vocab_size": 50304
commongen_pythia-160m/eval_results_lm.txt CHANGED
@@ -1 +1 @@
- perplexity = 1.6278124257903542e+17
+ perplexity = 3.7971687760844264e+16
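
The updated value is consistent with the best eval loss recorded in trainer_state.json: exp(38.17561721801758) ≈ 3.797e+16, i.e. the reported perplexity is just the exponential of the reported eval loss. A one-line check:

import math
print(math.exp(38.17561721801758))  # ≈ 3.797e+16, matching the new eval_results_lm.txt value
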
commongen_pythia-160m/generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 0,
+   "eos_token_id": 0,
+   "transformers_version": "4.35.0"
+ }
commongen_pythia-160m/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29e6e142d9bf8728dfa3e5bafb2d718c92665a669cdc941aa39d46b5a2770e2d
+ size 649308728
commongen_pythia-160m/tokenizer_config.json CHANGED
@@ -1,9 +1,212 @@
  {
    "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<|padding|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "50254": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50255": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50256": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50257": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50258": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50259": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50260": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50261": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50262": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50263": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50264": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50265": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50266": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50267": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50268": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50269": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50270": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50271": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50272": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50273": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50274": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50275": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "50276": {
+       "content": " ",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
    "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
    "eos_token": "<|endoftext|>",
-   "name_or_path": "/root/autodl-tmp/models/pythia-160m-deduped",
-   "special_tokens_map_file": "/admin/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/4e49eadb5d14bd22f314ec3f45b69a87b88c7691/special_tokens_map.json",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<|endoftext|>",
    "tokenizer_class": "GPTNeoXTokenizer",
    "unk_token": "<|endoftext|>"
  }
commongen_pythia-160m/training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:cdddcf5691253d7b3f2c469559234906d8157a6147e4dddd5d34b35d084afede
- size 3451
+ oid sha256:78a358522ba11ba088543bfe9410908b8a16515991e75e2a7dc5d3e32858aa69
+ size 4600
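
training_args.bin is a pickled transformers TrainingArguments object. A loading sketch, assuming a compatible transformers install is available (torch.load unpickles arbitrary objects, so only do this for files you trust; the local path is illustrative):

import torch

args = torch.load("commongen_pythia-160m/training_args.bin", weights_only=False)
print(type(args).__name__, args.num_train_epochs, args.learning_rate)
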