smithblack-0 committed (verified)
Commit b4e649c · 1 Parent(s): ac4ead6

Upload folder using huggingface_hub

none_123/epoch0/metadata.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "epoch_num": 0,
+   "global_batch_num": 134,
+   "device": "cuda",
+   "dtype": "bfloat16"
+ }
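
For orientation, here is a minimal sketch of how this per-epoch metadata might be read back once the checkpoint folder has been downloaded locally; the local path is an assumption, not something stated by the upload.

```python
import json
from pathlib import Path

# Hypothetical local copy of the uploaded folder (path is an assumption).
checkpoint_dir = Path("none_123/epoch0")

# metadata.json records the run context: epoch index, number of global
# batches seen so far, and the device/dtype used during training.
with open(checkpoint_dir / "metadata.json") as f:
    metadata = json.load(f)

print(metadata["epoch_num"])          # 0
print(metadata["global_batch_num"])   # 134
print(metadata["device"], metadata["dtype"])  # cuda bfloat16
```
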
none_123/epoch0/metrics.json ADDED
@@ -0,0 +1,430 @@
+ {
+   "training": {
+     "effective_batch_nums": [
+       1,
+       2,
+       3,
+       4,
+       5,
+       6,
+       7,
+       8,
+       9,
+       10,
+       11,
+       12,
+       13,
+       14,
+       15,
+       16,
+       17,
+       18,
+       19,
+       20,
+       21,
+       22,
+       23,
+       24,
+       25,
+       26,
+       27,
+       28,
+       29,
+       30,
+       31,
+       32,
+       33,
+       34,
+       35,
+       36,
+       37,
+       38,
+       39,
+       40,
+       41,
+       42,
+       43,
+       44,
+       45,
+       46,
+       47,
+       48,
+       49,
+       50,
+       51,
+       52,
+       53,
+       54,
+       55,
+       56,
+       57,
+       58,
+       59,
+       60,
+       61,
+       62,
+       63,
+       64,
+       65,
+       66,
+       67,
+       68,
+       69,
+       70,
+       71,
+       72,
+       73,
+       74,
+       75,
+       76,
+       77,
+       78,
+       79,
+       80,
+       81,
+       82,
+       83,
+       84,
+       85,
+       86,
+       87,
+       88,
+       89,
+       90,
+       91,
+       92,
+       93,
+       94,
+       95,
+       96,
+       97,
+       98,
+       99,
+       100,
+       101,
+       102,
+       103,
+       104,
+       105,
+       106,
+       107,
+       108,
+       109,
+       110,
+       111,
+       112,
+       113,
+       114,
+       115,
+       116,
+       117,
+       118,
+       119,
+       120,
+       121,
+       122,
+       123,
+       124,
+       125,
+       126,
+       127,
+       128,
+       129,
+       130,
+       131,
+       132,
+       133,
+       134
+     ],
+     "losses": [
+       127.5,
+       39.0,
+       35.5,
+       30.375,
+       22.375,
+       17.125,
+       14.5625,
+       13.0625,
+       11.625,
+       10.625,
+       10.0625,
+       9.6875,
+       9.5,
+       9.5625,
+       9.5,
+       9.4375,
+       9.25,
+       9.0625,
+       9.0,
+       8.9375,
+       8.875,
+       8.875,
+       8.75,
+       8.75,
+       8.75,
+       8.625,
+       8.625,
+       8.625,
+       8.5625,
+       8.625,
+       8.5625,
+       8.5625,
+       8.5,
+       8.5625,
+       8.5625,
+       8.5625,
+       8.5625,
+       8.5625,
+       8.5,
+       8.5,
+       8.5,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.4375,
+       8.4375,
+       8.5,
+       8.4375,
+       8.5,
+       8.4375,
+       8.4375,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.375,
+       8.4375,
+       8.4375,
+       8.5,
+       8.4375,
+       8.4375,
+       8.375,
+       8.5,
+       8.5625,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.5,
+       8.5,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.5,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.5,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.375,
+       8.4375,
+       8.4375,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5,
+       8.4375,
+       8.5,
+       8.5
+     ],
+     "grad_norms": [
+       11.4375,
+       19.75,
+       13.4375,
+       8.75,
+       6.28125,
+       5.96875,
+       4.09375,
+       4.46875,
+       1.6171875,
+       1.125,
+       1.0078125,
+       1.0859375,
+       1.203125,
+       2.09375,
+       1.2734375,
+       1.5625,
+       1.4921875,
+       1.140625,
+       0.546875,
+       0.64453125,
+       0.79296875,
+       0.671875,
+       0.640625,
+       0.50390625,
+       0.4765625,
+       0.58984375,
+       0.6171875,
+       0.34375,
+       0.67578125,
+       0.376953125,
+       0.419921875,
+       0.345703125,
+       0.4140625,
+       0.3515625,
+       0.62890625,
+       0.50390625,
+       0.392578125,
+       0.57421875,
+       0.30859375,
+       0.625,
+       0.33203125,
+       0.81640625,
+       0.81640625,
+       0.408203125,
+       1.1015625,
+       0.92578125,
+       0.34375,
+       0.8203125,
+       0.76171875,
+       0.28515625,
+       0.67578125,
+       0.7421875,
+       0.232421875,
+       0.69921875,
+       0.99609375,
+       0.70703125,
+       0.271484375,
+       0.77734375,
+       1.015625,
+       0.87890625,
+       0.2255859375,
+       0.625,
+       0.8984375,
+       0.91015625,
+       0.494140625,
+       0.458984375,
+       0.51953125,
+       0.62890625,
+       0.361328125,
+       0.451171875,
+       0.72265625,
+       0.8359375,
+       0.51953125,
+       0.423828125,
+       0.6328125,
+       0.93359375,
+       0.84765625,
+       0.4296875,
+       0.46484375,
+       0.9765625,
+       1.2265625,
+       1.21875,
+       0.96484375,
+       0.625,
+       0.32421875,
+       0.44140625,
+       0.53125,
+       0.30078125,
+       0.625,
+       1.1328125,
+       1.6796875,
+       2.140625,
+       2.203125,
+       1.3359375,
+       0.416015625,
+       1.171875,
+       2.078125,
+       2.421875,
+       1.8828125,
+       0.89453125,
+       0.40234375,
+       1.0859375,
+       1.5859375,
+       1.3984375,
+       1.078125,
+       0.6640625,
+       0.45703125,
+       0.44921875,
+       0.796875,
+       0.6015625,
+       0.189453125,
+       0.609375,
+       0.8984375,
+       1.3046875,
+       1.8828125,
+       1.9765625,
+       1.6875,
+       1.390625,
+       0.88671875,
+       0.5625,
+       0.443359375,
+       0.5703125,
+       0.6796875,
+       0.84375,
+       1.1171875,
+       1.5,
+       1.71875,
+       1.546875,
+       1.2265625,
+       1.0625,
+       0.859375,
+       0.84375,
+       0.70703125,
+       0.734375
+     ]
+   },
+   "eval": {
+     "effective_batch_nums": [
+       50,
+       100
+     ],
+     "losses": [
+       8.507352941176471,
+       8.477941176470589
+     ],
+     "perplexities": [
+       4971.294117647059,
+       4841.411764705882
+     ],
+     "accuracies": [
+       0.05091408201002726,
+       0.05091408201002726
+     ]
+   }
+ }
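
metrics.json stores parallel arrays: a training loss and gradient norm for each of the 134 effective batches, plus eval loss, perplexity, and accuracy recorded at batches 50 and 100. A minimal plotting sketch is shown below; it assumes the folder has been downloaded locally and that matplotlib is available, neither of which is part of this upload.

```python
import json
import matplotlib.pyplot as plt

with open("none_123/epoch0/metrics.json") as f:
    metrics = json.load(f)

train = metrics["training"]
eval_ = metrics["eval"]

fig, (ax_loss, ax_grad) = plt.subplots(2, 1, sharex=True)

# Training loss per effective batch (parallel to effective_batch_nums).
ax_loss.plot(train["effective_batch_nums"], train["losses"], label="train loss")
# Eval losses at the batch numbers where evaluation was run.
ax_loss.scatter(eval_["effective_batch_nums"], eval_["losses"], marker="x", label="eval loss")
ax_loss.set_ylabel("loss")
ax_loss.legend()

# Gradient norm per effective batch.
ax_grad.plot(train["effective_batch_nums"], train["grad_norms"])
ax_grad.set_ylabel("grad norm")
ax_grad.set_xlabel("effective batch")

plt.show()
```
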
none_123/epoch0/model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c5292aa4c149a79be435b964a91f0f665463086e889d8d8abf0ba73378d5245
+ size 23544999
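
These three lines are a Git LFS pointer, not the weights themselves: the spec version, the SHA-256 object id, and the byte size (about 23.5 MB). The actual tensor data lives in LFS storage and is resolved when the file is downloaded. A sketch of fetching it through huggingface_hub follows; the repo_id is a placeholder, and whether model.pt holds a bare state_dict or a richer checkpoint object is an assumption to verify after loading.

```python
import torch
from huggingface_hub import hf_hub_download

# repo_id is hypothetical; substitute the repository this commit belongs to.
weights_path = hf_hub_download(
    repo_id="smithblack-0/<repo-name>",
    filename="none_123/epoch0/model.pt",
)

# On recent PyTorch versions you may need weights_only=False if the file
# contains more than plain tensors; inspect the result before using it.
checkpoint = torch.load(weights_path, map_location="cpu")
print(type(checkpoint))
```
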
none_123/epoch0/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4433a7c17359a6d99c92b6c2c3d13a00cf89ee10ac2597a3c04e639646f00a7c
+ size 47166496
none_123/epoch0/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b407c2522c877500eb11a9ad39a136a757ba230db9d6c2aa131ec52c8c3cdf8
+ size 1465
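
Together, model.pt, optimizer.pt, and scheduler.pt look like a resumable training checkpoint (the optimizer file is roughly twice the model's size, consistent with Adam-style per-parameter state, and the scheduler file is only ~1.5 KB). A hedged sketch of restoring them is below; it assumes each file holds a state_dict and that matching model, optimizer, and scheduler objects have already been rebuilt, neither of which this upload states.

```python
import torch

def resume_from(checkpoint_dir: str, model, optimizer, scheduler) -> None:
    """Restore a run from the three .pt files in this folder.

    Assumes each file stores a state_dict for an already-constructed object
    matching the original architecture, optimizer, and schedule (an
    assumption about the checkpoint format, not something the upload states).
    """
    model.load_state_dict(torch.load(f"{checkpoint_dir}/model.pt", map_location="cpu"))
    optimizer.load_state_dict(torch.load(f"{checkpoint_dir}/optimizer.pt", map_location="cpu"))
    scheduler.load_state_dict(torch.load(f"{checkpoint_dir}/scheduler.pt", map_location="cpu"))
```
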
none_123/epoch0/tokenizer/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
none_123/epoch0/tokenizer/special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "bos_token": "<|endoftext|>",
+   "eos_token": "<|endoftext|>",
+   "pad_token": "<|endoftext|>",
+   "unk_token": "<|endoftext|>"
+ }
none_123/epoch0/tokenizer/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
none_123/epoch0/tokenizer/tokenizer_config.json ADDED
@@ -0,0 +1,21 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "extra_special_tokens": {},
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
none_123/epoch0/tokenizer/vocab.json ADDED
The diff for this file is too large to render. See raw diff
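
The tokenizer folder is a standard GPT-2 BPE tokenizer: tokenizer_class is GPT2Tokenizer, all special tokens map to <|endoftext|> (id 50256), and model_max_length is 1024. A minimal loading sketch, assuming the folder has been downloaded locally and transformers is installed:

```python
from transformers import AutoTokenizer

# Load the saved tokenizer straight from the uploaded folder (local path assumed).
tokenizer = AutoTokenizer.from_pretrained("none_123/epoch0/tokenizer")

print(tokenizer.__class__.__name__)  # typically GPT2TokenizerFast when tokenizer.json is present
print(tokenizer.eos_token)           # <|endoftext|>
print(tokenizer.model_max_length)    # 1024

ids = tokenizer("hello world")["input_ids"]
print(ids)
```
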