RaphaelMourad committed
Commit c52817c · verified · 1 Parent(s): efa1846

Upload 9 files
config.json ADDED
@@ -0,0 +1,47 @@
+ {
+ "_name_or_path": "../MistralModels/models/ModernBert-small",
+ "architectures": [
+ "ModernBertForMaskedLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "classifier_activation": "gelu",
+ "classifier_bias": false,
+ "classifier_dropout": 0.0,
+ "classifier_pooling": "mean",
+ "cls_token_id": 1,
+ "decoder_bias": true,
+ "deterministic_flash_attn": false,
+ "embedding_dropout": 0.0,
+ "eos_token_id": 2,
+ "global_attn_every_n_layers": 3,
+ "global_rope_theta": 160000.0,
+ "gradient_checkpointing": false,
+ "hidden_activation": "gelu",
+ "hidden_size": 768,
+ "initializer_cutoff_factor": 2.0,
+ "initializer_range": 0.02,
+ "intermediate_size": 768,
+ "layer_norm_eps": 1e-05,
+ "local_attention": 128,
+ "local_rope_theta": 10000.0,
+ "max_position_embeddings": 8192,
+ "mlp_bias": false,
+ "mlp_dropout": 0.0,
+ "model_type": "modernbert",
+ "norm_bias": false,
+ "norm_eps": 1e-05,
+ "num_attention_heads": 8,
+ "num_hidden_layers": 8,
+ "pad_token_id": 3,
+ "position_embedding_type": "absolute",
+ "reference_compile": false,
+ "repad_logits_with_grad": false,
+ "sep_token_id": 2,
+ "sparse_pred_ignore_index": -100,
+ "sparse_prediction": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.49.0",
+ "vocab_size": 30
+ }
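This config describes a compact ModernBERT masked-LM: 8 layers, 8 attention heads, hidden size 768, an 8192-token context with global attention every third layer, and bfloat16 weights. A minimal loading sketch, assuming the nine uploaded files sit in a local directory (the path below is a placeholder) and transformers >= 4.48, where ModernBERT support landed (this config was written by 4.49.0):

```python
# Minimal sketch: load this checkpoint for masked-LM inference.
# "./ModernBert-small" is a placeholder for wherever the files were downloaded.
from transformers import AutoConfig, AutoModelForMaskedLM

config = AutoConfig.from_pretrained("./ModernBert-small")
print(config.model_type, config.num_hidden_layers, config.hidden_size)  # modernbert 8 768

model = AutoModelForMaskedLM.from_pretrained("./ModernBert-small")
n_params = sum(p.numel() for p in model.parameters())
# ~33.7M, consistent with the 67,319,052-byte bfloat16 safetensors below
print(f"{n_params / 1e6:.1f}M parameters")
```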
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1034a39c189beb847fc0c4d7efc880d6eead7174624fcfda1fdb0f153690ef5c
+ size 67319052
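Note that the diff for each binary file shows only its Git LFS pointer (spec version, sha256 oid, byte size), not the payload itself; the 67 MB weights file is fetched separately on clone or download. A sketch for checking a materialized copy against the pointer's oid:

```python
# Sketch: verify a downloaded model.safetensors against the LFS pointer.
# Assumes the file has been materialized locally (e.g. via `git lfs pull`
# or huggingface_hub); the oid below is taken from the pointer above.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

assert sha256_of("model.safetensors") == (
    "1034a39c189beb847fc0c4d7efc880d6eead7174624fcfda1fdb0f153690ef5c"
)
```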
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6cfe3540f24c66f9bc38a546448a9e5d9989705fafc8d4b37aa5dafd0c7460f2
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9d790e47c97798eeebe899758ae713f0848f4aec29a526fe81a6104b378b3ee
+ size 1064
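rng_state.pth and scheduler.pt are Trainer checkpoint artifacts rather than model files: together with any optimizer state they let `trainer.train(resume_from_checkpoint=...)` restore the RNG and learning-rate schedule mid-run, and are not needed for inference. A quick inspection sketch; the keys named in the comments are typical, but the exact contents depend on the training setup:

```python
# Sketch: peek inside the two small checkpoint files with PyTorch.
import torch

# weights_only=False is needed on torch >= 2.6, where it defaults to True
# and these pickled state dicts would otherwise be rejected.
sched = torch.load("scheduler.pt", map_location="cpu", weights_only=False)
print(sorted(sched.keys()))  # e.g. '_last_lr', 'base_lrs', 'last_epoch', ...

rng = torch.load("rng_state.pth", map_location="cpu", weights_only=False)
print(sorted(rng.keys()))    # python / numpy / cpu / cuda RNG states saved by Trainer
```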
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "unk_token": "[UNK]"
+ }
tokenizer.json ADDED
@@ -0,0 +1,2091 @@
+ {
+ "version": "1.0",
+ "truncation": null,
+ "padding": null,
+ "added_tokens": [
+ {
+ "id": 0,
+ "content": "[UNK]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 1,
+ "content": "[CLS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 2,
+ "content": "[SEP]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 3,
+ "content": "[PAD]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 4,
+ "content": "[MASK]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ }
+ ],
+ "normalizer": null,
+ "pre_tokenizer": {
+ "type": "Whitespace"
+ },
+ "post_processor": null,
+ "decoder": null,
+ "model": {
+ "type": "BPE",
+ "dropout": null,
+ "unk_token": "[UNK]",
+ "continuing_subword_prefix": null,
+ "end_of_word_suffix": null,
+ "fuse_unk": false,
+ "byte_fallback": false,
+ "vocab": {
+ "[UNK]": 0,
+ "[CLS]": 1,
+ "[SEP]": 2,
+ "[PAD]": 3,
+ "[MASK]": 4,
+ "A": 5,
+ "B": 6,
+ "C": 7,
+ "D": 8,
+ "E": 9,
+ "F": 10,
+ "G": 11,
+ "H": 12,
+ "I": 13,
+ "K": 14,
+ "L": 15,
+ "M": 16,
+ "N": 17,
+ "P": 18,
+ "Q": 19,
+ "R": 20,
+ "S": 21,
+ "T": 22,
+ "V": 23,
+ "W": 24,
+ "X": 25,
+ "Y": 26,
+ "Z": 27,
+ "AA": 28,
+ "SS": 29,
+ "TT": 30,
+ "GG": 31,
+ "LL": 32,
+ "AG": 33,
+ "LS": 34,
+ "TV": 35,
+ "AV": 36,
+ "AL": 37,
+ "AS": 38,
+ "DG": 39,
+ "TG": 40,
+ "EE": 41,
+ "TL": 42,
+ "TS": 43,
+ "DV": 44,
+ "AE": 45,
+ "DL": 46,
+ "SG": 47,
+ "AP": 48,
+ "RL": 49,
+ "VV": 50,
+ "SL": 51,
+ "TP": 52,
+ "TI": 53,
+ "EL": 54,
+ "SV": 55,
+ "AD": 56,
+ "NG": 57,
+ "AR": 58,
+ "EV": 59,
+ "QL": 60,
+ "TD": 61,
+ "AI": 62,
+ "NL": 63,
+ "EG": 64,
+ "PV": 65,
+ "KL": 66,
+ "PG": 67,
+ "EI": 68,
+ "NV": 69,
+ "DI": 70,
+ "PL": 71,
+ "DS": 72,
+ "KK": 73,
+ "RV": 74,
+ "NI": 75,
+ "AQ": 76,
+ "PS": 77,
+ "AT": 78,
+ "EK": 79,
+ "ES": 80,
+ "RG": 81,
+ "QQ": 82,
+ "NS": 83,
+ "DD": 84,
+ "AK": 85,
+ "RR": 86,
+ "VL": 87,
+ "AF": 88,
+ "TF": 89,
+ "RI": 90,
+ "PP": 91,
+ "QV": 92,
+ "TE": 93,
+ "AN": 94,
+ "SI": 95,
+ "KV": 96,
+ "QG": 97,
+ "KI": 98,
+ "RS": 99,
+ "TY": 100,
+ "DP": 101,
+ "NN": 102,
+ "KG": 103,
+ "DE": 104,
+ "QI": 105,
+ "FG": 106,
+ "VG": 107,
+ "YL": 108,
+ "FL": 109,
+ "TN": 110,
+ "PI": 111,
+ "KS": 112,
+ "QS": 113,
+ "PE": 114,
+ "AY": 115,
+ "HL": 116,
+ "RE": 117,
+ "FS": 118,
+ "FV": 119,
+ "TK": 120,
+ "GL": 121,
+ "VS": 122,
+ "TQ": 123,
+ "DN": 124,
+ "IL": 125,
+ "RP": 126,
+ "GS": 127,
+ "KE": 128,
+ "IS": 129,
+ "DF": 130,
+ "TR": 131,
+ "DR": 132,
+ "DY": 133,
+ "AH": 134,
+ "DK": 135,
+ "IG": 136,
+ "QE": 137,
+ "AM": 138,
+ "YG": 139,
+ "NE": 140,
+ "IV": 141,
+ "YS": 142,
+ "NP": 143,
+ "ER": 144,
+ "QP": 145,
+ "YV": 146,
+ "ML": 147,
+ "TA": 148,
+ "QR": 149,
+ "GV": 150,
+ "ND": 151,
+ "KP": 152,
+ "FE": 153,
+ "FI": 154,
+ "NK": 155,
+ "HS": 156,
+ "HG": 157,
+ "QK": 158,
+ "CL": 159,
+ "HV": 160,
+ "NY": 161,
+ "IE": 162,
+ "DQ": 163,
+ "WL": 164,
+ "RK": 165,
+ "CS": 166,
+ "NF": 167,
+ "RD": 168,
+ "EP": 169,
+ "RF": 170,
+ "AAL": 171,
+ "ED": 172,
+ "II": 173,
+ "TM": 174,
+ "TC": 175,
+ "NQ": 176,
+ "TH": 177,
+ "AGG": 178,
+ "FD": 179,
+ "AAG": 180,
+ "RQ": 181,
+ "AC": 182,
+ "PD": 183,
+ "VI": 184,
+ "EQ": 185,
+ "LG": 186,
+ "YI": 187,
+ "AW": 188,
+ "MS": 189,
+ "MV": 190,
+ "KD": 191,
+ "LV": 192,
+ "SSS": 193,
+ "NR": 194,
+ "CG": 195,
+ "HI": 196,
+ "PK": 197,
+ "TW": 198,
+ "RY": 199,
+ "EF": 200,
+ "EN": 201,
+ "ADG": 202,
+ "ALL": 203,
+ "PQ": 204,
+ "EY": 205,
+ "CV": 206,
+ "TAA": 207,
+ "PF": 208,
+ "XX": 209,
+ "TSS": 210,
+ "MG": 211,
+ "KQ": 212,
+ "ID": 213,
+ "PR": 214,
+ "TLS": 215,
+ "ASS": 216,
+ "QD": 217,
+ "RN": 218,
+ "WS": 219,
+ "RH": 220,
+ "FK": 221,
+ "VD": 222,
+ "VE": 223,
+ "KN": 224,
+ "TGG": 225,
+ "FF": 226,
+ "ASG": 227,
+ "QN": 228,
+ "ATT": 229,
+ "QY": 230,
+ "HP": 231,
+ "ATG": 232,
+ "ATV": 233,
+ "KY": 234,
+ "VP": 235,
+ "ALS": 236,
+ "QF": 237,
+ "IN": 238,
+ "TAG": 239,
+ "IK": 240,
+ "TAS": 241,
+ "SP": 242,
+ "YY": 243,
+ "FN": 244,
+ "LP": 245,
+ "IP": 246,
+ "YD": 247,
+ "EH": 248,
+ "TAV": 249,
+ "KR": 250,
+ "SD": 251,
+ "VR": 252,
+ "ATL": 253,
+ "GD": 254,
+ "EM": 255,
+ "TLL": 256,
+ "QH": 257,
+ "LD": 258,
+ "YR": 259,
+ "AAV": 260,
+ "TAL": 261,
+ "ATS": 262,
+ "KF": 263,
+ "GGG": 264,
+ "CP": 265,
+ "ADV": 266,
+ "SE": 267,
+ "LSG": 268,
+ "AEL": 269,
+ "AGL": 270,
+ "SF": 271,
+ "YN": 272,
+ "DH": 273,
+ "PN": 274,
+ "TAP": 275,
+ "VN": 276,
+ "ADL": 277,
+ "LLL": 278,
+ "SSG": 279,
+ "ASL": 280,
+ "SSL": 281,
+ "FR": 282,
+ "YE": 283,
+ "IR": 284,
+ "YK": 285,
+ "ARL": 286,
+ "DM": 287,
+ "HH": 288,
+ "WG": 289,
+ "FP": 290,
+ "VK": 291,
+ "EEL": 292,
+ "IQ": 293,
+ "AAS": 294,
+ "LSL": 295,
+ "CR": 296,
+ "TDG": 297,
+ "TSG": 298,
+ "MP": 299,
+ "ALG": 300,
+ "ANG": 301,
+ "AVL": 302,
+ "HR": 303,
+ "CI": 304,
+ "AAAA": 305,
+ "VF": 306,
+ "TTG": 307,
+ "ME": 308,
+ "YF": 309,
+ "SN": 310,
+ "MK": 311,
+ "TAE": 312,
+ "AEE": 313,
+ "YP": 314,
+ "HE": 315,
+ "MI": 316,
+ "CE": 317,
+ "VQ": 318,
+ "TAT": 319,
+ "YQ": 320,
+ "ATP": 321,
+ "TTS": 322,
+ "GE": 323,
+ "LLS": 324,
+ "AGS": 325,
+ "TEE": 326,
+ "FQ": 327,
+ "AGV": 328,
+ "GR": 329,
+ "WV": 330,
+ "XXXX": 331,
+ "TTL": 332,
+ "TVS": 333,
+ "GI": 334,
+ "CD": 335,
+ "TSL": 336,
+ "HD": 337,
+ "TDV": 338,
+ "MD": 339,
+ "ATI": 340,
+ "CK": 341,
+ "ATD": 342,
+ "TTV": 343,
+ "TGL": 344,
+ "MR": 345,
+ "TAD": 346,
+ "HF": 347,
+ "DGS": 348,
+ "SGS": 349,
+ "HQ": 350,
+ "CQ": 351,
+ "GGS": 352,
+ "WR": 353,
+ "IF": 354,
+ "LLG": 355,
+ "TDL": 356,
+ "DSDS": 357,
+ "AQL": 358,
+ "DVL": 359,
+ "MN": 360,
+ "MQ": 361,
+ "ASV": 362,
+ "TGS": 363,
+ "TVL": 364,
+ "HK": 365,
+ "GN": 366,
+ "DGL": 367,
+ "IY": 368,
+ "TEL": 369,
+ "DW": 370,
+ "TAI": 371,
+ "GP": 372,
+ "AVV": 373,
+ "GGL": 374,
+ "EVL": 375,
+ "SGL": 376,
+ "CN": 377,
+ "FY": 378,
+ "DAA": 379,
+ "SSV": 380,
+ "HY": 381,
+ "AVG": 382,
+ "HN": 383,
+ "PY": 384,
+ "SR": 385,
+ "SK": 386,
+ "APG": 387,
+ "ALV": 388,
+ "DGG": 389,
+ "SVL": 390,
+ "APL": 391,
+ "RVL": 392,
+ "LSV": 393,
+ "TEV": 394,
+ "RM": 395,
+ "ALP": 396,
+ "RW": 397,
+ "AVS": 398,
+ "DGV": 399,
+ "TPL": 400,
+ "AKL": 401,
+ "TSV": 402,
+ "AAP": 403,
+ "VVL": 404,
+ "ALR": 405,
+ "EC": 406,
+ "TEI": 407,
+ "TTTT": 408,
+ "SVS": 409,
+ "TAR": 410,
+ "TNG": 411,
+ "TEG": 412,
+ "EW": 413,
+ "AEG": 414,
+ "APV": 415,
+ "QM": 416,
+ "TVV": 417,
+ "TAQ": 418,
+ "PH": 419,
+ "CF": 420,
+ "ANL": 421,
+ "TES": 422,
+ "KM": 423,
+ "TPS": 424,
+ "GGV": 425,
+ "RLV": 426,
+ "NM": 427,
+ "PVL": 428,
+ "TLG": 429,
+ "DAG": 430,
+ "SSSS": 431,
+ "DC": 432,
+ "WI": 433,
+ "ELL": 434,
+ "EGL": 435,
+ "RLL": 436,
+ "ELG": 437,
+ "ANV": 438,
+ "SY": 439,
+ "EAA": 440,
+ "NH": 441,
+ "TVG": 442,
+ "SLV": 443,
+ "QQQQ": 444,
+ "PAA": 445,
+ "WQ": 446,
+ "EAL": 447,
+ "KH": 448,
+ "TNV": 449,
+ "SQ": 450,
+ "TGV": 451,
+ "MF": 452,
+ "DAL": 453,
+ "PAG": 454,
+ "DAV": 455,
+ "DLS": 456,
+ "TPV": 457,
+ "AEV": 458,
+ "DIL": 459,
+ "TNL": 460,
+ "PSP": 461,
+ "TLV": 462,
+ "WN": 463,
+ "WK": 464,
+ "TRL": 465,
+ "TTP": 466,
+ "ARG": 467,
+ "RC": 468,
+ "DLL": 469,
+ "DLV": 470,
+ "TAK": 471,
+ "PGL": 472,
+ "DLG": 473,
+ "SLG": 474,
+ "RAA": 475,
+ "DVV": 476,
+ "NGG": 477,
+ "SGV": 478,
+ "SSI": 479,
+ "TIS": 480,
+ "TPG": 481,
+ "RGL": 482,
+ "NC": 483,
+ "EEV": 484,
+ "TEK": 485,
+ "VVV": 486,
+ "FH": 487,
+ "YH": 488,
+ "EIL": 489,
+ "TAN": 490,
+ "NGL": 491,
+ "APS": 492,
+ "IH": 493,
+ "WE": 494,
+ "TQL": 495,
+ "RLG": 496,
+ "VVG": 497,
+ "TNI": 498,
+ "TLP": 499,
+ "SSP": 500,
+ "TTI": 501,
+ "DAS": 502,
+ "TKL": 503,
+ "NVL": 504,
+ "QLL": 505,
+ "TDS": 506,
+ "AIL": 507,
+ "AKK": 508,
+ "PLP": 509,
+ "QC": 510,
+ "DSL": 511,
+ "ELV": 512,
+ "DVS": 513,
+ "ANI": 514,
+ "NW": 515,
+ "ANS": 516,
+ "QW": 517,
+ "TSP": 518,
+ "QAA": 519,
+ "TNS": 520,
+ "DSS": 521,
+ "TAF": 522,
+ "PGS": 523,
+ "DSG": 524,
+ "VVS": 525,
+ "PM": 526,
+ "GK": 527,
+ "DAD": 528,
+ "LLV": 529,
+ "AGN": 530,
+ "LSP": 531,
+ "ESL": 532,
+ "NGS": 533,
+ "DDV": 534,
+ "EAV": 535,
+ "DEL": 536,
+ "TIL": 537,
+ "SVG": 538,
+ "NGV": 539,
+ "EEI": 540,
+ "AIG": 541,
+ "XXXXXXXX": 542,
+ "DDL": 543,
+ "AGI": 544,
+ "ASI": 545,
+ "CY": 546,
+ "FM": 547,
+ "AAR": 548,
+ "AAE": 549,
+ "AAI": 550,
+ "NIL": 551,
+ "TKK": 552,
+ "TSI": 553,
+ "TKV": 554,
+ "TVN": 555,
+ "DTL": 556,
+ "MY": 557,
+ "QGL": 558,
+ "AGE": 559,
+ "DEE": 560,
+ "ADS": 561,
+ "AGR": 562,
+ "SLP": 563,
+ "AES": 564,
+ "ASP": 565,
+ "LLE": 566,
+ "LSI": 567,
+ "DTV": 568,
+ "DTT": 569,
+ "PVG": 570,
+ "GGGG": 571,
+ "TRV": 572,
+ "PGV": 573,
+ "RAL": 574,
+ "PVS": 575,
+ "EVS": 576,
+ "AYL": 577,
+ "KC": 578,
+ "ADI": 579,
+ "WF": 580,
+ "NSS": 581,
+ "TQV": 582,
+ "DSI": 583,
+ "QAL": 584,
+ "PPP": 585,
+ "ARV": 586,
+ "EEE": 587,
+ "NTL": 588,
+ "DDG": 589,
+ "PDG": 590,
+ "RLR": 591,
+ "LLR": 592,
+ "DSV": 593,
+ "TTE": 594,
+ "EEG": 595,
+ "CH": 596,
+ "KW": 597,
+ "TKI": 598,
+ "AEI": 599,
+ "LSE": 600,
+ "MM": 601,
+ "AQG": 602,
+ "NVS": 603,
+ "DLP": 604,
+ "EGV": 605,
+ "DEV": 606,
+ "QQL": 607,
+ "EKL": 608,
+ "YW": 609,
+ "REL": 610,
+ "RIL": 611,
+ "LLP": 612,
+ "PLV": 613,
+ "DIS": 614,
+ "TAY": 615,
+ "AFL": 616,
+ "PVP": 617,
+ "QSL": 618,
+ "TVP": 619,
+ "NAA": 620,
+ "AIS": 621,
+ "ERL": 622,
+ "NLG": 623,
+ "RAV": 624,
+ "AQV": 625,
+ "EES": 626,
+ "DGI": 627,
+ "TIG": 628,
+ "EGS": 629,
+ "APP": 630,
+ "PLG": 631,
+ "LLK": 632,
+ "NLS": 633,
+ "TDI": 634,
+ "NLV": 635,
+ "QLV": 636,
+ "TPP": 637,
+ "RLS": 638,
+ "EEK": 639,
+ "DRL": 640,
+ "ETL": 641,
+ "VLG": 642,
+ "TRS": 643,
+ "TGN": 644,
+ "TQI": 645,
+ "TRG": 646,
+ "AHL": 647,
+ "ELP": 648,
+ "FC": 649,
+ "TNN": 650,
+ "DSDSDSDS": 651,
+ "TGI": 652,
+ "DTS": 653,
+ "RVS": 654,
+ "EIS": 655,
+ "MH": 656,
+ "KGL": 657,
+ "TIV": 658,
+ "TVE": 659,
+ "TLN": 660,
+ "DDI": 661,
+ "QLG": 662,
+ "DAE": 663,
+ "TVI": 664,
+ "DVG": 665,
+ "TQG": 666,
+ "FGL": 667,
+ "TKS": 668,
+ "QAV": 669,
+ "ARS": 670,
+ "KLV": 671,
+ "NIS": 672,
+ "DAP": 673,
+ "TQS": 674,
+ "DLI": 675,
+ "RGG": 676,
+ "WP": 677,
+ "AIV": 678,
+ "NSL": 679,
+ "DGK": 680,
+ "YM": 681,
+ "ESI": 682,
+ "RRL": 683,
+ "RSL": 684,
+ "ELS": 685,
+ "PSS": 686,
+ "EGG": 687,
+ "EKV": 688,
+ "QLS": 689,
+ "NTT": 690,
+ "RSS": 691,
+ "ESS": 692,
+ "ETT": 693,
+ "QSS": 694,
+ "TRI": 695,
+ "PGG": 696,
+ "EAE": 697,
+ "KIL": 698,
+ "FSS": 699,
+ "EIV": 700,
+ "SIL": 701,
+ "TDP": 702,
+ "NSG": 703,
+ "RGS": 704,
+ "TRR": 705,
+ "ADP": 706,
+ "QVV": 707,
+ "RVG": 708,
+ "TQQ": 709,
+ "DIV": 710,
+ "NVG": 711,
+ "KEL": 712,
+ "TLI": 713,
+ "QRL": 714,
+ "EAG": 715,
+ "AKV": 716,
+ "QIL": 717,
+ "AVI": 718,
+ "NLL": 719,
+ "NAG": 720,
+ "YC": 721,
+ "DTG": 722,
+ "NAS": 723,
+ "RAG": 724,
+ "NSI": 725,
+ "TVTV": 726,
+ "QGG": 727,
+ "SGI": 728,
+ "KGV": 729,
+ "HM": 730,
+ "QVL": 731,
+ "FGG": 732,
+ "EVV": 733,
+ "ESV": 734,
+ "QEL": 735,
+ "KEE": 736,
+ "DVI": 737,
+ "ETV": 738,
+ "PC": 739,
+ "AFG": 740,
+ "EKI": 741,
+ "ALI": 742,
+ "HC": 743,
+ "EQL": 744,
+ "TFS": 745,
+ "TPI": 746,
+ "SIS": 747,
+ "FW": 748,
+ "RGV": 749,
+ "NIG": 750,
+ "QTL": 751,
+ "EAS": 752,
+ "KLK": 753,
+ "NLI": 754,
+ "FSG": 755,
+ "RSG": 756,
+ "FDL": 757,
+ "PSI": 758,
+ "AVP": 759,
+ "TKG": 760,
+ "EDL": 761,
+ "KKKK": 762,
+ "RAR": 763,
+ "QAG": 764,
+ "EKS": 765,
+ "EVG": 766,
+ "TVK": 767,
+ "NAL": 768,
+ "DAI": 769,
+ "VGL": 770,
+ "NGI": 771,
+ "DEI": 772,
+ "AKG": 773,
+ "TLK": 774,
+ "EDV": 775,
+ "ETS": 776,
+ "ESG": 777,
+ "PTT": 778,
+ "WY": 779,
+ "KAL": 780,
+ "TVD": 781,
+ "KLG": 782,
+ "RDL": 783,
+ "QSV": 784,
+ "YLG": 785,
+ "LSF": 786,
+ "DQL": 787,
+ "TSGS": 788,
+ "CSS": 789,
+ "DDS": 790,
+ "DAR": 791,
+ "PAP": 792,
+ "RVV": 793,
+ "NTS": 794,
+ "PAV": 795,
+ "QAS": 796,
+ "AQS": 797,
+ "LSK": 798,
+ "EDG": 799,
+ "CC": 800,
+ "SLI": 801,
+ "DTI": 802,
+ "NTG": 803,
+ "RRS": 804,
+ "ELI": 805,
+ "NAV": 806,
+ "EVI": 807,
+ "QSG": 808,
+ "KLL": 809,
+ "DNL": 810,
+ "GGI": 811,
+ "KSS": 812,
+ "AAF": 813,
+ "RLP": 814,
+ "QTV": 815,
+ "QVS": 816,
+ "PVI": 817,
+ "VLV": 818,
+ "AHG": 819,
+ "TFL": 820,
+ "NSV": 821,
+ "DPS": 822,
+ "AKS": 823,
+ "RAE": 824,
+ "PSG": 825,
+ "QTT": 826,
+ "EKG": 827,
+ "KLI": 828,
+ "ENL": 829,
+ "RTL": 830,
+ "KKK": 831,
+ "LLI": 832,
+ "DRV": 833,
+ "FTG": 834,
+ "DAT": 835,
+ "NVV": 836,
+ "DVP": 837,
+ "AFV": 838,
+ "KSL": 839,
+ "DIG": 840,
+ "HW": 841,
+ "EPG": 842,
+ "DTD": 843,
+ "RRG": 844,
+ "PIL": 845,
+ "AAK": 846,
+ "FLG": 847,
+ "EII": 848,
+ "QGS": 849,
+ "NNI": 850,
+ "NNL": 851,
+ "NVI": 852,
+ "SIG": 853,
+ "TGK": 854,
+ "RTV": 855,
+ "NDG": 856,
+ "KKS": 857,
+ "AAQ": 858,
+ "FTV": 859,
+ "NDL": 860,
+ "DES": 861,
+ "RSV": 862,
+ "LLQ": 863,
+ "RDG": 864,
+ "ALK": 865,
+ "DEG": 866,
+ "ALE": 867,
+ "PEP": 868,
+ "TGP": 869,
+ "RAS": 870,
+ "ELK": 871,
+ "GLG": 872,
+ "DPV": 873,
+ "EKP": 874,
+ "DKL": 875,
+ "REE": 876,
+ "RVI": 877,
+ "NTV": 878,
+ "KVS": 879,
+ "DII": 880,
+ "KKI": 881,
+ "AML": 882,
+ "TYL": 883,
+ "EIG": 884,
+ "FSL": 885,
+ "AYV": 886,
+ "FAA": 887,
+ "QVG": 888,
+ "SVI": 889,
+ "ETI": 890,
+ "YGG": 891,
+ "VGV": 892,
+ "TYS": 893,
+ "KKV": 894,
+ "QTI": 895,
+ "NTI": 896,
+ "FGS": 897,
+ "AFS": 898,
+ "EEEE": 899,
+ "RLI": 900,
+ "RDV": 901,
+ "VGS": 902,
+ "TFG": 903,
+ "AGP": 904,
+ "RQL": 905,
+ "TRE": 906,
+ "RPG": 907,
+ "KIS": 908,
+ "ALQ": 909,
+ "HLL": 910,
+ "ARI": 911,
+ "EAP": 912,
+ "NNS": 913,
+ "QTS": 914,
+ "EGI": 915,
+ "FDV": 916,
+ "EPV": 917,
+ "TRP": 918,
+ "VVI": 919,
+ "NIV": 920,
+ "KAG": 921,
+ "NDV": 922,
+ "DNG": 923,
+ "QQV": 924,
+ "KKG": 925,
+ "QSI": 926,
+ "NEL": 927,
+ "QGV": 928,
+ "DNV": 929,
+ "KLS": 930,
+ "TAH": 931,
+ "QEE": 932,
+ "SVP": 933,
+ "ASF": 934,
+ "API": 935,
+ "KDL": 936,
+ "QAE": 937,
+ "EDI": 938,
+ "RPV": 939,
+ "RPL": 940,
+ "WH": 941,
+ "YTL": 942,
+ "DPG": 943,
+ "NNG": 944,
+ "YSG": 945,
+ "DPL": 946,
+ "RIV": 947,
+ "NII": 948,
+ "EAI": 949,
+ "ETG": 950,
+ "RIS": 951,
+ "EPL": 952,
+ "DKV": 953,
+ "PLL": 954,
+ "YTV": 955,
+ "PIG": 956,
+ "FTL": 957,
+ "PSL": 958,
+ "AQI": 959,
+ "WM": 960,
+ "NAT": 961,
+ "THL": 962,
+ "DNS": 963,
+ "QDL": 964,
+ "QAR": 965,
+ "QPG": 966,
+ "PAL": 967,
+ "CM": 968,
+ "YTG": 969,
+ "TFV": 970,
+ "YSS": 971,
+ "RTG": 972,
+ "QIS": 973,
+ "FVS": 974,
+ "PAS": 975,
+ "KEK": 976,
+ "QPV": 977,
+ "KDG": 978,
+ "KAV": 979,
+ "TII": 980,
+ "QLI": 981,
+ "FAG": 982,
+ "DAN": 983,
+ "AKI": 984,
+ "RIG": 985,
+ "NDI": 986,
+ "QPL": 987,
+ "RTT": 988,
+ "PLS": 989,
+ "ERV": 990,
+ "PW": 991,
+ "NAI": 992,
+ "QTG": 993,
+ "QKL": 994,
+ "TYG": 995,
+ "EKK": 996,
+ "EAR": 997,
+ "TAM": 998,
+ "KVL": 999,
+ "FDG": 1000,
+ "KTL": 1001,
+ "KGS": 1002,
+ "XXXXXXXXXXXXXXXX": 1003,
+ "PTE": 1004,
+ "REV": 1005,
+ "DKI": 1006,
+ "QQG": 1007,
+ "DAF": 1008,
+ "KKL": 1009,
+ "DAQ": 1010,
+ "KAA": 1011,
+ "PSV": 1012,
+ "FSV": 1013,
+ "DNI": 1014,
+ "FGV": 1015,
+ "DTP": 1016,
+ "PIS": 1017,
+ "FLS": 1018,
+ "PEE": 1019,
+ "QEV": 1020,
+ "KEI": 1021,
+ "GLV": 1022,
+ "FAE": 1023
+ },
+ "merges": [
+ "A A",
+ "S S",
+ "T T",
+ "G G",
+ "L L",
+ "A G",
+ "L S",
+ "T V",
+ "A V",
+ "A L",
+ "A S",
+ "D G",
+ "T G",
+ "E E",
+ "T L",
+ "T S",
+ "D V",
+ "A E",
+ "D L",
+ "S G",
+ "A P",
+ "R L",
+ "V V",
+ "S L",
+ "T P",
+ "T I",
+ "E L",
+ "S V",
+ "A D",
+ "N G",
+ "A R",
+ "E V",
+ "Q L",
+ "T D",
+ "A I",
+ "N L",
+ "E G",
+ "P V",
+ "K L",
+ "P G",
+ "E I",
+ "N V",
+ "D I",
+ "P L",
+ "D S",
+ "K K",
+ "R V",
+ "N I",
+ "A Q",
+ "P S",
+ "A T",
+ "E K",
+ "E S",
+ "R G",
+ "Q Q",
+ "N S",
+ "D D",
+ "A K",
+ "R R",
+ "V L",
+ "A F",
+ "T F",
+ "R I",
+ "P P",
+ "Q V",
+ "T E",
+ "A N",
+ "S I",
+ "K V",
+ "Q G",
+ "K I",
+ "R S",
+ "T Y",
+ "D P",
+ "N N",
+ "K G",
+ "D E",
+ "Q I",
+ "F G",
+ "V G",
+ "Y L",
+ "F L",
+ "T N",
+ "P I",
+ "K S",
+ "Q S",
+ "P E",
+ "A Y",
+ "H L",
+ "R E",
+ "F S",
+ "F V",
+ "T K",
+ "G L",
+ "V S",
+ "T Q",
+ "D N",
+ "I L",
+ "R P",
+ "G S",
+ "K E",
+ "I S",
+ "D F",
+ "T R",
+ "D R",
+ "D Y",
+ "A H",
+ "D K",
+ "I G",
+ "Q E",
+ "A M",
+ "Y G",
+ "N E",
+ "I V",
+ "Y S",
+ "N P",
+ "E R",
+ "Q P",
+ "Y V",
+ "M L",
+ "T A",
+ "Q R",
+ "G V",
+ "N D",
+ "K P",
+ "F E",
+ "F I",
+ "N K",
+ "H S",
+ "H G",
+ "Q K",
+ "C L",
+ "H V",
+ "N Y",
+ "I E",
+ "D Q",
+ "W L",
+ "R K",
+ "C S",
+ "N F",
+ "R D",
+ "E P",
+ "R F",
+ "AA L",
+ "E D",
+ "I I",
+ "T M",
+ "T C",
+ "N Q",
+ "T H",
+ "A GG",
+ "F D",
+ "AA G",
+ "R Q",
+ "A C",
+ "P D",
+ "V I",
+ "E Q",
+ "L G",
+ "Y I",
+ "A W",
+ "M S",
+ "M V",
+ "K D",
+ "L V",
+ "SS S",
+ "N R",
+ "C G",
+ "H I",
+ "P K",
+ "T W",
+ "R Y",
+ "E F",
+ "E N",
+ "A DG",
+ "A LL",
+ "P Q",
+ "E Y",
+ "C V",
+ "T AA",
+ "P F",
+ "X X",
+ "T SS",
+ "M G",
+ "K Q",
+ "I D",
+ "P R",
+ "T LS",
+ "A SS",
+ "Q D",
+ "R N",
+ "W S",
+ "R H",
+ "F K",
+ "V D",
+ "V E",
+ "K N",
+ "T GG",
+ "F F",
+ "AS G",
+ "Q N",
+ "A TT",
+ "Q Y",
+ "H P",
+ "A TG",
+ "A TV",
+ "K Y",
+ "V P",
+ "A LS",
+ "Q F",
+ "I N",
+ "T AG",
+ "I K",
+ "T AS",
+ "S P",
+ "Y Y",
+ "F N",
+ "L P",
+ "I P",
+ "Y D",
+ "E H",
+ "T AV",
+ "K R",
+ "S D",
+ "V R",
+ "A TL",
+ "G D",
+ "E M",
+ "T LL",
+ "Q H",
+ "L D",
+ "Y R",
+ "AA V",
+ "T AL",
+ "A TS",
+ "K F",
+ "GG G",
+ "C P",
+ "A DV",
+ "S E",
+ "LS G",
+ "AE L",
+ "AG L",
+ "S F",
+ "Y N",
+ "D H",
+ "P N",
+ "T AP",
+ "V N",
+ "A DL",
+ "LL L",
+ "SS G",
+ "AS L",
+ "SS L",
+ "F R",
+ "Y E",
+ "I R",
+ "Y K",
+ "A RL",
+ "D M",
+ "H H",
+ "W G",
+ "F P",
+ "V K",
+ "EE L",
+ "I Q",
+ "AA S",
+ "LS L",
+ "C R",
+ "T DG",
+ "TS G",
+ "M P",
+ "AL G",
+ "A NG",
+ "AV L",
+ "H R",
+ "C I",
+ "AA AA",
+ "V F",
+ "TT G",
+ "M E",
+ "Y F",
+ "S N",
+ "M K",
+ "T AE",
+ "A EE",
+ "Y P",
+ "H E",
+ "M I",
+ "C E",
+ "V Q",
+ "T AT",
+ "Y Q",
+ "A TP",
+ "TT S",
+ "G E",
+ "LL S",
+ "AG S",
+ "T EE",
+ "F Q",
+ "AG V",
+ "G R",
+ "W V",
+ "XX XX",
+ "TT L",
+ "TV S",
+ "G I",
+ "C D",
+ "TS L",
+ "H D",
+ "T DV",
+ "M D",
+ "A TI",
+ "C K",
+ "A TD",
+ "TT V",
+ "TG L",
+ "M R",
+ "T AD",
+ "H F",
+ "DG S",
+ "SG S",
+ "H Q",
+ "C Q",
+ "GG S",
+ "W R",
+ "I F",
+ "LL G",
+ "T DL",
+ "DS DS",
+ "A QL",
+ "DV L",
+ "M N",
+ "M Q",
+ "AS V",
+ "TG S",
+ "TV L",
+ "H K",
+ "G N",
+ "DG L",
+ "I Y",
+ "T EL",
+ "D W",
+ "T AI",
+ "G P",
+ "AV V",
+ "GG L",
+ "EV L",
+ "SG L",
+ "C N",
+ "F Y",
+ "D AA",
+ "SS V",
+ "H Y",
+ "AV G",
+ "H N",
+ "P Y",
+ "S R",
+ "S K",
+ "AP G",
+ "AL V",
+ "D GG",
+ "SV L",
+ "AP L",
+ "RV L",
+ "LS V",
+ "T EV",
+ "R M",
+ "AL P",
+ "R W",
+ "AV S",
+ "DG V",
+ "TP L",
+ "A KL",
+ "TS V",
+ "AA P",
+ "VV L",
+ "AL R",
+ "E C",
+ "T EI",
+ "TT TT",
+ "SV S",
+ "T AR",
+ "T NG",
+ "T EG",
+ "E W",
+ "AE G",
+ "AP V",
+ "Q M",
+ "TV V",
+ "T AQ",
+ "P H",
+ "C F",
+ "A NL",
+ "T ES",
+ "K M",
+ "TP S",
+ "GG V",
+ "RL V",
+ "N M",
+ "PV L",
+ "TL G",
+ "D AG",
+ "SS SS",
+ "D C",
+ "W I",
+ "E LL",
+ "EG L",
+ "R LL",
+ "EL G",
+ "A NV",
+ "S Y",
+ "E AA",
+ "N H",
+ "TV G",
+ "SL V",
+ "QQ QQ",
+ "P AA",
+ "W Q",
+ "E AL",
+ "K H",
+ "T NV",
+ "S Q",
+ "TG V",
+ "M F",
+ "D AL",
+ "P AG",
+ "D AV",
+ "D LS",
+ "TP V",
+ "AE V",
+ "DI L",
+ "T NL",
+ "PS P",
+ "TL V",
+ "W N",
+ "W K",
+ "T RL",
+ "TT P",
+ "AR G",
+ "R C",
+ "D LL",
+ "DL V",
+ "T AK",
+ "PG L",
+ "DL G",
+ "SL G",
+ "R AA",
+ "DV V",
+ "N GG",
+ "SG V",
+ "SS I",
+ "TI S",
+ "TP G",
+ "RG L",
+ "N C",
+ "EE V",
+ "T EK",
+ "VV V",
+ "F H",
+ "Y H",
+ "EI L",
+ "T AN",
+ "NG L",
+ "AP S",
+ "I H",
+ "W E",
+ "T QL",
+ "RL G",
+ "VV G",
+ "T NI",
+ "TL P",
+ "SS P",
+ "TT I",
+ "D AS",
+ "T KL",
+ "NV L",
+ "Q LL",
+ "TD S",
+ "AI L",
+ "A KK",
+ "PL P",
+ "Q C",
+ "D SL",
+ "EL V",
+ "DV S",
+ "A NI",
+ "N W",
+ "A NS",
+ "Q W",
+ "TS P",
+ "Q AA",
+ "T NS",
+ "D SS",
+ "T AF",
+ "PG S",
+ "D SG",
+ "VV S",
+ "P M",
+ "G K",
+ "D AD",
+ "LL V",
+ "AG N",
+ "LS P",
+ "E SL",
+ "NG S",
+ "D DV",
+ "E AV",
+ "D EL",
+ "TI L",
+ "SV G",
+ "NG V",
+ "EE I",
+ "AI G",
+ "XXXX XXXX",
+ "D DL",
+ "AG I",
+ "AS I",
+ "C Y",
+ "F M",
+ "AA R",
+ "AA E",
+ "AA I",
+ "NI L",
+ "T KK",
+ "TS I",
+ "T KV",
+ "TV N",
+ "D TL",
+ "M Y",
+ "QG L",
+ "AG E",
+ "D EE",
+ "AD S",
+ "AG R",
+ "SL P",
+ "AE S",
+ "AS P",
+ "LL E",
+ "LS I",
+ "D TV",
+ "D TT",
+ "PV G",
+ "GG GG",
+ "T RV",
+ "PG V",
+ "R AL",
+ "PV S",
+ "EV S",
+ "A YL",
+ "K C",
+ "AD I",
+ "W F",
+ "N SS",
+ "T QV",
+ "DS I",
+ "Q AL",
+ "PP P",
+ "AR V",
+ "EE E",
+ "N TL",
+ "D DG",
+ "P DG",
+ "RL R",
+ "LL R",
+ "D SV",
+ "TT E",
+ "EE G",
+ "C H",
+ "K W",
+ "T KI",
+ "AE I",
+ "LS E",
+ "M M",
+ "AQ G",
+ "NV S",
+ "DL P",
+ "EG V",
+ "D EV",
+ "Q QL",
+ "E KL",
+ "Y W",
+ "R EL",
+ "RI L",
+ "LL P",
+ "PL V",
+ "DI S",
+ "T AY",
+ "AF L",
+ "PV P",
+ "Q SL",
+ "TV P",
+ "N AA",
+ "AI S",
+ "E RL",
+ "NL G",
+ "R AV",
+ "AQ V",
+ "EE S",
+ "DG I",
+ "TI G",
+ "EG S",
+ "AP P",
+ "PL G",
+ "LL K",
+ "N LS",
+ "TD I",
+ "NL V",
+ "QL V",
+ "TP P",
+ "R LS",
+ "EE K",
+ "D RL",
+ "E TL",
+ "VL G",
+ "T RS",
+ "TG N",
+ "T QI",
+ "T RG",
+ "A HL",
+ "EL P",
+ "F C",
+ "T NN",
+ "DSDS DSDS",
+ "TG I",
+ "D TS",
+ "RV S",
+ "EI S",
+ "M H",
+ "KG L",
+ "TI V",
+ "TV E",
+ "TL N",
+ "D DI",
+ "QL G",
+ "D AE",
+ "TV I",
+ "DV G",
+ "T QG",
+ "FG L",
+ "T KS",
+ "Q AV",
+ "AR S",
+ "KL V",
+ "NI S",
+ "D AP",
+ "T QS",
+ "DL I",
+ "R GG",
+ "W P",
+ "AI V",
+ "N SL",
+ "DG K",
+ "Y M",
+ "ES I",
+ "R RL",
+ "R SL",
+ "E LS",
+ "P SS",
+ "E GG",
+ "EK V",
+ "Q LS",
+ "N TT",
+ "R SS",
+ "E SS",
+ "E TT",
+ "Q SS",
+ "T RI",
+ "P GG",
+ "E AE",
+ "KI L",
+ "F SS",
+ "EI V",
+ "SI L",
+ "TD P",
+ "N SG",
+ "RG S",
+ "T RR",
+ "AD P",
+ "Q VV",
+ "RV G",
+ "T QQ",
+ "DI V",
+ "NV G",
+ "K EL",
+ "TL I",
+ "Q RL",
+ "E AG",
+ "AK V",
+ "QI L",
+ "AV I",
+ "N LL",
+ "N AG",
+ "Y C",
+ "D TG",
+ "N AS",
+ "R AG",
+ "NS I",
+ "TV TV",
+ "Q GG",
+ "SG I",
+ "KG V",
+ "H M",
+ "Q VL",
+ "F GG",
+ "E VV",
+ "E SV",
+ "Q EL",
+ "K EE",
+ "DV I",
+ "E TV",
+ "P C",
+ "AF G",
+ "EK I",
+ "AL I",
+ "H C",
+ "E QL",
+ "TF S",
+ "TP I",
+ "SI S",
+ "F W",
+ "RG V",
+ "NI G",
+ "Q TL",
+ "E AS",
+ "KL K",
+ "NL I",
+ "F SG",
+ "R SG",
+ "F DL",
+ "PS I",
+ "AV P",
+ "T KG",
+ "E DL",
+ "KK KK",
+ "R AR",
+ "Q AG",
+ "EK S",
+ "EV G",
+ "TV K",
+ "N AL",
+ "D AI",
+ "VG L",
+ "NG I",
+ "D EI",
+ "AK G",
+ "TL K",
+ "E DV",
+ "E TS",
+ "E SG",
+ "P TT",
+ "W Y",
+ "K AL",
+ "TV D",
+ "KL G",
+ "R DL",
+ "Q SV",
+ "YL G",
+ "LS F",
+ "D QL",
+ "TS GS",
+ "C SS",
+ "D DS",
+ "D AR",
+ "P AP",
+ "R VV",
+ "N TS",
+ "P AV",
+ "Q AS",
+ "AQ S",
+ "LS K",
+ "E DG",
+ "C C",
+ "SL I",
+ "D TI",
+ "N TG",
+ "RR S",
+ "EL I",
+ "N AV",
+ "EV I",
+ "Q SG",
+ "K LL",
+ "D NL",
+ "GG I",
+ "K SS",
+ "AA F",
+ "RL P",
+ "Q TV",
+ "QV S",
+ "PV I",
+ "VL V",
+ "AH G",
+ "TF L",
+ "N SV",
+ "D PS",
+ "AK S",
+ "R AE",
+ "P SG",
+ "Q TT",
+ "EK G",
+ "KL I",
+ "E NL",
+ "R TL",
+ "KK K",
+ "LL I",
+ "D RV",
+ "F TG",
+ "D AT",
+ "N VV",
+ "DV P",
+ "AF V",
+ "K SL",
+ "DI G",
+ "H W",
+ "E PG",
+ "D TD",
+ "R RG",
+ "PI L",
+ "AA K",
+ "FL G",
+ "EI I",
+ "QG S",
+ "N NI",
+ "N NL",
+ "NV I",
+ "SI G",
+ "TG K",
+ "R TV",
+ "N DG",
+ "KK S",
+ "AA Q",
+ "F TV",
+ "N DL",
+ "D ES",
+ "R SV",
+ "LL Q",
+ "R DG",
+ "AL K",
+ "D EG",
+ "AL E",
+ "PE P",
+ "TG P",
+ "R AS",
+ "EL K",
+ "GL G",
+ "D PV",
+ "EK P",
+ "D KL",
+ "R EE",
+ "RV I",
+ "N TV",
+ "KV S",
+ "DI I",
+ "KK I",
+ "AM L",
+ "TY L",
+ "EI G",
+ "F SL",
+ "AY V",
+ "F AA",
+ "QV G",
+ "SV I",
+ "E TI",
+ "Y GG",
+ "VG V",
+ "TY S",
+ "KK V",
+ "Q TI",
+ "N TI",
+ "FG S",
+ "AF S",
+ "EE EE",
+ "RL I",
+ "R DV",
+ "VG S",
+ "TF G",
+ "AG P",
+ "R QL",
+ "T RE",
+ "R PG",
+ "KI S",
+ "AL Q",
+ "H LL",
+ "AR I",
+ "E AP",
+ "N NS",
+ "Q TS",
+ "EG I",
+ "F DV",
+ "E PV",
+ "T RP",
+ "VV I",
+ "NI V",
+ "K AG",
+ "N DV",
+ "D NG",
+ "QQ V",
+ "KK G",
+ "Q SI",
+ "N EL",
+ "QG V",
+ "D NV",
+ "K LS",
+ "T AH",
+ "Q EE",
+ "SV P",
+ "AS F",
+ "AP I",
+ "K DL",
+ "Q AE",
+ "E DI",
+ "R PV",
+ "R PL",
+ "W H",
+ "Y TL",
+ "D PG",
+ "N NG",
+ "Y SG",
+ "D PL",
+ "RI V",
+ "NI I",
+ "E AI",
+ "E TG",
+ "RI S",
+ "E PL",
+ "D KV",
+ "P LL",
+ "Y TV",
+ "PI G",
+ "F TL",
+ "P SL",
+ "AQ I",
+ "W M",
+ "N AT",
+ "T HL",
+ "D NS",
+ "Q DL",
+ "Q AR",
+ "Q PG",
+ "P AL",
+ "C M",
+ "Y TG",
+ "TF V",
+ "Y SS",
+ "R TG",
+ "QI S",
+ "FV S",
+ "P AS",
+ "K EK",
+ "Q PV",
+ "K DG",
+ "K AV",
+ "TI I",
+ "QL I",
+ "F AG",
+ "D AN",
+ "AK I",
+ "RI G",
+ "N DI",
+ "Q PL",
+ "R TT",
+ "P LS",
+ "E RV",
+ "P W",
+ "N AI",
+ "Q TG",
+ "Q KL",
+ "TY G",
+ "E KK",
+ "E AR",
+ "T AM",
+ "K VL",
+ "F DG",
+ "K TL",
+ "KG S",
+ "XXXXXXXX XXXXXXXX",
+ "P TE",
+ "R EV",
+ "D KI",
+ "QQ G",
+ "D AF",
+ "K KL",
+ "D AQ",
+ "K AA",
+ "P SV",
+ "F SV",
+ "D NI",
+ "FG V",
+ "D TP",
+ "PI S",
+ "F LS",
+ "P EE",
+ "Q EV",
+ "K EI",
+ "GL V",
+ "F AE"
+ ]
+ }
+ }
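The vocabulary pins down the domain: beyond the five special tokens it contains single amino-acid letters (the 20 standard residues plus the B, X, Z ambiguity codes) and BPE merges of frequent residue pairs and triples, i.e. a protein-sequence tokenizer with whitespace pre-tokenization. A loading sketch using the standalone tokenizers library; the peptide below is an arbitrary example, not from any dataset:

```python
# Sketch: drive tokenizer.json directly with the tokenizers library.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
enc = tok.encode("MKTAYIAKQR")  # arbitrary example peptide
print(enc.tokens)  # merged pair/triple tokens where the merge table allows, single letters otherwise
print(enc.ids)
print(tok.get_vocab_size())  # 1024
```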
tokenizer_config.json ADDED
@@ -0,0 +1,52 @@
+ {
+ "added_tokens_decoder": {
+ "0": {
+ "content": "[UNK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "[CLS]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "[SEP]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "4": {
+ "content": "[MASK]",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "[CLS]",
+ "mask_token": "[MASK]",
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "[PAD]",
+ "sep_token": "[SEP]",
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "unk_token": "[UNK]"
+ }
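tokenizer_config.json is the transformers-side wrapper over the same files: AutoTokenizer combines it with tokenizer.json and special_tokens_map.json into a PreTrainedTokenizerFast. Since tokenizer.json sets post_processor to null, no [CLS]/[SEP] are inserted automatically. A sketch (placeholder path again):

```python
# Sketch: the same tokenizer through transformers. The special-token ids line
# up with config.json (cls 1, sep 2, pad 3).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./ModernBert-small")
batch = tok(["MKTAYIAKQR", "AALS"], padding=True, return_tensors="pt")
print(batch["input_ids"].shape)
print(tok.cls_token_id, tok.sep_token_id, tok.pad_token_id)  # 1 2 3
```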
trainer_state.json ADDED
@@ -0,0 +1,2290 @@
+ {
+ "best_metric": 2.9280490707606077e-05,
+ "best_model_checkpoint": "./results/models/checkpoint-156240",
+ "epoch": 8.0,
+ "eval_steps": 500,
+ "global_step": 156240,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.025601638504864313,
+ "grad_norm": 0.0004177093505859375,
+ "learning_rate": 0.0009994879672299028,
+ "loss": 0.023,
+ "step": 500
+ },
+ {
+ "epoch": 0.051203277009728626,
+ "grad_norm": 0.000274658203125,
+ "learning_rate": 0.0009989759344598056,
+ "loss": 0.0,
+ "step": 1000
+ },
+ {
+ "epoch": 0.07680491551459294,
+ "grad_norm": 0.000270843505859375,
+ "learning_rate": 0.0009984639016897081,
+ "loss": 0.0,
+ "step": 1500
+ },
+ {
+ "epoch": 0.10240655401945725,
+ "grad_norm": 0.0002231597900390625,
+ "learning_rate": 0.000997951868919611,
+ "loss": 0.0,
+ "step": 2000
+ },
+ {
+ "epoch": 0.12800819252432155,
+ "grad_norm": 0.00021457672119140625,
+ "learning_rate": 0.0009974398361495137,
+ "loss": 0.0,
+ "step": 2500
+ },
+ {
+ "epoch": 0.15360983102918588,
+ "grad_norm": 0.00020694732666015625,
+ "learning_rate": 0.0009969278033794163,
+ "loss": 0.0,
+ "step": 3000
+ },
+ {
+ "epoch": 0.17921146953405018,
+ "grad_norm": 0.00020313262939453125,
+ "learning_rate": 0.000996415770609319,
+ "loss": 0.0,
+ "step": 3500
+ },
+ {
+ "epoch": 0.2048131080389145,
+ "grad_norm": 0.000202178955078125,
+ "learning_rate": 0.0009959037378392218,
+ "loss": 0.0,
+ "step": 4000
+ },
+ {
+ "epoch": 0.2304147465437788,
+ "grad_norm": 0.0001926422119140625,
+ "learning_rate": 0.0009953917050691244,
+ "loss": 0.0,
+ "step": 4500
+ },
+ {
+ "epoch": 0.2560163850486431,
+ "grad_norm": 0.0024566650390625,
+ "learning_rate": 0.0009948796722990272,
+ "loss": 0.0813,
+ "step": 5000
+ },
+ {
+ "epoch": 0.2816180235535074,
+ "grad_norm": 0.0025482177734375,
+ "learning_rate": 0.00099436763952893,
+ "loss": 0.001,
+ "step": 5500
+ },
+ {
+ "epoch": 0.30721966205837176,
+ "grad_norm": 0.00048828125,
+ "learning_rate": 0.0009938556067588325,
+ "loss": 0.0005,
+ "step": 6000
+ },
+ {
+ "epoch": 0.33282130056323606,
+ "grad_norm": 0.00058746337890625,
+ "learning_rate": 0.0009933435739887353,
+ "loss": 0.0004,
+ "step": 6500
+ },
+ {
+ "epoch": 0.35842293906810035,
+ "grad_norm": 0.000614166259765625,
+ "learning_rate": 0.000992831541218638,
+ "loss": 0.0004,
+ "step": 7000
+ },
+ {
+ "epoch": 0.38402457757296465,
+ "grad_norm": 0.014404296875,
+ "learning_rate": 0.0009923195084485406,
+ "loss": 0.0002,
+ "step": 7500
+ },
+ {
+ "epoch": 0.409626216077829,
+ "grad_norm": 0.000820159912109375,
+ "learning_rate": 0.0009918074756784434,
+ "loss": 0.0002,
+ "step": 8000
+ },
+ {
+ "epoch": 0.4352278545826933,
+ "grad_norm": 0.000713348388671875,
+ "learning_rate": 0.0009912954429083462,
+ "loss": 0.0002,
+ "step": 8500
+ },
+ {
+ "epoch": 0.4608294930875576,
+ "grad_norm": 0.00057220458984375,
+ "learning_rate": 0.000990783410138249,
+ "loss": 0.0002,
+ "step": 9000
+ },
+ {
+ "epoch": 0.4864311315924219,
+ "grad_norm": 0.0026702880859375,
+ "learning_rate": 0.0009902713773681515,
+ "loss": 0.0002,
+ "step": 9500
+ },
+ {
+ "epoch": 0.5120327700972862,
+ "grad_norm": 0.0026702880859375,
+ "learning_rate": 0.0009897593445980543,
+ "loss": 0.0001,
+ "step": 10000
+ },
+ {
+ "epoch": 0.5376344086021505,
+ "grad_norm": 0.0005035400390625,
+ "learning_rate": 0.000989247311827957,
+ "loss": 0.0001,
+ "step": 10500
+ },
+ {
+ "epoch": 0.5632360471070148,
+ "grad_norm": 0.00052642822265625,
+ "learning_rate": 0.0009887352790578599,
+ "loss": 0.0002,
+ "step": 11000
+ },
+ {
+ "epoch": 0.5888376856118792,
+ "grad_norm": 0.000667572021484375,
+ "learning_rate": 0.0009882232462877624,
+ "loss": 0.0001,
+ "step": 11500
+ },
+ {
+ "epoch": 0.6144393241167435,
+ "grad_norm": 0.0003376007080078125,
+ "learning_rate": 0.0009877112135176652,
+ "loss": 0.0001,
+ "step": 12000
+ },
+ {
+ "epoch": 0.6400409626216078,
+ "grad_norm": 0.00029754638671875,
+ "learning_rate": 0.000987199180747568,
+ "loss": 0.0001,
+ "step": 12500
+ },
+ {
+ "epoch": 0.6656426011264721,
+ "grad_norm": 0.000308990478515625,
+ "learning_rate": 0.0009866871479774705,
+ "loss": 0.0001,
+ "step": 13000
+ },
+ {
+ "epoch": 0.6912442396313364,
+ "grad_norm": 0.0003147125244140625,
+ "learning_rate": 0.0009861751152073733,
+ "loss": 0.0001,
+ "step": 13500
+ },
+ {
+ "epoch": 0.7168458781362007,
+ "grad_norm": 0.0004367828369140625,
+ "learning_rate": 0.0009856630824372759,
+ "loss": 0.0001,
+ "step": 14000
+ },
+ {
+ "epoch": 0.742447516641065,
+ "grad_norm": 0.0002880096435546875,
+ "learning_rate": 0.0009851510496671787,
+ "loss": 0.0001,
+ "step": 14500
+ },
+ {
+ "epoch": 0.7680491551459293,
+ "grad_norm": 0.000274658203125,
+ "learning_rate": 0.0009846390168970814,
+ "loss": 0.0001,
+ "step": 15000
+ },
+ {
+ "epoch": 0.7936507936507936,
+ "grad_norm": 0.0003528594970703125,
+ "learning_rate": 0.000984126984126984,
+ "loss": 0.0001,
+ "step": 15500
+ },
+ {
+ "epoch": 0.819252432155658,
+ "grad_norm": 0.0002880096435546875,
+ "learning_rate": 0.0009836149513568868,
+ "loss": 0.0001,
+ "step": 16000
+ },
+ {
+ "epoch": 0.8448540706605223,
+ "grad_norm": 0.0002536773681640625,
+ "learning_rate": 0.0009831029185867896,
+ "loss": 0.0001,
+ "step": 16500
+ },
+ {
+ "epoch": 0.8704557091653866,
+ "grad_norm": 0.0003509521484375,
+ "learning_rate": 0.0009825908858166923,
+ "loss": 0.0001,
+ "step": 17000
+ },
+ {
+ "epoch": 0.8960573476702509,
+ "grad_norm": 0.000244140625,
+ "learning_rate": 0.0009820788530465951,
+ "loss": 0.0,
+ "step": 17500
+ },
+ {
+ "epoch": 0.9216589861751152,
+ "grad_norm": 0.000568389892578125,
+ "learning_rate": 0.0009815668202764977,
+ "loss": 0.0007,
+ "step": 18000
+ },
+ {
+ "epoch": 0.9472606246799795,
+ "grad_norm": 0.0005645751953125,
+ "learning_rate": 0.0009810547875064005,
+ "loss": 0.0001,
+ "step": 18500
+ },
+ {
+ "epoch": 0.9728622631848438,
+ "grad_norm": 0.00037384033203125,
+ "learning_rate": 0.0009805427547363032,
+ "loss": 0.0001,
+ "step": 19000
+ },
+ {
+ "epoch": 0.9984639016897081,
+ "grad_norm": 0.000507354736328125,
+ "learning_rate": 0.000980030721966206,
+ "loss": 0.0001,
+ "step": 19500
+ },
+ {
+ "epoch": 1.0,
+ "eval_loss": 6.09988892392721e-05,
+ "eval_runtime": 0.5651,
+ "eval_samples_per_second": 1769.543,
+ "eval_steps_per_second": 3.539,
+ "step": 19530
+ },
+ {
+ "epoch": 1.0240655401945724,
+ "grad_norm": 0.00133514404296875,
+ "learning_rate": 0.0009795186891961086,
+ "loss": 0.0001,
+ "step": 20000
+ },
+ {
+ "epoch": 1.0496671786994367,
+ "grad_norm": 0.0004024505615234375,
+ "learning_rate": 0.0009790066564260114,
+ "loss": 0.0001,
+ "step": 20500
+ },
+ {
+ "epoch": 1.075268817204301,
+ "grad_norm": 0.00031280517578125,
+ "learning_rate": 0.000978494623655914,
+ "loss": 0.0002,
+ "step": 21000
+ },
+ {
+ "epoch": 1.1008704557091653,
+ "grad_norm": 0.0002689361572265625,
+ "learning_rate": 0.0009779825908858167,
+ "loss": 0.0001,
+ "step": 21500
+ },
+ {
+ "epoch": 1.1264720942140296,
+ "grad_norm": 0.00023365020751953125,
+ "learning_rate": 0.0009774705581157195,
+ "loss": 0.0001,
+ "step": 22000
+ },
+ {
+ "epoch": 1.1520737327188941,
+ "grad_norm": 0.00032806396484375,
+ "learning_rate": 0.000976958525345622,
+ "loss": 0.0001,
+ "step": 22500
+ },
+ {
+ "epoch": 1.1776753712237582,
+ "grad_norm": 0.000274658203125,
+ "learning_rate": 0.0009764464925755249,
+ "loss": 0.0001,
+ "step": 23000
+ },
+ {
+ "epoch": 1.2032770097286227,
+ "grad_norm": 0.0002651214599609375,
+ "learning_rate": 0.0009759344598054276,
+ "loss": 0.0001,
+ "step": 23500
+ },
+ {
+ "epoch": 1.228878648233487,
+ "grad_norm": 0.00029754638671875,
+ "learning_rate": 0.0009754224270353303,
+ "loss": 0.0001,
+ "step": 24000
+ },
+ {
+ "epoch": 1.2544802867383513,
+ "grad_norm": 0.000308990478515625,
+ "learning_rate": 0.0009749103942652329,
+ "loss": 0.0001,
+ "step": 24500
+ },
+ {
+ "epoch": 1.2800819252432156,
+ "grad_norm": 0.000263214111328125,
+ "learning_rate": 0.0009743983614951357,
+ "loss": 0.0001,
+ "step": 25000
+ },
+ {
+ "epoch": 1.30568356374808,
+ "grad_norm": 0.00074005126953125,
+ "learning_rate": 0.0009738863287250385,
+ "loss": 0.0001,
+ "step": 25500
+ },
+ {
+ "epoch": 1.3312852022529442,
+ "grad_norm": 0.000354766845703125,
+ "learning_rate": 0.000973374295954941,
+ "loss": 0.0001,
+ "step": 26000
+ },
+ {
+ "epoch": 1.3568868407578085,
+ "grad_norm": 0.0003108978271484375,
+ "learning_rate": 0.0009728622631848438,
+ "loss": 0.0001,
+ "step": 26500
+ },
+ {
+ "epoch": 1.3824884792626728,
+ "grad_norm": 0.000278472900390625,
+ "learning_rate": 0.0009723502304147466,
+ "loss": 0.0001,
+ "step": 27000
+ },
+ {
+ "epoch": 1.4080901177675371,
+ "grad_norm": 0.000293731689453125,
+ "learning_rate": 0.0009718381976446493,
+ "loss": 0.0,
+ "step": 27500
+ },
+ {
+ "epoch": 1.4336917562724014,
+ "grad_norm": 0.000263214111328125,
+ "learning_rate": 0.000971326164874552,
+ "loss": 0.0,
+ "step": 28000
+ },
+ {
+ "epoch": 1.4592933947772657,
+ "grad_norm": 0.000293731689453125,
+ "learning_rate": 0.0009708141321044547,
+ "loss": 0.0,
+ "step": 28500
+ },
+ {
+ "epoch": 1.48489503328213,
+ "grad_norm": 0.000278472900390625,
+ "learning_rate": 0.0009703020993343574,
+ "loss": 0.0001,
+ "step": 29000
+ },
+ {
+ "epoch": 1.5104966717869943,
+ "grad_norm": 0.00030517578125,
+ "learning_rate": 0.0009697900665642602,
+ "loss": 0.0001,
+ "step": 29500
+ },
+ {
+ "epoch": 1.5360983102918588,
+ "grad_norm": 0.0002899169921875,
+ "learning_rate": 0.0009692780337941628,
+ "loss": 0.0,
+ "step": 30000
+ },
+ {
+ "epoch": 1.561699948796723,
+ "grad_norm": 0.000766754150390625,
+ "learning_rate": 0.0009687660010240655,
+ "loss": 0.0,
+ "step": 30500
+ },
+ {
+ "epoch": 1.5873015873015874,
+ "grad_norm": 0.00032806396484375,
+ "learning_rate": 0.0009682539682539683,
+ "loss": 0.0001,
+ "step": 31000
+ },
+ {
+ "epoch": 1.6129032258064515,
+ "grad_norm": 0.000301361083984375,
+ "learning_rate": 0.000967741935483871,
+ "loss": 0.0001,
+ "step": 31500
+ },
+ {
+ "epoch": 1.638504864311316,
+ "grad_norm": 0.003997802734375,
+ "learning_rate": 0.0009672299027137736,
+ "loss": 0.0001,
+ "step": 32000
+ },
+ {
+ "epoch": 1.66410650281618,
+ "grad_norm": 0.0003452301025390625,
+ "learning_rate": 0.0009667178699436764,
+ "loss": 0.0001,
+ "step": 32500
+ },
+ {
+ "epoch": 1.6897081413210446,
+ "grad_norm": 0.0002918243408203125,
+ "learning_rate": 0.0009662058371735791,
+ "loss": 0.0001,
+ "step": 33000
+ },
+ {
+ "epoch": 1.7153097798259087,
+ "grad_norm": 0.0002422332763671875,
+ "learning_rate": 0.0009656938044034819,
+ "loss": 0.0001,
+ "step": 33500
+ },
+ {
+ "epoch": 1.7409114183307732,
+ "grad_norm": 0.00124359130859375,
+ "learning_rate": 0.0009651817716333846,
+ "loss": 0.0,
+ "step": 34000
+ },
+ {
+ "epoch": 1.7665130568356375,
+ "grad_norm": 0.0009002685546875,
+ "learning_rate": 0.0009646697388632872,
+ "loss": 0.0001,
+ "step": 34500
+ },
+ {
+ "epoch": 1.7921146953405018,
+ "grad_norm": 0.00030517578125,
+ "learning_rate": 0.00096415770609319,
+ "loss": 0.0,
+ "step": 35000
+ },
+ {
+ "epoch": 1.8177163338453661,
+ "grad_norm": 0.00041961669921875,
+ "learning_rate": 0.0009636456733230928,
+ "loss": 0.0001,
+ "step": 35500
+ },
+ {
+ "epoch": 1.8433179723502304,
+ "grad_norm": 0.0003108978271484375,
+ "learning_rate": 0.0009631336405529954,
+ "loss": 0.0,
+ "step": 36000
+ },
+ {
+ "epoch": 1.8689196108550947,
+ "grad_norm": 0.0002727508544921875,
+ "learning_rate": 0.0009626216077828981,
+ "loss": 0.0,
+ "step": 36500
+ },
+ {
+ "epoch": 1.894521249359959,
+ "grad_norm": 0.0002613067626953125,
+ "learning_rate": 0.0009621095750128009,
+ "loss": 0.0,
+ "step": 37000
+ },
+ {
+ "epoch": 1.9201228878648233,
+ "grad_norm": 0.0003204345703125,
+ "learning_rate": 0.0009615975422427036,
541
+ "loss": 0.0,
542
+ "step": 37500
543
+ },
544
+ {
545
+ "epoch": 1.9457245263696876,
546
+ "grad_norm": 0.000255584716796875,
547
+ "learning_rate": 0.0009610855094726063,
548
+ "loss": 0.0,
549
+ "step": 38000
550
+ },
551
+ {
552
+ "epoch": 1.971326164874552,
553
+ "grad_norm": 0.000263214111328125,
554
+ "learning_rate": 0.0009605734767025089,
555
+ "loss": 0.0,
556
+ "step": 38500
557
+ },
558
+ {
559
+ "epoch": 1.9969278033794162,
560
+ "grad_norm": 0.00048828125,
561
+ "learning_rate": 0.0009600614439324117,
562
+ "loss": 0.0001,
563
+ "step": 39000
564
+ },
565
+ {
566
+ "epoch": 2.0,
567
+ "eval_loss": 3.46598717442248e-05,
568
+ "eval_runtime": 0.5684,
569
+ "eval_samples_per_second": 1759.346,
570
+ "eval_steps_per_second": 3.519,
571
+ "step": 39060
572
+ },
573
+ {
574
+ "epoch": 2.0225294418842807,
575
+ "grad_norm": 0.000423431396484375,
576
+ "learning_rate": 0.0009595494111623145,
577
+ "loss": 0.0,
578
+ "step": 39500
579
+ },
580
+ {
581
+ "epoch": 2.048131080389145,
582
+ "grad_norm": 0.0002899169921875,
583
+ "learning_rate": 0.0009590373783922171,
584
+ "loss": 0.0,
585
+ "step": 40000
586
+ },
587
+ {
588
+ "epoch": 2.0737327188940093,
589
+ "grad_norm": 0.0002593994140625,
590
+ "learning_rate": 0.0009585253456221198,
591
+ "loss": 0.0,
592
+ "step": 40500
593
+ },
594
+ {
595
+ "epoch": 2.0993343573988734,
596
+ "grad_norm": 0.00042724609375,
597
+ "learning_rate": 0.0009580133128520226,
598
+ "loss": 0.0,
599
+ "step": 41000
600
+ },
601
+ {
602
+ "epoch": 2.124935995903738,
603
+ "grad_norm": 0.00032806396484375,
604
+ "learning_rate": 0.0009575012800819252,
605
+ "loss": 0.0,
606
+ "step": 41500
607
+ },
608
+ {
609
+ "epoch": 2.150537634408602,
610
+ "grad_norm": 0.0002918243408203125,
611
+ "learning_rate": 0.000956989247311828,
612
+ "loss": 0.0,
613
+ "step": 42000
614
+ },
615
+ {
616
+ "epoch": 2.1761392729134665,
617
+ "grad_norm": 0.00250244140625,
618
+ "learning_rate": 0.0009564772145417307,
619
+ "loss": 0.0,
620
+ "step": 42500
621
+ },
622
+ {
623
+ "epoch": 2.2017409114183306,
624
+ "grad_norm": 0.0002803802490234375,
625
+ "learning_rate": 0.0009559651817716334,
626
+ "loss": 0.0,
627
+ "step": 43000
628
+ },
629
+ {
630
+ "epoch": 2.227342549923195,
631
+ "grad_norm": 0.0003681182861328125,
632
+ "learning_rate": 0.0009554531490015361,
633
+ "loss": 0.0,
634
+ "step": 43500
635
+ },
636
+ {
637
+ "epoch": 2.252944188428059,
638
+ "grad_norm": 0.0002689361572265625,
639
+ "learning_rate": 0.0009549411162314389,
640
+ "loss": 0.0,
641
+ "step": 44000
642
+ },
643
+ {
644
+ "epoch": 2.2785458269329237,
645
+ "grad_norm": 0.000370025634765625,
646
+ "learning_rate": 0.0009544290834613415,
647
+ "loss": 0.0001,
648
+ "step": 44500
649
+ },
650
+ {
651
+ "epoch": 2.3041474654377883,
652
+ "grad_norm": 0.0002536773681640625,
653
+ "learning_rate": 0.0009539170506912443,
654
+ "loss": 0.0,
655
+ "step": 45000
656
+ },
657
+ {
658
+ "epoch": 2.3297491039426523,
659
+ "grad_norm": 0.000591278076171875,
660
+ "learning_rate": 0.0009534050179211469,
661
+ "loss": 0.0,
662
+ "step": 45500
663
+ },
664
+ {
665
+ "epoch": 2.3553507424475164,
666
+ "grad_norm": 0.000514984130859375,
667
+ "learning_rate": 0.0009528929851510497,
668
+ "loss": 0.0,
669
+ "step": 46000
670
+ },
671
+ {
672
+ "epoch": 2.380952380952381,
673
+ "grad_norm": 0.00025177001953125,
674
+ "learning_rate": 0.0009523809523809524,
675
+ "loss": 0.0,
676
+ "step": 46500
677
+ },
678
+ {
679
+ "epoch": 2.4065540194572455,
680
+ "grad_norm": 0.0002384185791015625,
681
+ "learning_rate": 0.000951868919610855,
682
+ "loss": 0.0,
683
+ "step": 47000
684
+ },
685
+ {
686
+ "epoch": 2.4321556579621095,
687
+ "grad_norm": 0.000247955322265625,
688
+ "learning_rate": 0.0009513568868407578,
689
+ "loss": 0.0,
690
+ "step": 47500
691
+ },
692
+ {
693
+ "epoch": 2.457757296466974,
694
+ "grad_norm": 0.000240325927734375,
695
+ "learning_rate": 0.0009508448540706606,
696
+ "loss": 0.0,
697
+ "step": 48000
698
+ },
699
+ {
700
+ "epoch": 2.483358934971838,
701
+ "grad_norm": 0.000274658203125,
702
+ "learning_rate": 0.0009503328213005633,
703
+ "loss": 0.0,
704
+ "step": 48500
705
+ },
706
+ {
707
+ "epoch": 2.5089605734767026,
708
+ "grad_norm": 0.0002269744873046875,
709
+ "learning_rate": 0.000949820788530466,
710
+ "loss": 0.0,
711
+ "step": 49000
712
+ },
713
+ {
714
+ "epoch": 2.5345622119815667,
715
+ "grad_norm": 0.0003204345703125,
716
+ "learning_rate": 0.0009493087557603687,
717
+ "loss": 0.0,
718
+ "step": 49500
719
+ },
720
+ {
721
+ "epoch": 2.5601638504864312,
722
+ "grad_norm": 0.00787353515625,
723
+ "learning_rate": 0.0009487967229902714,
724
+ "loss": 0.0,
725
+ "step": 50000
726
+ },
727
+ {
728
+ "epoch": 2.5857654889912953,
729
+ "grad_norm": 0.00022983551025390625,
730
+ "learning_rate": 0.0009482846902201742,
731
+ "loss": 0.0,
732
+ "step": 50500
733
+ },
734
+ {
735
+ "epoch": 2.61136712749616,
736
+ "grad_norm": 0.0002651214599609375,
737
+ "learning_rate": 0.0009477726574500767,
738
+ "loss": 0.0,
739
+ "step": 51000
740
+ },
741
+ {
742
+ "epoch": 2.636968766001024,
743
+ "grad_norm": 0.0002346038818359375,
744
+ "learning_rate": 0.0009472606246799795,
745
+ "loss": 0.0,
746
+ "step": 51500
747
+ },
748
+ {
749
+ "epoch": 2.6625704045058884,
750
+ "grad_norm": 0.00021839141845703125,
751
+ "learning_rate": 0.0009467485919098823,
752
+ "loss": 0.0,
753
+ "step": 52000
754
+ },
755
+ {
756
+ "epoch": 2.688172043010753,
757
+ "grad_norm": 0.0004177093505859375,
758
+ "learning_rate": 0.000946236559139785,
759
+ "loss": 0.0,
760
+ "step": 52500
761
+ },
762
+ {
763
+ "epoch": 2.713773681515617,
764
+ "grad_norm": 0.000247955322265625,
765
+ "learning_rate": 0.0009457245263696876,
766
+ "loss": 0.0,
767
+ "step": 53000
768
+ },
769
+ {
770
+ "epoch": 2.739375320020481,
771
+ "grad_norm": 0.0002269744873046875,
772
+ "learning_rate": 0.0009452124935995904,
773
+ "loss": 0.0,
774
+ "step": 53500
775
+ },
776
+ {
777
+ "epoch": 2.7649769585253456,
778
+ "grad_norm": 0.00025177001953125,
779
+ "learning_rate": 0.0009447004608294931,
780
+ "loss": 0.0,
781
+ "step": 54000
782
+ },
783
+ {
784
+ "epoch": 2.79057859703021,
785
+ "grad_norm": 0.0002536773681640625,
786
+ "learning_rate": 0.0009441884280593959,
787
+ "loss": 0.0,
788
+ "step": 54500
789
+ },
790
+ {
791
+ "epoch": 2.8161802355350742,
792
+ "grad_norm": 0.0002346038818359375,
793
+ "learning_rate": 0.0009436763952892985,
794
+ "loss": 0.0,
795
+ "step": 55000
796
+ },
797
+ {
798
+ "epoch": 2.8417818740399383,
799
+ "grad_norm": 0.0013427734375,
800
+ "learning_rate": 0.0009431643625192012,
801
+ "loss": 0.0,
802
+ "step": 55500
803
+ },
804
+ {
805
+ "epoch": 2.867383512544803,
806
+ "grad_norm": 0.0003528594970703125,
807
+ "learning_rate": 0.000942652329749104,
808
+ "loss": 0.0,
809
+ "step": 56000
810
+ },
811
+ {
812
+ "epoch": 2.8929851510496674,
813
+ "grad_norm": 0.00023746490478515625,
814
+ "learning_rate": 0.0009421402969790068,
815
+ "loss": 0.0,
816
+ "step": 56500
817
+ },
818
+ {
819
+ "epoch": 2.9185867895545314,
820
+ "grad_norm": 0.00055694580078125,
821
+ "learning_rate": 0.0009416282642089093,
822
+ "loss": 0.0,
823
+ "step": 57000
824
+ },
825
+ {
826
+ "epoch": 2.944188428059396,
827
+ "grad_norm": 0.0002307891845703125,
828
+ "learning_rate": 0.0009411162314388121,
829
+ "loss": 0.0,
830
+ "step": 57500
831
+ },
832
+ {
833
+ "epoch": 2.96979006656426,
834
+ "grad_norm": 0.000286102294921875,
835
+ "learning_rate": 0.0009406041986687148,
836
+ "loss": 0.0,
837
+ "step": 58000
838
+ },
839
+ {
840
+ "epoch": 2.9953917050691246,
841
+ "grad_norm": 0.00019550323486328125,
842
+ "learning_rate": 0.0009400921658986176,
843
+ "loss": 0.0,
844
+ "step": 58500
845
+ },
846
+ {
847
+ "epoch": 3.0,
848
+ "eval_loss": 3.896626367350109e-05,
849
+ "eval_runtime": 0.5618,
850
+ "eval_samples_per_second": 1780.014,
851
+ "eval_steps_per_second": 3.56,
852
+ "step": 58590
853
+ },
854
+ {
855
+ "epoch": 3.0209933435739886,
856
+ "grad_norm": 0.000255584716796875,
857
+ "learning_rate": 0.0009395801331285202,
858
+ "loss": 0.0,
859
+ "step": 59000
860
+ },
861
+ {
862
+ "epoch": 3.046594982078853,
863
+ "grad_norm": 0.000209808349609375,
864
+ "learning_rate": 0.0009390681003584229,
865
+ "loss": 0.0,
866
+ "step": 59500
867
+ },
868
+ {
869
+ "epoch": 3.0721966205837172,
870
+ "grad_norm": 0.00021457672119140625,
871
+ "learning_rate": 0.0009385560675883257,
872
+ "loss": 0.0,
873
+ "step": 60000
874
+ },
875
+ {
876
+ "epoch": 3.0977982590885818,
877
+ "grad_norm": 0.0013580322265625,
878
+ "learning_rate": 0.0009380440348182285,
879
+ "loss": 0.0,
880
+ "step": 60500
881
+ },
882
+ {
883
+ "epoch": 3.123399897593446,
884
+ "grad_norm": 0.0002002716064453125,
885
+ "learning_rate": 0.000937532002048131,
886
+ "loss": 0.0,
887
+ "step": 61000
888
+ },
889
+ {
890
+ "epoch": 3.1490015360983103,
891
+ "grad_norm": 0.0002727508544921875,
892
+ "learning_rate": 0.0009370199692780338,
893
+ "loss": 0.0,
894
+ "step": 61500
895
+ },
896
+ {
897
+ "epoch": 3.1746031746031744,
898
+ "grad_norm": 0.0003662109375,
899
+ "learning_rate": 0.0009365079365079366,
900
+ "loss": 0.0,
901
+ "step": 62000
902
+ },
903
+ {
904
+ "epoch": 3.200204813108039,
905
+ "grad_norm": 0.0002040863037109375,
906
+ "learning_rate": 0.0009359959037378392,
907
+ "loss": 0.0,
908
+ "step": 62500
909
+ },
910
+ {
911
+ "epoch": 3.225806451612903,
912
+ "grad_norm": 0.00020694732666015625,
913
+ "learning_rate": 0.0009354838709677419,
914
+ "loss": 0.0,
915
+ "step": 63000
916
+ },
917
+ {
918
+ "epoch": 3.2514080901177675,
919
+ "grad_norm": 0.000213623046875,
920
+ "learning_rate": 0.0009349718381976447,
921
+ "loss": 0.0,
922
+ "step": 63500
923
+ },
924
+ {
925
+ "epoch": 3.277009728622632,
926
+ "grad_norm": 0.00020313262939453125,
927
+ "learning_rate": 0.0009344598054275474,
928
+ "loss": 0.0,
929
+ "step": 64000
930
+ },
931
+ {
932
+ "epoch": 3.302611367127496,
933
+ "grad_norm": 0.0001983642578125,
934
+ "learning_rate": 0.0009339477726574501,
935
+ "loss": 0.0,
936
+ "step": 64500
937
+ },
938
+ {
939
+ "epoch": 3.32821300563236,
940
+ "grad_norm": 0.0002117156982421875,
941
+ "learning_rate": 0.0009334357398873528,
942
+ "loss": 0.0,
943
+ "step": 65000
944
+ },
945
+ {
946
+ "epoch": 3.3538146441372247,
947
+ "grad_norm": 0.0004444122314453125,
948
+ "learning_rate": 0.0009329237071172555,
949
+ "loss": 0.0,
950
+ "step": 65500
951
+ },
952
+ {
953
+ "epoch": 3.3794162826420893,
954
+ "grad_norm": 0.00024318695068359375,
955
+ "learning_rate": 0.0009324116743471583,
956
+ "loss": 0.0,
957
+ "step": 66000
958
+ },
959
+ {
960
+ "epoch": 3.4050179211469533,
961
+ "grad_norm": 0.004608154296875,
962
+ "learning_rate": 0.0009318996415770609,
963
+ "loss": 0.0,
964
+ "step": 66500
965
+ },
966
+ {
967
+ "epoch": 3.430619559651818,
968
+ "grad_norm": 0.00022029876708984375,
969
+ "learning_rate": 0.0009313876088069637,
970
+ "loss": 0.0,
971
+ "step": 67000
972
+ },
973
+ {
974
+ "epoch": 3.456221198156682,
975
+ "grad_norm": 0.00021648406982421875,
976
+ "learning_rate": 0.0009308755760368664,
977
+ "loss": 0.0,
978
+ "step": 67500
979
+ },
980
+ {
981
+ "epoch": 3.4818228366615465,
982
+ "grad_norm": 0.0002841949462890625,
983
+ "learning_rate": 0.000930363543266769,
984
+ "loss": 0.0,
985
+ "step": 68000
986
+ },
987
+ {
988
+ "epoch": 3.5074244751664105,
989
+ "grad_norm": 0.0002269744873046875,
990
+ "learning_rate": 0.0009298515104966718,
991
+ "loss": 0.0,
992
+ "step": 68500
993
+ },
994
+ {
995
+ "epoch": 3.533026113671275,
996
+ "grad_norm": 0.00021648406982421875,
997
+ "learning_rate": 0.0009293394777265746,
998
+ "loss": 0.0,
999
+ "step": 69000
1000
+ },
1001
+ {
1002
+ "epoch": 3.558627752176139,
1003
+ "grad_norm": 0.0002079010009765625,
1004
+ "learning_rate": 0.0009288274449564772,
1005
+ "loss": 0.0,
1006
+ "step": 69500
1007
+ },
1008
+ {
1009
+ "epoch": 3.5842293906810037,
1010
+ "grad_norm": 0.00022125244140625,
1011
+ "learning_rate": 0.00092831541218638,
1012
+ "loss": 0.0,
1013
+ "step": 70000
1014
+ },
1015
+ {
1016
+ "epoch": 3.6098310291858677,
1017
+ "grad_norm": 0.00021457672119140625,
1018
+ "learning_rate": 0.0009278033794162827,
1019
+ "loss": 0.0,
1020
+ "step": 70500
1021
+ },
1022
+ {
1023
+ "epoch": 3.6354326676907323,
1024
+ "grad_norm": 0.000209808349609375,
1025
+ "learning_rate": 0.0009272913466461854,
1026
+ "loss": 0.0,
1027
+ "step": 71000
1028
+ },
1029
+ {
1030
+ "epoch": 3.6610343061955968,
1031
+ "grad_norm": 0.0002727508544921875,
1032
+ "learning_rate": 0.0009267793138760881,
1033
+ "loss": 0.0,
1034
+ "step": 71500
1035
+ },
1036
+ {
1037
+ "epoch": 3.686635944700461,
1038
+ "grad_norm": 0.00020313262939453125,
1039
+ "learning_rate": 0.0009262672811059907,
1040
+ "loss": 0.0,
1041
+ "step": 72000
1042
+ },
1043
+ {
1044
+ "epoch": 3.712237583205325,
1045
+ "grad_norm": 0.0002288818359375,
1046
+ "learning_rate": 0.0009257552483358935,
1047
+ "loss": 0.0,
1048
+ "step": 72500
1049
+ },
1050
+ {
1051
+ "epoch": 3.7378392217101895,
1052
+ "grad_norm": 0.0010528564453125,
1053
+ "learning_rate": 0.0009252432155657963,
1054
+ "loss": 0.0,
1055
+ "step": 73000
1056
+ },
1057
+ {
1058
+ "epoch": 3.763440860215054,
1059
+ "grad_norm": 0.00019550323486328125,
1060
+ "learning_rate": 0.0009247311827956989,
1061
+ "loss": 0.0,
1062
+ "step": 73500
1063
+ },
1064
+ {
1065
+ "epoch": 3.789042498719918,
1066
+ "grad_norm": 0.00021266937255859375,
1067
+ "learning_rate": 0.0009242191500256016,
1068
+ "loss": 0.0,
1069
+ "step": 74000
1070
+ },
1071
+ {
1072
+ "epoch": 3.814644137224782,
1073
+ "grad_norm": 0.0003509521484375,
1074
+ "learning_rate": 0.0009237071172555044,
1075
+ "loss": 0.0,
1076
+ "step": 74500
1077
+ },
1078
+ {
1079
+ "epoch": 3.8402457757296466,
1080
+ "grad_norm": 0.00020599365234375,
1081
+ "learning_rate": 0.0009231950844854071,
1082
+ "loss": 0.0,
1083
+ "step": 75000
1084
+ },
1085
+ {
1086
+ "epoch": 3.865847414234511,
1087
+ "grad_norm": 0.0002002716064453125,
1088
+ "learning_rate": 0.0009226830517153098,
1089
+ "loss": 0.0,
1090
+ "step": 75500
1091
+ },
1092
+ {
1093
+ "epoch": 3.8914490527393752,
1094
+ "grad_norm": 0.00021076202392578125,
1095
+ "learning_rate": 0.0009221710189452125,
1096
+ "loss": 0.0,
1097
+ "step": 76000
1098
+ },
1099
+ {
1100
+ "epoch": 3.9170506912442398,
1101
+ "grad_norm": 0.0002918243408203125,
1102
+ "learning_rate": 0.0009216589861751152,
1103
+ "loss": 0.0,
1104
+ "step": 76500
1105
+ },
1106
+ {
1107
+ "epoch": 3.942652329749104,
1108
+ "grad_norm": 0.0003223419189453125,
1109
+ "learning_rate": 0.000921146953405018,
1110
+ "loss": 0.0,
1111
+ "step": 77000
1112
+ },
1113
+ {
1114
+ "epoch": 3.9682539682539684,
1115
+ "grad_norm": 0.0003032684326171875,
1116
+ "learning_rate": 0.0009206349206349207,
1117
+ "loss": 0.0,
1118
+ "step": 77500
1119
+ },
1120
+ {
1121
+ "epoch": 3.9938556067588324,
1122
+ "grad_norm": 0.000698089599609375,
1123
+ "learning_rate": 0.0009201228878648233,
1124
+ "loss": 0.0,
1125
+ "step": 78000
1126
+ },
1127
+ {
1128
+ "epoch": 4.0,
1129
+ "eval_loss": 3.7267222069203854e-05,
1130
+ "eval_runtime": 0.546,
1131
+ "eval_samples_per_second": 1831.41,
1132
+ "eval_steps_per_second": 3.663,
1133
+ "step": 78120
1134
+ },
1135
+ {
1136
+ "epoch": 4.0194572452636965,
1137
+ "grad_norm": 0.0002651214599609375,
1138
+ "learning_rate": 0.0009196108550947261,
1139
+ "loss": 0.0,
1140
+ "step": 78500
1141
+ },
1142
+ {
1143
+ "epoch": 4.0450588837685615,
1144
+ "grad_norm": 0.00020885467529296875,
1145
+ "learning_rate": 0.0009190988223246288,
1146
+ "loss": 0.0,
1147
+ "step": 79000
1148
+ },
1149
+ {
1150
+ "epoch": 4.070660522273426,
1151
+ "grad_norm": 0.00019741058349609375,
1152
+ "learning_rate": 0.0009185867895545314,
1153
+ "loss": 0.0,
1154
+ "step": 79500
1155
+ },
1156
+ {
1157
+ "epoch": 4.09626216077829,
1158
+ "grad_norm": 0.00021839141845703125,
1159
+ "learning_rate": 0.0009180747567844342,
1160
+ "loss": 0.0,
1161
+ "step": 80000
1162
+ },
1163
+ {
1164
+ "epoch": 4.121863799283154,
1165
+ "grad_norm": 0.00020313262939453125,
1166
+ "learning_rate": 0.0009175627240143369,
1167
+ "loss": 0.0,
1168
+ "step": 80500
1169
+ },
1170
+ {
1171
+ "epoch": 4.147465437788019,
1172
+ "grad_norm": 0.00023555755615234375,
1173
+ "learning_rate": 0.0009170506912442397,
1174
+ "loss": 0.0,
1175
+ "step": 81000
1176
+ },
1177
+ {
1178
+ "epoch": 4.173067076292883,
1179
+ "grad_norm": 0.0003204345703125,
1180
+ "learning_rate": 0.0009165386584741425,
1181
+ "loss": 0.0,
1182
+ "step": 81500
1183
+ },
1184
+ {
1185
+ "epoch": 4.198668714797747,
1186
+ "grad_norm": 0.00022411346435546875,
1187
+ "learning_rate": 0.000916026625704045,
1188
+ "loss": 0.0,
1189
+ "step": 82000
1190
+ },
1191
+ {
1192
+ "epoch": 4.224270353302611,
1193
+ "grad_norm": 0.0004634857177734375,
1194
+ "learning_rate": 0.0009155145929339478,
1195
+ "loss": 0.0,
1196
+ "step": 82500
1197
+ },
1198
+ {
1199
+ "epoch": 4.249871991807476,
1200
+ "grad_norm": 0.00023651123046875,
1201
+ "learning_rate": 0.0009150025601638506,
1202
+ "loss": 0.0,
1203
+ "step": 83000
1204
+ },
1205
+ {
1206
+ "epoch": 4.27547363031234,
1207
+ "grad_norm": 0.000202178955078125,
1208
+ "learning_rate": 0.0009144905273937532,
1209
+ "loss": 0.0,
1210
+ "step": 83500
1211
+ },
1212
+ {
1213
+ "epoch": 4.301075268817204,
1214
+ "grad_norm": 0.0003528594970703125,
1215
+ "learning_rate": 0.0009139784946236559,
1216
+ "loss": 0.0,
1217
+ "step": 84000
1218
+ },
1219
+ {
1220
+ "epoch": 4.326676907322069,
1221
+ "grad_norm": 0.0003204345703125,
1222
+ "learning_rate": 0.0009134664618535587,
1223
+ "loss": 0.0,
1224
+ "step": 84500
1225
+ },
1226
+ {
1227
+ "epoch": 4.352278545826933,
1228
+ "grad_norm": 0.000598907470703125,
1229
+ "learning_rate": 0.0009129544290834614,
1230
+ "loss": 0.0,
1231
+ "step": 85000
1232
+ },
1233
+ {
1234
+ "epoch": 4.377880184331797,
1235
+ "grad_norm": 0.000286102294921875,
1236
+ "learning_rate": 0.0009124423963133641,
1237
+ "loss": 0.0,
1238
+ "step": 85500
1239
+ },
1240
+ {
1241
+ "epoch": 4.403481822836661,
1242
+ "grad_norm": 0.00021839141845703125,
1243
+ "learning_rate": 0.0009119303635432667,
1244
+ "loss": 0.0,
1245
+ "step": 86000
1246
+ },
1247
+ {
1248
+ "epoch": 4.429083461341526,
1249
+ "grad_norm": 0.000225067138671875,
1250
+ "learning_rate": 0.0009114183307731695,
1251
+ "loss": 0.0,
1252
+ "step": 86500
1253
+ },
1254
+ {
1255
+ "epoch": 4.45468509984639,
1256
+ "grad_norm": 0.00141143798828125,
1257
+ "learning_rate": 0.0009109062980030723,
1258
+ "loss": 0.0,
1259
+ "step": 87000
1260
+ },
1261
+ {
1262
+ "epoch": 4.480286738351254,
1263
+ "grad_norm": 0.00022602081298828125,
1264
+ "learning_rate": 0.0009103942652329749,
1265
+ "loss": 0.0,
1266
+ "step": 87500
1267
+ },
1268
+ {
1269
+ "epoch": 4.505888376856118,
1270
+ "grad_norm": 0.000335693359375,
1271
+ "learning_rate": 0.0009098822324628776,
1272
+ "loss": 0.0,
1273
+ "step": 88000
1274
+ },
1275
+ {
1276
+ "epoch": 4.531490015360983,
1277
+ "grad_norm": 0.00019073486328125,
1278
+ "learning_rate": 0.0009093701996927804,
1279
+ "loss": 0.0,
1280
+ "step": 88500
1281
+ },
1282
+ {
1283
+ "epoch": 4.5570916538658475,
1284
+ "grad_norm": 0.00019931793212890625,
1285
+ "learning_rate": 0.000908858166922683,
1286
+ "loss": 0.0,
1287
+ "step": 89000
1288
+ },
1289
+ {
1290
+ "epoch": 4.5826932923707115,
1291
+ "grad_norm": 0.00021839141845703125,
1292
+ "learning_rate": 0.0009083461341525858,
1293
+ "loss": 0.0,
1294
+ "step": 89500
1295
+ },
1296
+ {
1297
+ "epoch": 4.6082949308755765,
1298
+ "grad_norm": 0.001983642578125,
1299
+ "learning_rate": 0.0009078341013824885,
1300
+ "loss": 0.0,
1301
+ "step": 90000
1302
+ },
1303
+ {
1304
+ "epoch": 4.633896569380441,
1305
+ "grad_norm": 0.00020503997802734375,
1306
+ "learning_rate": 0.0009073220686123912,
1307
+ "loss": 0.0,
1308
+ "step": 90500
1309
+ },
1310
+ {
1311
+ "epoch": 4.659498207885305,
1312
+ "grad_norm": 0.0002689361572265625,
1313
+ "learning_rate": 0.000906810035842294,
1314
+ "loss": 0.0,
1315
+ "step": 91000
1316
+ },
1317
+ {
1318
+ "epoch": 4.685099846390169,
1319
+ "grad_norm": 0.0002307891845703125,
1320
+ "learning_rate": 0.0009062980030721967,
1321
+ "loss": 0.0,
1322
+ "step": 91500
1323
+ },
1324
+ {
1325
+ "epoch": 4.710701484895033,
1326
+ "grad_norm": 0.00020885467529296875,
1327
+ "learning_rate": 0.0009057859703020993,
1328
+ "loss": 0.0,
1329
+ "step": 92000
1330
+ },
1331
+ {
1332
+ "epoch": 4.736303123399898,
1333
+ "grad_norm": 0.0002536773681640625,
1334
+ "learning_rate": 0.0009052739375320021,
1335
+ "loss": 0.0,
1336
+ "step": 92500
1337
+ },
1338
+ {
1339
+ "epoch": 4.761904761904762,
1340
+ "grad_norm": 0.0002040863037109375,
1341
+ "learning_rate": 0.0009047619047619047,
1342
+ "loss": 0.0,
1343
+ "step": 93000
1344
+ },
1345
+ {
1346
+ "epoch": 4.787506400409626,
1347
+ "grad_norm": 0.00174713134765625,
1348
+ "learning_rate": 0.0009042498719918075,
1349
+ "loss": 0.0,
1350
+ "step": 93500
1351
+ },
1352
+ {
1353
+ "epoch": 4.813108038914491,
1354
+ "grad_norm": 0.00020313262939453125,
1355
+ "learning_rate": 0.0009037378392217102,
1356
+ "loss": 0.0,
1357
+ "step": 94000
1358
+ },
1359
+ {
1360
+ "epoch": 4.838709677419355,
1361
+ "grad_norm": 0.0001926422119140625,
1362
+ "learning_rate": 0.0009032258064516129,
1363
+ "loss": 0.0,
1364
+ "step": 94500
1365
+ },
1366
+ {
1367
+ "epoch": 4.864311315924219,
1368
+ "grad_norm": 0.000186920166015625,
1369
+ "learning_rate": 0.0009027137736815156,
1370
+ "loss": 0.0,
1371
+ "step": 95000
1372
+ },
1373
+ {
1374
+ "epoch": 4.889912954429083,
1375
+ "grad_norm": 0.00019359588623046875,
1376
+ "learning_rate": 0.0009022017409114184,
1377
+ "loss": 0.0,
1378
+ "step": 95500
1379
+ },
1380
+ {
1381
+ "epoch": 4.915514592933948,
1382
+ "grad_norm": 0.00018310546875,
1383
+ "learning_rate": 0.000901689708141321,
1384
+ "loss": 0.0,
1385
+ "step": 96000
1386
+ },
1387
+ {
1388
+ "epoch": 4.941116231438812,
1389
+ "grad_norm": 0.000579833984375,
1390
+ "learning_rate": 0.0009011776753712238,
1391
+ "loss": 0.0,
1392
+ "step": 96500
1393
+ },
1394
+ {
1395
+ "epoch": 4.966717869943676,
1396
+ "grad_norm": 0.00017833709716796875,
1397
+ "learning_rate": 0.0009006656426011265,
1398
+ "loss": 0.0,
1399
+ "step": 97000
1400
+ },
1401
+ {
1402
+ "epoch": 4.99231950844854,
1403
+ "grad_norm": 0.000507354736328125,
1404
+ "learning_rate": 0.0009001536098310292,
1405
+ "loss": 0.0,
1406
+ "step": 97500
1407
+ },
1408
+ {
1409
+ "epoch": 5.0,
1410
+ "eval_loss": 3.1982614018488675e-05,
1411
+ "eval_runtime": 0.5464,
1412
+ "eval_samples_per_second": 1830.037,
1413
+ "eval_steps_per_second": 3.66,
1414
+ "step": 97650
1415
+ },
1416
+ {
1417
+ "epoch": 5.017921146953405,
1418
+ "grad_norm": 0.00020313262939453125,
1419
+ "learning_rate": 0.000899641577060932,
1420
+ "loss": 0.0,
1421
+ "step": 98000
1422
+ },
1423
+ {
1424
+ "epoch": 5.043522785458269,
1425
+ "grad_norm": 0.0002651214599609375,
1426
+ "learning_rate": 0.0008991295442908345,
1427
+ "loss": 0.0,
1428
+ "step": 98500
1429
+ },
1430
+ {
1431
+ "epoch": 5.0691244239631335,
1432
+ "grad_norm": 0.000335693359375,
1433
+ "learning_rate": 0.0008986175115207373,
1434
+ "loss": 0.0,
1435
+ "step": 99000
1436
+ },
1437
+ {
1438
+ "epoch": 5.0947260624679975,
1439
+ "grad_norm": 0.000186920166015625,
1440
+ "learning_rate": 0.0008981054787506401,
1441
+ "loss": 0.0,
1442
+ "step": 99500
1443
+ },
1444
+ {
1445
+ "epoch": 5.1203277009728625,
1446
+ "grad_norm": 0.0001850128173828125,
1447
+ "learning_rate": 0.0008975934459805428,
1448
+ "loss": 0.0,
1449
+ "step": 100000
1450
+ },
1451
+ {
1452
+ "epoch": 5.145929339477727,
1453
+ "grad_norm": 0.00022411346435546875,
1454
+ "learning_rate": 0.0008970814132104454,
1455
+ "loss": 0.0,
1456
+ "step": 100500
1457
+ },
1458
+ {
1459
+ "epoch": 5.171530977982591,
1460
+ "grad_norm": 0.0002002716064453125,
1461
+ "learning_rate": 0.0008965693804403482,
1462
+ "loss": 0.0,
1463
+ "step": 101000
1464
+ },
1465
+ {
1466
+ "epoch": 5.197132616487456,
1467
+ "grad_norm": 0.00019550323486328125,
1468
+ "learning_rate": 0.0008960573476702509,
1469
+ "loss": 0.0,
1470
+ "step": 101500
1471
+ },
1472
+ {
1473
+ "epoch": 5.22273425499232,
1474
+ "grad_norm": 0.00019931793212890625,
1475
+ "learning_rate": 0.0008955453149001537,
1476
+ "loss": 0.0,
1477
+ "step": 102000
1478
+ },
1479
+ {
1480
+ "epoch": 5.248335893497184,
1481
+ "grad_norm": 0.0002918243408203125,
1482
+ "learning_rate": 0.0008950332821300563,
1483
+ "loss": 0.0,
1484
+ "step": 102500
1485
+ },
1486
+ {
1487
+ "epoch": 5.273937532002048,
1488
+ "grad_norm": 0.000217437744140625,
1489
+ "learning_rate": 0.000894521249359959,
1490
+ "loss": 0.0,
1491
+ "step": 103000
1492
+ },
1493
+ {
1494
+ "epoch": 5.299539170506913,
1495
+ "grad_norm": 0.00018405914306640625,
1496
+ "learning_rate": 0.0008940092165898618,
1497
+ "loss": 0.0,
1498
+ "step": 103500
1499
+ },
1500
+ {
1501
+ "epoch": 5.325140809011777,
1502
+ "grad_norm": 0.0009918212890625,
1503
+ "learning_rate": 0.0008934971838197646,
1504
+ "loss": 0.0,
1505
+ "step": 104000
1506
+ },
1507
+ {
1508
+ "epoch": 5.350742447516641,
1509
+ "grad_norm": 0.00086212158203125,
1510
+ "learning_rate": 0.0008929851510496671,
1511
+ "loss": 0.0,
1512
+ "step": 104500
1513
+ },
1514
+ {
1515
+ "epoch": 5.376344086021505,
1516
+ "grad_norm": 0.00020122528076171875,
1517
+ "learning_rate": 0.0008924731182795699,
1518
+ "loss": 0.0,
1519
+ "step": 105000
1520
+ },
1521
+ {
1522
+ "epoch": 5.40194572452637,
1523
+ "grad_norm": 0.00019168853759765625,
1524
+ "learning_rate": 0.0008919610855094726,
1525
+ "loss": 0.0,
1526
+ "step": 105500
1527
+ },
1528
+ {
1529
+ "epoch": 5.427547363031234,
1530
+ "grad_norm": 0.0001964569091796875,
1531
+ "learning_rate": 0.0008914490527393754,
1532
+ "loss": 0.0,
1533
+ "step": 106000
1534
+ },
1535
+ {
1536
+ "epoch": 5.453149001536098,
1537
+ "grad_norm": 0.0001811981201171875,
1538
+ "learning_rate": 0.000890937019969278,
1539
+ "loss": 0.0,
1540
+ "step": 106500
1541
+ },
1542
+ {
1543
+ "epoch": 5.478750640040962,
1544
+ "grad_norm": 0.000179290771484375,
1545
+ "learning_rate": 0.0008904249871991807,
1546
+ "loss": 0.0,
1547
+ "step": 107000
1548
+ },
1549
+ {
1550
+ "epoch": 5.504352278545827,
1551
+ "grad_norm": 0.0001926422119140625,
1552
+ "learning_rate": 0.0008899129544290835,
1553
+ "loss": 0.0,
1554
+ "step": 107500
1555
+ },
1556
+ {
1557
+ "epoch": 5.529953917050691,
1558
+ "grad_norm": 0.00018310546875,
1559
+ "learning_rate": 0.0008894009216589863,
1560
+ "loss": 0.0,
1561
+ "step": 108000
1562
+ },
1563
+ {
1564
+ "epoch": 5.555555555555555,
1565
+ "grad_norm": 0.00018596649169921875,
1566
+ "learning_rate": 0.0008888888888888888,
1567
+ "loss": 0.0,
1568
+ "step": 108500
1569
+ },
1570
+ {
1571
+ "epoch": 5.58115719406042,
1572
+ "grad_norm": 0.0002689361572265625,
1573
+ "learning_rate": 0.0008883768561187916,
1574
+ "loss": 0.0,
1575
+ "step": 109000
1576
+ },
1577
+ {
1578
+ "epoch": 5.606758832565284,
1579
+ "grad_norm": 0.00018215179443359375,
1580
+ "learning_rate": 0.0008878648233486944,
1581
+ "loss": 0.0,
1582
+ "step": 109500
1583
+ },
1584
+ {
1585
+ "epoch": 5.6323604710701485,
1586
+ "grad_norm": 0.0002613067626953125,
1587
+ "learning_rate": 0.000887352790578597,
1588
+ "loss": 0.0,
1589
+ "step": 110000
1590
+ },
1591
+ {
1592
+ "epoch": 5.6579621095750126,
1593
+ "grad_norm": 0.0002613067626953125,
1594
+ "learning_rate": 0.0008868407578084997,
1595
+ "loss": 0.0,
1596
+ "step": 110500
1597
+ },
1598
+ {
1599
+ "epoch": 5.683563748079877,
1600
+ "grad_norm": 0.0001926422119140625,
1601
+ "learning_rate": 0.0008863287250384025,
1602
+ "loss": 0.0,
1603
+ "step": 111000
1604
+ },
1605
+ {
1606
+ "epoch": 5.709165386584742,
1607
+ "grad_norm": 0.00018024444580078125,
1608
+ "learning_rate": 0.0008858166922683052,
1609
+ "loss": 0.0,
1610
+ "step": 111500
1611
+ },
1612
+ {
1613
+ "epoch": 5.734767025089606,
1614
+ "grad_norm": 0.000186920166015625,
1615
+ "learning_rate": 0.000885304659498208,
1616
+ "loss": 0.0,
1617
+ "step": 112000
1618
+ },
1619
+ {
1620
+ "epoch": 5.76036866359447,
1621
+ "grad_norm": 0.00018024444580078125,
1622
+ "learning_rate": 0.0008847926267281106,
1623
+ "loss": 0.0,
1624
+ "step": 112500
1625
+ },
1626
+ {
1627
+ "epoch": 5.785970302099335,
1628
+ "grad_norm": 0.000347137451171875,
1629
+ "learning_rate": 0.0008842805939580133,
1630
+ "loss": 0.0,
1631
+ "step": 113000
1632
+ },
1633
+ {
1634
+ "epoch": 5.811571940604199,
1635
+ "grad_norm": 0.00023174285888671875,
1636
+ "learning_rate": 0.0008837685611879161,
1637
+ "loss": 0.0,
1638
+ "step": 113500
1639
+ },
1640
+ {
1641
+ "epoch": 5.837173579109063,
1642
+ "grad_norm": 0.000392913818359375,
1643
+ "learning_rate": 0.0008832565284178187,
1644
+ "loss": 0.0,
1645
+ "step": 114000
1646
+ },
1647
+ {
1648
+ "epoch": 5.862775217613927,
1649
+ "grad_norm": 0.0003032684326171875,
1650
+ "learning_rate": 0.0008827444956477215,
1651
+ "loss": 0.0,
1652
+ "step": 114500
1653
+ },
1654
+ {
1655
+ "epoch": 5.888376856118792,
1656
+ "grad_norm": 0.0001964569091796875,
1657
+ "learning_rate": 0.0008822324628776242,
1658
+ "loss": 0.0,
1659
+ "step": 115000
1660
+ },
1661
+ {
1662
+ "epoch": 5.913978494623656,
1663
+ "grad_norm": 0.00020122528076171875,
1664
+ "learning_rate": 0.0008817204301075269,
1665
+ "loss": 0.0,
1666
+ "step": 115500
1667
+ },
1668
+ {
1669
+ "epoch": 5.93958013312852,
1670
+ "grad_norm": 0.00019550323486328125,
1671
+ "learning_rate": 0.0008812083973374296,
1672
+ "loss": 0.0,
1673
+ "step": 116000
1674
+ },
1675
+ {
1676
+ "epoch": 5.965181771633384,
1677
+ "grad_norm": 0.000179290771484375,
1678
+ "learning_rate": 0.0008806963645673324,
1679
+ "loss": 0.0,
1680
+ "step": 116500
1681
+ },
1682
+ {
1683
+ "epoch": 5.990783410138249,
1684
+ "grad_norm": 0.00018024444580078125,
1685
+ "learning_rate": 0.000880184331797235,
1686
+ "loss": 0.0,
1687
+ "step": 117000
1688
+ },
1689
+ {
1690
+ "epoch": 6.0,
1691
+ "eval_loss": 3.100566391367465e-05,
1692
+ "eval_runtime": 0.5504,
1693
+ "eval_samples_per_second": 1816.722,
1694
+ "eval_steps_per_second": 3.633,
1695
+ "step": 117180
1696
+ },
1697
+ {
1698
+ "epoch": 6.016385048643113,
1699
+ "grad_norm": 0.0001983642578125,
1700
+ "learning_rate": 0.0008796722990271378,
1701
+ "loss": 0.0,
1702
+ "step": 117500
1703
+ },
1704
+ {
1705
+ "epoch": 6.041986687147977,
1706
+ "grad_norm": 0.00018978118896484375,
1707
+ "learning_rate": 0.0008791602662570405,
1708
+ "loss": 0.0,
1709
+ "step": 118000
1710
+ },
1711
+ {
1712
+ "epoch": 6.067588325652842,
1713
+ "grad_norm": 0.00017833709716796875,
1714
+ "learning_rate": 0.0008786482334869432,
1715
+ "loss": 0.0,
1716
+ "step": 118500
1717
+ },
1718
+ {
1719
+ "epoch": 6.093189964157706,
1720
+ "grad_norm": 0.00019168853759765625,
1721
+ "learning_rate": 0.0008781362007168459,
1722
+ "loss": 0.0,
1723
+ "step": 119000
1724
+ },
1725
+ {
1726
+ "epoch": 6.11879160266257,
1727
+ "grad_norm": 0.000209808349609375,
1728
+ "learning_rate": 0.0008776241679467485,
1729
+ "loss": 0.0,
1730
+ "step": 119500
1731
+ },
1732
+ {
1733
+ "epoch": 6.1443932411674345,
1734
+ "grad_norm": 0.0001964569091796875,
1735
+ "learning_rate": 0.0008771121351766513,
1736
+ "loss": 0.0,
1737
+ "step": 120000
1738
+ },
1739
+ {
1740
+ "epoch": 6.169994879672299,
1741
+ "grad_norm": 0.0002002716064453125,
1742
+ "learning_rate": 0.0008766001024065541,
1743
+ "loss": 0.0,
1744
+ "step": 120500
1745
+ },
1746
+ {
1747
+ "epoch": 6.1955965181771635,
1748
+ "grad_norm": 0.0001773834228515625,
1749
+ "learning_rate": 0.0008760880696364567,
1750
+ "loss": 0.0,
1751
+ "step": 121000
1752
+ },
1753
+ {
1754
+ "epoch": 6.221198156682028,
1755
+ "grad_norm": 0.0001773834228515625,
1756
+ "learning_rate": 0.0008755760368663594,
1757
+ "loss": 0.0,
1758
+ "step": 121500
1759
+ },
1760
+ {
1761
+ "epoch": 6.246799795186892,
1762
+ "grad_norm": 0.0002040863037109375,
1763
+ "learning_rate": 0.0008750640040962622,
1764
+ "loss": 0.0,
1765
+ "step": 122000
1766
+ },
1767
+ {
1768
+ "epoch": 6.272401433691757,
1769
+ "grad_norm": 0.0002498626708984375,
1770
+ "learning_rate": 0.0008745519713261649,
1771
+ "loss": 0.0,
1772
+ "step": 122500
1773
+ },
1774
+ {
1775
+ "epoch": 6.298003072196621,
1776
+ "grad_norm": 0.000179290771484375,
1777
+ "learning_rate": 0.0008740399385560676,
1778
+ "loss": 0.0,
1779
+ "step": 123000
1780
+ },
1781
+ {
1782
+ "epoch": 6.323604710701485,
1783
+ "grad_norm": 0.00018405914306640625,
1784
+ "learning_rate": 0.0008735279057859703,
1785
+ "loss": 0.0,
1786
+ "step": 123500
1787
+ },
1788
+ {
1789
+ "epoch": 6.349206349206349,
1790
+ "grad_norm": 0.00017452239990234375,
1791
+ "learning_rate": 0.000873015873015873,
1792
+ "loss": 0.0,
1793
+ "step": 124000
1794
+ },
1795
+ {
1796
+ "epoch": 6.374807987711214,
1797
+ "grad_norm": 0.00018024444580078125,
1798
+ "learning_rate": 0.0008725038402457758,
1799
+ "loss": 0.0,
1800
+ "step": 124500
1801
+ },
1802
+ {
1803
+ "epoch": 6.400409626216078,
1804
+ "grad_norm": 0.00026702880859375,
1805
+ "learning_rate": 0.0008719918074756785,
1806
+ "loss": 0.0,
1807
+ "step": 125000
1808
+ },
1809
+ {
1810
+ "epoch": 6.426011264720942,
1811
+ "grad_norm": 0.00018978118896484375,
1812
+ "learning_rate": 0.0008714797747055811,
1813
+ "loss": 0.0,
1814
+ "step": 125500
1815
+ },
1816
+ {
1817
+ "epoch": 6.451612903225806,
1818
+ "grad_norm": 0.00018405914306640625,
1819
+ "learning_rate": 0.0008709677419354839,
1820
+ "loss": 0.0,
1821
+ "step": 126000
1822
+ },
1823
+ {
1824
+ "epoch": 6.477214541730671,
1825
+ "grad_norm": 0.00018596649169921875,
1826
+ "learning_rate": 0.0008704557091653866,
1827
+ "loss": 0.0,
1828
+ "step": 126500
1829
+ },
1830
+ {
1831
+ "epoch": 6.502816180235535,
1832
+ "grad_norm": 0.000568389892578125,
1833
+ "learning_rate": 0.0008699436763952893,
1834
+ "loss": 0.0,
1835
+ "step": 127000
1836
+ },
1837
+ {
1838
+ "epoch": 6.528417818740399,
1839
+ "grad_norm": 0.00060272216796875,
1840
+ "learning_rate": 0.000869431643625192,
1841
+ "loss": 0.0,
1842
+ "step": 127500
1843
+ },
1844
+ {
1845
+ "epoch": 6.554019457245264,
1846
+ "grad_norm": 0.000179290771484375,
1847
+ "learning_rate": 0.0008689196108550947,
1848
+ "loss": 0.0,
1849
+ "step": 128000
1850
+ },
1851
+ {
1852
+ "epoch": 6.579621095750128,
1853
+ "grad_norm": 0.000255584716796875,
1854
+ "learning_rate": 0.0008684075780849975,
1855
+ "loss": 0.0,
1856
+ "step": 128500
1857
+ },
1858
+ {
1859
+ "epoch": 6.605222734254992,
1860
+ "grad_norm": 0.00018978118896484375,
1861
+ "learning_rate": 0.0008678955453149003,
1862
+ "loss": 0.0,
1863
+ "step": 129000
1864
+ },
1865
+ {
1866
+ "epoch": 6.630824372759856,
1867
+ "grad_norm": 0.00019550323486328125,
1868
+ "learning_rate": 0.0008673835125448028,
1869
+ "loss": 0.0,
1870
+ "step": 129500
1871
+ },
1872
+ {
1873
+ "epoch": 6.65642601126472,
1874
+ "grad_norm": 0.0004329681396484375,
1875
+ "learning_rate": 0.0008668714797747056,
1876
+ "loss": 0.0,
1877
+ "step": 130000
1878
+ },
1879
+ {
1880
+ "epoch": 6.682027649769585,
1881
+ "grad_norm": 0.00040435791015625,
1882
+ "learning_rate": 0.0008663594470046084,
1883
+ "loss": 0.0,
1884
+ "step": 130500
1885
+ },
1886
+ {
1887
+ "epoch": 6.7076292882744495,
1888
+ "grad_norm": 0.000392913818359375,
1889
+ "learning_rate": 0.000865847414234511,
1890
+ "loss": 0.0,
1891
+ "step": 131000
1892
+ },
1893
+ {
1894
+ "epoch": 6.733230926779314,
1895
+ "grad_norm": 0.0001811981201171875,
1896
+ "learning_rate": 0.0008653353814644137,
1897
+ "loss": 0.0,
1898
+ "step": 131500
1899
+ },
1900
+ {
1901
+ "epoch": 6.7588325652841785,
1902
+ "grad_norm": 0.00018310546875,
1903
+ "learning_rate": 0.0008648233486943165,
1904
+ "loss": 0.0,
1905
+ "step": 132000
1906
+ },
1907
+ {
1908
+ "epoch": 6.784434203789043,
1909
+ "grad_norm": 0.00022125244140625,
1910
+ "learning_rate": 0.0008643113159242192,
1911
+ "loss": 0.0,
1912
+ "step": 132500
1913
+ },
1914
+ {
1915
+ "epoch": 6.810035842293907,
1916
+ "grad_norm": 0.00018310546875,
1917
+ "learning_rate": 0.000863799283154122,
1918
+ "loss": 0.0,
1919
+ "step": 133000
1920
+ },
1921
+ {
1922
+ "epoch": 6.835637480798771,
1923
+ "grad_norm": 0.00022125244140625,
1924
+ "learning_rate": 0.0008632872503840245,
1925
+ "loss": 0.0,
1926
+ "step": 133500
1927
+ },
1928
+ {
1929
+ "epoch": 6.861239119303636,
1930
+ "grad_norm": 0.0001926422119140625,
1931
+ "learning_rate": 0.0008627752176139273,
1932
+ "loss": 0.0,
1933
+ "step": 134000
1934
+ },
1935
+ {
1936
+ "epoch": 6.8868407578085,
1937
+ "grad_norm": 0.00023365020751953125,
1938
+ "learning_rate": 0.0008622631848438301,
1939
+ "loss": 0.0,
1940
+ "step": 134500
1941
+ },
1942
+ {
1943
+ "epoch": 6.912442396313364,
1944
+ "grad_norm": 0.00018787384033203125,
1945
+ "learning_rate": 0.0008617511520737327,
1946
+ "loss": 0.0,
1947
+ "step": 135000
1948
+ },
1949
+ {
1950
+ "epoch": 6.938044034818228,
1951
+ "grad_norm": 0.0003833770751953125,
1952
+ "learning_rate": 0.0008612391193036354,
1953
+ "loss": 0.0,
1954
+ "step": 135500
1955
+ },
1956
+ {
1957
+ "epoch": 6.963645673323093,
1958
+ "grad_norm": 0.0003833770751953125,
1959
+ "learning_rate": 0.0008607270865335382,
1960
+ "loss": 0.0,
1961
+ "step": 136000
1962
+ },
1963
+ {
1964
+ "epoch": 6.989247311827957,
1965
+ "grad_norm": 0.0004425048828125,
1966
+ "learning_rate": 0.0008602150537634409,
1967
+ "loss": 0.0,
1968
+ "step": 136500
1969
+ },
1970
+ {
1971
+ "epoch": 7.0,
1972
+ "eval_loss": 3.138924512313679e-05,
1973
+ "eval_runtime": 0.5584,
1974
+ "eval_samples_per_second": 1790.97,
1975
+ "eval_steps_per_second": 3.582,
1976
+ "step": 136710
1977
+ },
1978
+ {
1979
+ "epoch": 7.014848950332821,
1980
+ "grad_norm": 0.0001811981201171875,
1981
+ "learning_rate": 0.0008597030209933436,
1982
+ "loss": 0.0,
1983
+ "step": 137000
1984
+ },
1985
+ {
1986
+ "epoch": 7.040450588837686,
1987
+ "grad_norm": 0.0002536773681640625,
1988
+ "learning_rate": 0.0008591909882232463,
1989
+ "loss": 0.0,
1990
+ "step": 137500
1991
+ },
1992
+ {
1993
+ "epoch": 7.06605222734255,
1994
+ "grad_norm": 0.0004558563232421875,
1995
+ "learning_rate": 0.000858678955453149,
1996
+ "loss": 0.0,
1997
+ "step": 138000
1998
+ },
1999
+ {
2000
+ "epoch": 7.091653865847414,
2001
+ "grad_norm": 0.00017833709716796875,
2002
+ "learning_rate": 0.0008581669226830518,
2003
+ "loss": 0.0,
2004
+ "step": 138500
2005
+ },
2006
+ {
2007
+ "epoch": 7.117255504352278,
2008
+ "grad_norm": 0.0001773834228515625,
2009
+ "learning_rate": 0.0008576548899129545,
2010
+ "loss": 0.0,
2011
+ "step": 139000
2012
+ },
2013
+ {
2014
+ "epoch": 7.142857142857143,
2015
+ "grad_norm": 0.0004482269287109375,
2016
+ "learning_rate": 0.0008571428571428571,
2017
+ "loss": 0.0,
2018
+ "step": 139500
2019
+ },
2020
+ {
2021
+ "epoch": 7.168458781362007,
2022
+ "grad_norm": 0.00019359588623046875,
2023
+ "learning_rate": 0.0008566308243727599,
2024
+ "loss": 0.0,
2025
+ "step": 140000
2026
+ },
2027
+ {
2028
+ "epoch": 7.194060419866871,
2029
+ "grad_norm": 0.0001811981201171875,
2030
+ "learning_rate": 0.0008561187916026625,
2031
+ "loss": 0.0,
2032
+ "step": 140500
2033
+ },
2034
+ {
2035
+ "epoch": 7.2196620583717355,
2036
+ "grad_norm": 0.000576019287109375,
2037
+ "learning_rate": 0.0008556067588325653,
2038
+ "loss": 0.0,
2039
+ "step": 141000
2040
+ },
2041
+ {
2042
+ "epoch": 7.2452636968766,
2043
+ "grad_norm": 0.00018787384033203125,
2044
+ "learning_rate": 0.000855094726062468,
2045
+ "loss": 0.0,
2046
+ "step": 141500
2047
+ },
2048
+ {
2049
+ "epoch": 7.2708653353814645,
2050
+ "grad_norm": 0.0002155303955078125,
2051
+ "learning_rate": 0.0008545826932923707,
2052
+ "loss": 0.0,
2053
+ "step": 142000
2054
+ },
2055
+ {
2056
+ "epoch": 7.296466973886329,
2057
+ "grad_norm": 0.00018215179443359375,
2058
+ "learning_rate": 0.0008540706605222734,
2059
+ "loss": 0.0,
2060
+ "step": 142500
2061
+ },
2062
+ {
2063
+ "epoch": 7.322068612391193,
2064
+ "grad_norm": 0.0002346038818359375,
2065
+ "learning_rate": 0.0008535586277521762,
2066
+ "loss": 0.0,
2067
+ "step": 143000
2068
+ },
2069
+ {
2070
+ "epoch": 7.347670250896058,
2071
+ "grad_norm": 0.00023937225341796875,
2072
+ "learning_rate": 0.0008530465949820788,
2073
+ "loss": 0.0,
2074
+ "step": 143500
2075
+ },
2076
+ {
2077
+ "epoch": 7.373271889400922,
2078
+ "grad_norm": 0.0002460479736328125,
2079
+ "learning_rate": 0.0008525345622119816,
2080
+ "loss": 0.0,
2081
+ "step": 144000
2082
+ },
2083
+ {
2084
+ "epoch": 7.398873527905786,
2085
+ "grad_norm": 0.0002613067626953125,
2086
+ "learning_rate": 0.0008520225294418843,
2087
+ "loss": 0.0,
2088
+ "step": 144500
2089
+ },
2090
+ {
2091
+ "epoch": 7.42447516641065,
2092
+ "grad_norm": 0.00017642974853515625,
2093
+ "learning_rate": 0.000851510496671787,
2094
+ "loss": 0.0,
2095
+ "step": 145000
2096
+ },
2097
+ {
2098
+ "epoch": 7.450076804915515,
2099
+ "grad_norm": 0.000232696533203125,
2100
+ "learning_rate": 0.0008509984639016898,
2101
+ "loss": 0.0,
2102
+ "step": 145500
2103
+ },
2104
+ {
2105
+ "epoch": 7.475678443420379,
2106
+ "grad_norm": 0.0001697540283203125,
2107
+ "learning_rate": 0.0008504864311315924,
2108
+ "loss": 0.0,
2109
+ "step": 146000
2110
+ },
2111
+ {
2112
+ "epoch": 7.501280081925243,
2113
+ "grad_norm": 0.00017833709716796875,
2114
+ "learning_rate": 0.0008499743983614951,
2115
+ "loss": 0.0,
2116
+ "step": 146500
2117
+ },
2118
+ {
2119
+ "epoch": 7.526881720430108,
2120
+ "grad_norm": 0.000377655029296875,
2121
+ "learning_rate": 0.0008494623655913979,
2122
+ "loss": 0.0,
2123
+ "step": 147000
2124
+ },
2125
+ {
2126
+ "epoch": 7.552483358934972,
2127
+ "grad_norm": 0.000194549560546875,
2128
+ "learning_rate": 0.0008489503328213006,
2129
+ "loss": 0.0,
2130
+ "step": 147500
2131
+ },
2132
+ {
2133
+ "epoch": 7.578084997439836,
2134
+ "grad_norm": 0.0004825592041015625,
2135
+ "learning_rate": 0.0008484383000512033,
2136
+ "loss": 0.0,
2137
+ "step": 148000
2138
+ },
2139
+ {
2140
+ "epoch": 7.6036866359447,
2141
+ "grad_norm": 0.0001773834228515625,
2142
+ "learning_rate": 0.000847926267281106,
2143
+ "loss": 0.0,
2144
+ "step": 148500
2145
+ },
2146
+ {
2147
+ "epoch": 7.629288274449565,
2148
+ "grad_norm": 0.0001926422119140625,
2149
+ "learning_rate": 0.0008474142345110087,
2150
+ "loss": 0.0,
2151
+ "step": 149000
2152
+ },
2153
+ {
2154
+ "epoch": 7.654889912954429,
2155
+ "grad_norm": 0.00018215179443359375,
2156
+ "learning_rate": 0.0008469022017409115,
2157
+ "loss": 0.0,
2158
+ "step": 149500
2159
+ },
2160
+ {
2161
+ "epoch": 7.680491551459293,
2162
+ "grad_norm": 0.00019073486328125,
2163
+ "learning_rate": 0.0008463901689708142,
2164
+ "loss": 0.0,
2165
+ "step": 150000
2166
+ },
2167
+ {
2168
+ "epoch": 7.706093189964157,
2169
+ "grad_norm": 0.00017833709716796875,
2170
+ "learning_rate": 0.0008458781362007168,
2171
+ "loss": 0.0,
2172
+ "step": 150500
2173
+ },
2174
+ {
2175
+ "epoch": 7.731694828469022,
2176
+ "grad_norm": 0.0001926422119140625,
2177
+ "learning_rate": 0.0008453661034306196,
2178
+ "loss": 0.0,
2179
+ "step": 151000
2180
+ },
2181
+ {
2182
+ "epoch": 7.757296466973886,
2183
+ "grad_norm": 0.000274658203125,
2184
+ "learning_rate": 0.0008448540706605224,
2185
+ "loss": 0.0,
2186
+ "step": 151500
2187
+ },
2188
+ {
2189
+ "epoch": 7.7828981054787505,
2190
+ "grad_norm": 0.00017452239990234375,
2191
+ "learning_rate": 0.0008443420378904249,
2192
+ "loss": 0.0,
2193
+ "step": 152000
2194
+ },
2195
+ {
2196
+ "epoch": 7.808499743983615,
2197
+ "grad_norm": 0.00017833709716796875,
2198
+ "learning_rate": 0.0008438300051203277,
2199
+ "loss": 0.0,
2200
+ "step": 152500
2201
+ },
2202
+ {
2203
+ "epoch": 7.8341013824884795,
2204
+ "grad_norm": 0.000301361083984375,
2205
+ "learning_rate": 0.0008433179723502304,
2206
+ "loss": 0.0,
2207
+ "step": 153000
2208
+ },
2209
+ {
2210
+ "epoch": 7.859703020993344,
2211
+ "grad_norm": 0.0001697540283203125,
2212
+ "learning_rate": 0.0008428059395801332,
2213
+ "loss": 0.0,
2214
+ "step": 153500
2215
+ },
2216
+ {
2217
+ "epoch": 7.885304659498208,
2218
+ "grad_norm": 0.00020313262939453125,
2219
+ "learning_rate": 0.0008422939068100358,
2220
+ "loss": 0.0,
2221
+ "step": 154000
2222
+ },
2223
+ {
2224
+ "epoch": 7.910906298003072,
2225
+ "grad_norm": 0.0004425048828125,
2226
+ "learning_rate": 0.0008417818740399385,
2227
+ "loss": 0.0,
2228
+ "step": 154500
2229
+ },
2230
+ {
2231
+ "epoch": 7.936507936507937,
2232
+ "grad_norm": 0.00017452239990234375,
2233
+ "learning_rate": 0.0008412698412698413,
2234
+ "loss": 0.0,
2235
+ "step": 155000
2236
+ },
2237
+ {
2238
+ "epoch": 7.962109575012801,
2239
+ "grad_norm": 0.0002346038818359375,
2240
+ "learning_rate": 0.0008407578084997441,
2241
+ "loss": 0.0,
2242
+ "step": 155500
2243
+ },
2244
+ {
2245
+ "epoch": 7.987711213517665,
2246
+ "grad_norm": 0.0001773834228515625,
2247
+ "learning_rate": 0.0008402457757296466,
2248
+ "loss": 0.0,
2249
+ "step": 156000
2250
+ },
2251
+ {
2252
+ "epoch": 8.0,
2253
+ "eval_loss": 2.9280490707606077e-05,
2254
+ "eval_runtime": 0.5361,
2255
+ "eval_samples_per_second": 1865.365,
2256
+ "eval_steps_per_second": 3.731,
2257
+ "step": 156240
2258
+ }
2259
+ ],
2260
+ "logging_steps": 500,
2261
+ "max_steps": 976500,
2262
+ "num_input_tokens_seen": 0,
2263
+ "num_train_epochs": 50,
2264
+ "save_steps": 500,
2265
+ "stateful_callbacks": {
2266
+ "EarlyStoppingCallback": {
2267
+ "args": {
2268
+ "early_stopping_patience": 3,
2269
+ "early_stopping_threshold": 0.0
2270
+ },
2271
+ "attributes": {
2272
+ "early_stopping_patience_counter": 0
2273
+ }
2274
+ },
2275
+ "TrainerControl": {
2276
+ "args": {
2277
+ "should_epoch_stop": false,
2278
+ "should_evaluate": false,
2279
+ "should_log": false,
2280
+ "should_save": true,
2281
+ "should_training_stop": false
2282
+ },
2283
+ "attributes": {}
2284
+ }
2285
+ },
2286
+ "total_flos": 4.132510754955264e+18,
2287
+ "train_batch_size": 512,
2288
+ "trial_name": null,
2289
+ "trial_params": null
2290
+ }
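The file above is the standard Hugging Face Trainer checkpoint state: "log_history" holds one entry per 500 logging steps (loss, grad_norm, learning_rate) plus one evaluation entry at each epoch boundary, and "stateful_callbacks" records an EarlyStoppingCallback with patience 3 watching eval_loss. A minimal sketch of inspecting it, assuming only that the file has been downloaded locally as trainer_state.json:

import json

# Load the checkpoint's trainer state (the JSON shown in the diff above).
with open("trainer_state.json") as f:
    state = json.load(f)

# "log_history" mixes per-step training logs with per-epoch eval entries;
# the eval entries are the ones carrying an "eval_loss" key.
evals = [e for e in state["log_history"] if "eval_loss" in e]
for e in evals:
    print(f"epoch {e['epoch']:.0f}: eval_loss={e['eval_loss']:.2e} at step {e['step']}")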
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d7d47a9dc1255c4869fc594070ddc1cdf2f07b583e4fdf3a9debc6cdf6cf6f2
3
+ size 5304
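training_args.bin is the pickled TrainingArguments object that Trainer saves alongside each checkpoint. A minimal sketch of inspecting it, assuming transformers is importable (its class is needed to unpickle) and noting that recent PyTorch versions require weights_only=False to load non-tensor pickles; the attribute names printed are standard TrainingArguments fields, not values confirmed by this commit:

import torch

# Unpickle the saved TrainingArguments (requires transformers installed).
args = torch.load("training_args.bin", weights_only=False)

# Standard TrainingArguments attributes; actual values come from the run itself.
print(args.num_train_epochs, args.per_device_train_batch_size, args.learning_rate)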