andrew-healey committed
Commit fbbd0d0 · verified · 1 Parent(s): a149f24

Upload folder using huggingface_hub

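The commit message above says the run directory was pushed with huggingface_hub. A minimal sketch of how such a commit is typically produced, assuming the `upload_folder` API; the `repo_id` and local paths below are hypothetical, since the excerpt does not name the repository:

```python
# Hedged sketch of producing a commit like this one with huggingface_hub.
# repo_id and folder_path are hypothetical; the excerpt names neither.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="wider_is_better_4/attention_kindself_n_heads2_seed1341",  # local run dir
    path_in_repo="attention_kindself_n_heads2_seed1341",  # folder name in the repo
    repo_id="andrew-healey/wider_is_better_4",  # hypothetical repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```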
attention_kindself_n_heads2_seed1341/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindself_n_heads2_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1341", "n_embd": 128}
attention_kindself_n_heads2_seed1341/dataloader_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875
+ size 964
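This and the other `.pt` entries in the commit are Git LFS pointer files, not the tensors themselves: a `version` line, a `sha256` object id, and the payload size in bytes (964 bytes here). A small sketch of parsing that three-line format:

```python
# Sketch: parse a Git LFS pointer file of the form shown above
# (version / oid sha256:<hex> / size <bytes>). This reads the pointer
# text, not the payload it stands for.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    assert fields["version"] == "https://git-lfs.github.com/spec/v1"
    algo, _, digest = fields["oid"].partition(":")
    return {"algo": algo, "digest": digest, "size": int(fields["size"])}

ptr = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875\n"
    "size 964\n"
)
print(ptr["digest"][:12], ptr["size"])  # db41c5e55131 964
```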
attention_kindself_n_heads2_seed1341/dataloader_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6df8947c6ab773db1947914387d3db345a84828521d3a64bae9b652e1b0a410
+ size 964
attention_kindself_n_heads2_seed1341/dataloader_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:169891a726a7ff746d1a7aa99f459a66d85ceb4e9f2583f790f5b8501f97b6af
+ size 964
attention_kindself_n_heads2_seed1341/dataloader_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e685a568a36c792ccbe7b5fcae0b9d630955e589991190bd8902836cea6a91df
+ size 964
attention_kindself_n_heads2_seed1341/log2.txt ADDED
@@ -0,0 +1,303 @@
+ max_steps: 10000
+ 0 val loss 11.7485
+ 0 val perplexity 126564.2812
+ 0 train 11.765417 (lr=7.5000e-07) (hash(x)=145079536)
+ 100 val loss 9.4784
+ 100 val perplexity 13074.5156
+ 100 train 9.419608 (lr=7.5750e-05) (hash(x)=154745873)
+ 200 val loss 7.8020
+ 200 val perplexity 2445.3750
+ 200 train 7.989964 (lr=1.5000e-04) (hash(x)=155800595)
+ 300 val loss 7.6031
+ 300 val perplexity 2004.4130
+ 300 train 7.563851 (lr=1.4997e-04) (hash(x)=148595389)
+ 400 val loss 7.5158
+ 400 val perplexity 1836.9117
+ 400 train 7.429459 (lr=1.4986e-04) (hash(x)=145606733)
+ 500 val loss 7.4147
+ 500 val perplexity 1660.1486
+ 500 train 7.209660 (lr=1.4969e-04) (hash(x)=138221231)
+ 600 val loss 7.3171
+ 600 val perplexity 1505.7720
+ 600 train 7.316947 (lr=1.4945e-04) (hash(x)=150367139)
+ 700 val loss 7.2359
+ 700 val perplexity 1388.3401
+ 700 train 7.397117 (lr=1.4913e-04) (hash(x)=155579314)
+ 800 val loss 7.1930
+ 800 val perplexity 1330.1278
+ 800 train 7.265964 (lr=1.4876e-04) (hash(x)=155747374)
+ 900 val loss 7.1470
+ 900 val perplexity 1270.2546
+ 900 train 7.368423 (lr=1.4831e-04) (hash(x)=159334575)
+ 1000 val loss 7.1074
+ 1000 val perplexity 1220.9775
+ 1000 train 6.987814 (lr=1.4779e-04) (hash(x)=140604760)
+ 1100 val loss 7.0742
+ 1100 val perplexity 1181.1272
+ 1100 train 6.914634 (lr=1.4721e-04) (hash(x)=146208052)
+ 1200 val loss 7.0423
+ 1200 val perplexity 1143.9719
+ 1200 train 6.952001 (lr=1.4656e-04) (hash(x)=148404734)
+ 1300 val loss 7.0121
+ 1300 val perplexity 1109.9885
+ 1300 train 6.945094 (lr=1.4585e-04) (hash(x)=155681970)
+ 1400 val loss 6.9783
+ 1400 val perplexity 1073.1346
+ 1400 train 6.930018 (lr=1.4507e-04) (hash(x)=148115934)
+ 1500 val loss 6.9871
+ 1500 val perplexity 1082.5601
+ 1500 train 7.123736 (lr=1.4422e-04) (hash(x)=157074034)
+ 1600 val loss 6.9358
+ 1600 val perplexity 1028.4413
+ 1600 train 6.715860 (lr=1.4332e-04) (hash(x)=137464699)
+ 1700 val loss 6.9239
+ 1700 val perplexity 1016.2919
+ 1700 train 7.034598 (lr=1.4235e-04) (hash(x)=166955614)
+ 1800 val loss 6.9125
+ 1800 val perplexity 1004.7353
+ 1800 train 6.741632 (lr=1.4131e-04) (hash(x)=143886042)
+ 1900 val loss 6.8980
+ 1900 val perplexity 990.3217
+ 1900 train 7.446872 (lr=1.4022e-04) (hash(x)=193610391)
+ 2000 val loss 6.9033
+ 2000 val perplexity 995.5397
+ 2000 train 7.087366 (lr=1.3907e-04) (hash(x)=163799796)
+ 2100 val loss 6.8726
+ 2100 val perplexity 965.4910
+ 2100 train 6.842040 (lr=1.3786e-04) (hash(x)=154107339)
+ 2200 val loss 6.8655
+ 2200 val perplexity 958.5820
+ 2200 train 6.853259 (lr=1.3660e-04) (hash(x)=144592844)
+ 2300 val loss 6.8478
+ 2300 val perplexity 941.7633
+ 2300 train 6.925510 (lr=1.3527e-04) (hash(x)=154535861)
+ 2400 val loss 6.8561
+ 2400 val perplexity 949.6703
+ 2400 train 7.553051 (lr=1.3390e-04) (hash(x)=204706354)
+ 2500 val loss 6.8432
+ 2500 val perplexity 937.5167
+ 2500 train 6.673442 (lr=1.3247e-04) (hash(x)=143783202)
+ 2600 val loss 6.8431
+ 2600 val perplexity 937.3611
+ 2600 train 6.760664 (lr=1.3099e-04) (hash(x)=150862210)
+ 2700 val loss 6.8435
+ 2700 val perplexity 937.7397
+ 2700 train 6.881901 (lr=1.2946e-04) (hash(x)=150753426)
+ 2800 val loss 6.8609
+ 2800 val perplexity 954.1826
+ 2800 train 6.834103 (lr=1.2788e-04) (hash(x)=147766811)
+ 2900 val loss 6.8580
+ 2900 val perplexity 951.4316
+ 2900 train 6.800954 (lr=1.2626e-04) (hash(x)=151177814)
+ 3000 val loss 6.8636
+ 3000 val perplexity 956.8106
+ 3000 train 6.742915 (lr=1.2459e-04) (hash(x)=135925327)
+ 3100 val loss 6.8565
+ 3100 val perplexity 950.0213
+ 3100 train 6.527127 (lr=1.2287e-04) (hash(x)=124842353)
+ 3200 val loss 6.8793
+ 3200 val perplexity 971.9348
+ 3200 train 6.991030 (lr=1.2112e-04) (hash(x)=160440642)
+ 3300 val loss 6.8749
+ 3300 val perplexity 967.7066
+ 3300 train 6.796642 (lr=1.1932e-04) (hash(x)=143017131)
+ 3400 val loss 6.8772
+ 3400 val perplexity 969.8584
+ 3400 train 7.140552 (lr=1.1749e-04) (hash(x)=148143474)
+ 3500 val loss 6.8898
+ 3500 val perplexity 982.2486
+ 3500 train 6.721855 (lr=1.1562e-04) (hash(x)=148732639)
+ 3600 val loss 6.9119
+ 3600 val perplexity 1004.1658
+ 3600 train 6.894956 (lr=1.1372e-04) (hash(x)=146333803)
+ 3700 val loss 6.9203
+ 3700 val perplexity 1012.6199
+ 3700 train 6.769926 (lr=1.1179e-04) (hash(x)=143611445)
+ 3800 val loss 6.9329
+ 3800 val perplexity 1025.4385
+ 3800 train 6.760955 (lr=1.0982e-04) (hash(x)=141272115)
+ 3900 val loss 6.9786
+ 3900 val perplexity 1073.4069
+ 3900 train 6.905924 (lr=1.0783e-04) (hash(x)=149161848)
+ 4000 val loss 6.9697
+ 4000 val perplexity 1063.8679
+ 4000 train 7.017169 (lr=1.0581e-04) (hash(x)=152036955)
+ 4100 val loss 6.9576
+ 4100 val perplexity 1051.1597
+ 4100 train 7.015798 (lr=1.0377e-04) (hash(x)=153260968)
+ 4200 val loss 6.9468
+ 4200 val perplexity 1039.8118
+ 4200 train 6.828518 (lr=1.0171e-04) (hash(x)=147629478)
+ 4300 val loss 6.9483
+ 4300 val perplexity 1041.4170
+ 4300 train 6.986649 (lr=9.9622e-05) (hash(x)=161701080)
+ 4400 val loss 6.9648
+ 4400 val perplexity 1058.7490
+ 4400 train 7.014188 (lr=9.7520e-05) (hash(x)=155290524)
+ 4500 val loss 6.9779
+ 4500 val perplexity 1072.6814
+ 4500 train 6.799006 (lr=9.5403e-05) (hash(x)=144726593)
+ 4600 val loss 6.9727
+ 4600 val perplexity 1067.1399
+ 4600 train 6.923756 (lr=9.3273e-05) (hash(x)=149517202)
+ 4700 val loss 6.9861
+ 4700 val perplexity 1081.5153
+ 4700 train 6.972135 (lr=9.1132e-05) (hash(x)=139403008)
+ 4800 val loss 6.9899
+ 4800 val perplexity 1085.6399
+ 4800 train 7.032064 (lr=8.8982e-05) (hash(x)=153387238)
+ 4900 val loss 6.9891
+ 4900 val perplexity 1084.7205
+ 4900 train 6.754529 (lr=8.6825e-05) (hash(x)=139881569)
+ 5000 val loss 7.0118
+ 5000 val perplexity 1109.6436
+ 5000 train 6.992263 (lr=8.4663e-05) (hash(x)=152602580)
+ 5100 val loss 7.0029
+ 5100 val perplexity 1099.8224
+ 5100 train 7.053523 (lr=8.2500e-05) (hash(x)=150598460)
+ 5200 val loss 7.0302
+ 5200 val perplexity 1130.2755
+ 5200 train 6.957996 (lr=8.0337e-05) (hash(x)=147660496)
+ 5300 val loss 7.0439
+ 5300 val perplexity 1145.8979
+ 5300 train 6.824490 (lr=7.8175e-05) (hash(x)=147693848)
+ 5400 val loss 7.0494
+ 5400 val perplexity 1152.1111
+ 5400 train 6.819526 (lr=7.6018e-05) (hash(x)=136048145)
+ 5500 val loss 7.0286
+ 5500 val perplexity 1128.4321
+ 5500 train 6.952648 (lr=7.3868e-05) (hash(x)=140313123)
+ 5600 val loss 7.0243
+ 5600 val perplexity 1123.5532
+ 5600 train 7.072820 (lr=7.1727e-05) (hash(x)=156997993)
+ 5700 val loss 7.0190
+ 5700 val perplexity 1117.7191
+ 5700 train 6.996371 (lr=6.9597e-05) (hash(x)=143513880)
+ 5800 val loss 7.0359
+ 5800 val perplexity 1136.7084
+ 5800 train 7.186930 (lr=6.7480e-05) (hash(x)=153736628)
+ 5900 val loss 7.0234
+ 5900 val perplexity 1122.5753
+ 5900 train 7.216059 (lr=6.5378e-05) (hash(x)=166385417)
+ 6000 val loss 7.0429
+ 6000 val perplexity 1144.7292
+ 6000 train 7.260878 (lr=6.3294e-05) (hash(x)=153216517)
+ 6100 val loss 7.0557
+ 6100 val perplexity 1159.4033
+ 6100 train 6.955714 (lr=6.1230e-05) (hash(x)=145387545)
+ 6200 val loss 7.0218
+ 6200 val perplexity 1120.8168
+ 6200 train 7.143466 (lr=5.9188e-05) (hash(x)=156306460)
+ 6300 val loss 7.0178
+ 6300 val perplexity 1116.2905
+ 6300 train 6.915298 (lr=5.7169e-05) (hash(x)=148610447)
+ 6400 val loss 7.0173
+ 6400 val perplexity 1115.7765
+ 6400 train 7.196865 (lr=5.5177e-05) (hash(x)=158343535)
+ 6500 val loss 7.0138
+ 6500 val perplexity 1111.9158
+ 6500 train 7.139856 (lr=5.3213e-05) (hash(x)=157742557)
+ 6600 val loss 7.0123
+ 6600 val perplexity 1110.1829
+ 6600 train 6.970760 (lr=5.1279e-05) (hash(x)=150457308)
+ 6700 val loss 7.0039
+ 6700 val perplexity 1100.9420
+ 6700 train 6.917010 (lr=4.9377e-05) (hash(x)=142426205)
+ 6800 val loss 6.9906
+ 6800 val perplexity 1086.3260
+ 6800 train 6.841436 (lr=4.7509e-05) (hash(x)=151522525)
+ 6900 val loss 6.9892
+ 6900 val perplexity 1084.8425
+ 6900 train 6.885376 (lr=4.5676e-05) (hash(x)=152029630)
+ 7000 val loss 6.9861
+ 7000 val perplexity 1081.5116
+ 7000 train 6.869016 (lr=4.3882e-05) (hash(x)=142978954)
+ 7100 val loss 6.9814
+ 7100 val perplexity 1076.4751
+ 7100 train 6.878259 (lr=4.2128e-05) (hash(x)=148064359)
+ 7200 val loss 6.9847
+ 7200 val perplexity 1080.0104
+ 7200 train 6.921792 (lr=4.0414e-05) (hash(x)=155771845)
+ 7300 val loss 6.9786
+ 7300 val perplexity 1073.4514
+ 7300 train 6.795961 (lr=3.8745e-05) (hash(x)=141639844)
+ 7400 val loss 6.9686
+ 7400 val perplexity 1062.6931
+ 7400 train 7.014091 (lr=3.7120e-05) (hash(x)=153466021)
+ 7500 val loss 6.9593
+ 7500 val perplexity 1052.8451
+ 7500 train 6.973252 (lr=3.5541e-05) (hash(x)=148843587)
+ 7600 val loss 6.9536
+ 7600 val perplexity 1046.9158
+ 7600 train 7.142570 (lr=3.4011e-05) (hash(x)=152053933)
+ 7700 val loss 6.9655
+ 7700 val perplexity 1059.4176
+ 7700 train 6.750750 (lr=3.2531e-05) (hash(x)=141564003)
+ 7800 val loss 6.9516
+ 7800 val perplexity 1044.7963
+ 7800 train 6.831956 (lr=3.1102e-05) (hash(x)=148617843)
+ 7900 val loss 6.9453
+ 7900 val perplexity 1038.2882
+ 7900 train 6.908850 (lr=2.9726e-05) (hash(x)=146417668)
+ 8000 val loss 6.9433
+ 8000 val perplexity 1036.2242
+ 8000 train 6.803242 (lr=2.8405e-05) (hash(x)=150165488)
+ 8100 val loss 6.9308
+ 8100 val perplexity 1023.3010
+ 8100 train 6.904036 (lr=2.7138e-05) (hash(x)=144818855)
+ 8200 val loss 6.9245
+ 8200 val perplexity 1016.8993
+ 8200 train 6.961853 (lr=2.5929e-05) (hash(x)=143074954)
+ 8300 val loss 6.9278
+ 8300 val perplexity 1020.2126
+ 8300 train 6.859095 (lr=2.4778e-05) (hash(x)=145172494)
+ 8400 val loss 6.9240
+ 8400 val perplexity 1016.4125
+ 8400 train 6.922443 (lr=2.3686e-05) (hash(x)=152284393)
+ 8500 val loss 6.9218
+ 8500 val perplexity 1014.1067
+ 8500 train 6.807056 (lr=2.2655e-05) (hash(x)=147044705)
+ 8600 val loss 6.9201
+ 8600 val perplexity 1012.4186
+ 8600 train 6.835523 (lr=2.1685e-05) (hash(x)=147762608)
+ 8700 val loss 6.9165
+ 8700 val perplexity 1008.7327
+ 8700 train 6.911353 (lr=2.0777e-05) (hash(x)=141994567)
+ 8800 val loss 6.9159
+ 8800 val perplexity 1008.1883
+ 8800 train 6.944641 (lr=1.9933e-05) (hash(x)=152552896)
+ 8900 val loss 6.9124
+ 8900 val perplexity 1004.6610
+ 8900 train 6.772640 (lr=1.9153e-05) (hash(x)=139714454)
+ 9000 val loss 6.9093
+ 9000 val perplexity 1001.4971
+ 9000 train 6.958855 (lr=1.8439e-05) (hash(x)=152274419)
+ 9100 val loss 6.9083
+ 9100 val perplexity 1000.5162
+ 9100 train 7.023602 (lr=1.7790e-05) (hash(x)=160536885)
+ 9200 val loss 6.9114
+ 9200 val perplexity 1003.6890
+ 9200 train 6.744764 (lr=1.7208e-05) (hash(x)=138386899)
+ 9300 val loss 6.9087
+ 9300 val perplexity 1000.9141
+ 9300 train 6.904637 (lr=1.6692e-05) (hash(x)=149957457)
+ 9400 val loss 6.9071
+ 9400 val perplexity 999.3447
+ 9400 train 7.003169 (lr=1.6245e-05) (hash(x)=161671548)
+ 9500 val loss 6.9127
+ 9500 val perplexity 1004.9370
+ 9500 train 7.034237 (lr=1.5865e-05) (hash(x)=171857400)
+ 9600 val loss 6.9064
+ 9600 val perplexity 998.6649
+ 9600 train 6.631417 (lr=1.5554e-05) (hash(x)=135378471)
+ 9700 val loss 6.9055
+ 9700 val perplexity 997.7429
+ 9700 train 6.824327 (lr=1.5312e-05) (hash(x)=149514591)
+ 9800 val loss 6.9051
+ 9800 val perplexity 997.3576
+ 9800 train 6.875938 (lr=1.5139e-05) (hash(x)=152679072)
+ 9900 val loss 6.9097
+ 9900 val perplexity 1001.9810
+ 9900 train 6.841558 (lr=1.5035e-05) (hash(x)=150415193)
+ 9999 val loss 6.9041
+ 9999 val perplexity 996.3181
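The log pairs each evaluation with both a loss and a perplexity, and the perplexity column is consistent with `exp(val loss)`; the learning-rate column likewise shows the configured 200-step warmup to 1.5e-4 followed by decay to roughly 1.5e-5. A quick sanity check and a minimal parse, using values from the log above (the file read assumes the raw log2.txt without the leading "+ " diff markers):

```python
# Sanity check: "val perplexity" in log2.txt tracks exp(val loss).
import math
import re

print(math.exp(11.7485))  # ~126565, vs. logged 126564.2812 at step 0
print(math.exp(6.9041))   # ~996.35, vs. logged 996.3181 at step 9999

# Minimal parse of the "N val loss X" lines.
with open("attention_kindself_n_heads2_seed1341/log2.txt") as f:
    pairs = re.findall(r"^(\d+) val loss ([\d.]+)$", f.read(), flags=re.M)
best_step, best_loss = min(pairs, key=lambda p: float(p[1]))
print(best_step, best_loss)  # 2600 6.8431 per the log above
```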
attention_kindself_n_heads2_seed1341/model_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:77ed0f7d4653690c071f763652be4c24339c49c1aaf59bbd59dde8f9bfd77b4d
+ size 38587970
attention_kindself_n_heads2_seed1341/model_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e676a70847e2cd0c45a6022f8fb780c6f363d2cc5aec763d9ad8b11925a5a278
+ size 38587970
attention_kindself_n_heads2_seed1341/model_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fbb92e6a73f27c026033df1c43df475c25ea1bace8e425bd876bc18fb0c1c14
+ size 38587970
attention_kindself_n_heads2_seed1341/model_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab0bbb0a3d853852359e3c2a794b1008491b1776966827f857a7a4f837eb9a17
+ size 38587970
attention_kindself_n_heads2_seed1341/optimizer_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c8775e66b7f5f93dc88fdfdc905853fa6e114da9e5dd4ef8343334bc32e20c1
+ size 70895430
attention_kindself_n_heads2_seed1341/optimizer_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08ab1f75ddfa99ee6abfbae19643ac708f6b2b4e230359892d77635503e18718
+ size 70895430
attention_kindself_n_heads2_seed1341/optimizer_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a28f04aae3faf44fb13e2ae82ed23f947df716d02ff8dd0015876bc0966e48d4
+ size 70895430
attention_kindself_n_heads2_seed1341/optimizer_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6531d46b69edf8d3eb3c505f69dd46829f338364b9bf2b90995a9da877f54548
+ size 70895430
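Checkpoints are saved at steps 2500, 5000, 7500, and 9999 as matched model/optimizer/dataloader triplets, which lines up with the `resume_checkpoint` and `resume_optimizer` options in args.json. A hedged sketch of loading the final triplet; the internal layout of each `.pt` payload is an assumption, since it depends on what the training script saved:

```python
# Hedged sketch: restore the final checkpoint triplet (model / optimizer /
# dataloader state at step 9999). torch.load returns whatever the training
# script saved (e.g. a plain state_dict or a wrapping dict); the layout
# below is an assumption.
import torch

run = "attention_kindself_n_heads2_seed1341"
model_state = torch.load(f"{run}/model_09999.pt", map_location="cpu")
optim_state = torch.load(f"{run}/optimizer_09999.pt", map_location="cpu")
loader_state = torch.load(f"{run}/dataloader_09999.pt", map_location="cpu")

# model.load_state_dict(model_state)      # assumes a plain state_dict
# optimizer.load_state_dict(optim_state)  # mirrors resume_optimizer in args.json
print(type(model_state), type(optim_state), type(loader_state))
```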