andrew-healey committed
Commit a149f24 · verified · 1 Parent(s): d830177

Upload folder using huggingface_hub
attention_kindself_n_heads2_seed1340/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindself_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 100, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 80, "total_batch_size": 20480, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_20480_2_1340", "n_embd": 128}
 
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindself_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1340", "n_embd": 128}
attention_kindself_n_heads2_seed1340/dataloader_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875
+ size 964
attention_kindself_n_heads2_seed1340/dataloader_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6df8947c6ab773db1947914387d3db345a84828521d3a64bae9b652e1b0a410
+ size 964
attention_kindself_n_heads2_seed1340/dataloader_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:169891a726a7ff746d1a7aa99f459a66d85ceb4e9f2583f790f5b8501f97b6af
+ size 964
attention_kindself_n_heads2_seed1340/dataloader_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e685a568a36c792ccbe7b5fcae0b9d630955e589991190bd8902836cea6a91df
+ size 964
attention_kindself_n_heads2_seed1340/log2.txt CHANGED
@@ -1,8 +1,303 @@
- max_steps: 100
- 0 val loss 11.7860
- 0 val perplexity 131404.2344
- 0 train 11.783506 (lr=7.5000e-07) (hash(x)=108792797)
- 50 val loss 10.2867
- 50 val perplexity 29340.1914
- 99 val loss 9.6755
- 99 val perplexity 15922.8945
+ max_steps: 10000
+ 0 val loss 11.7850
+ 0 val perplexity 131263.8281
+ 0 train 11.783751 (lr=7.5000e-07) (hash(x)=164406924)
+ 100 val loss 9.5740
+ 100 val perplexity 14385.7539
+ 100 train 9.921918 (lr=7.5750e-05) (hash(x)=177407419)
+ 200 val loss 7.9257
+ 200 val perplexity 2767.4197
+ 200 train 7.846782 (lr=1.5000e-04) (hash(x)=144903932)
+ 300 val loss 7.6414
+ 300 val perplexity 2082.7278
+ 300 train 7.977518 (lr=1.4997e-04) (hash(x)=173839165)
+ 400 val loss 7.5895
+ 400 val perplexity 1977.3179
+ 400 train 7.678867 (lr=1.4986e-04) (hash(x)=167734596)
+ 500 val loss 7.4751
+ 500 val perplexity 1763.6210
+ 500 train 7.504052 (lr=1.4969e-04) (hash(x)=153224076)
+ 600 val loss 7.3653
+ 600 val perplexity 1580.1370
+ 600 train 7.281658 (lr=1.4945e-04) (hash(x)=149619098)
+ 700 val loss 7.2957
+ 700 val perplexity 1473.9863
+ 700 train 7.237281 (lr=1.4913e-04) (hash(x)=146539909)
+ 800 val loss 7.2312
+ 800 val perplexity 1381.8326
+ 800 train 7.211849 (lr=1.4876e-04) (hash(x)=153710890)
+ 900 val loss 7.1839
+ 900 val perplexity 1318.0844
+ 900 train 7.129651 (lr=1.4831e-04) (hash(x)=155873620)
+ 1000 val loss 7.1559
+ 1000 val perplexity 1281.5947
+ 1000 train 7.079763 (lr=1.4779e-04) (hash(x)=145450636)
+ 1100 val loss 7.1203
+ 1100 val perplexity 1236.8442
+ 1100 train 7.152329 (lr=1.4721e-04) (hash(x)=154123388)
+ 1200 val loss 7.0945
+ 1200 val perplexity 1205.2845
+ 1200 train 6.972703 (lr=1.4656e-04) (hash(x)=145249251)
+ 1300 val loss 7.0616
+ 1300 val perplexity 1166.2850
+ 1300 train 6.977951 (lr=1.4585e-04) (hash(x)=148937127)
+ 1400 val loss 7.0520
+ 1400 val perplexity 1155.1272
+ 1400 train 7.145495 (lr=1.4507e-04) (hash(x)=150475545)
+ 1500 val loss 7.0578
+ 1500 val perplexity 1161.9419
+ 1500 train 7.006241 (lr=1.4422e-04) (hash(x)=154653428)
+ 1600 val loss 7.0461
+ 1600 val perplexity 1148.4038
+ 1600 train 7.037293 (lr=1.4332e-04) (hash(x)=144483776)
+ 1700 val loss 7.0419
+ 1700 val perplexity 1143.5295
+ 1700 train 7.204151 (lr=1.4235e-04) (hash(x)=157395496)
+ 1800 val loss 7.0301
+ 1800 val perplexity 1130.1246
+ 1800 train 7.051272 (lr=1.4131e-04) (hash(x)=157916369)
+ 1900 val loss 7.0318
+ 1900 val perplexity 1132.0376
+ 1900 train 7.213277 (lr=1.4022e-04) (hash(x)=166073923)
+ 2000 val loss 7.0333
+ 2000 val perplexity 1133.7172
+ 2000 train 7.098075 (lr=1.3907e-04) (hash(x)=154856891)
+ 2100 val loss 7.0248
+ 2100 val perplexity 1124.2070
+ 2100 train 7.025970 (lr=1.3786e-04) (hash(x)=151925203)
+ 2200 val loss 7.0277
+ 2200 val perplexity 1127.4404
+ 2200 train 6.762675 (lr=1.3660e-04) (hash(x)=136191502)
+ 2300 val loss 7.0135
+ 2300 val perplexity 1111.4832
+ 2300 train 7.118563 (lr=1.3527e-04) (hash(x)=153273362)
+ 2400 val loss 7.0093
+ 2400 val perplexity 1106.8591
+ 2400 train 6.947052 (lr=1.3390e-04) (hash(x)=148021541)
+ 2500 val loss 6.9992
+ 2500 val perplexity 1095.7874
+ 2500 train 6.928439 (lr=1.3247e-04) (hash(x)=141356608)
+ 2600 val loss 6.9854
+ 2600 val perplexity 1080.7104
+ 2600 train 6.947350 (lr=1.3099e-04) (hash(x)=146005217)
+ 2700 val loss 6.9816
+ 2700 val perplexity 1076.6876
+ 2700 train 6.828278 (lr=1.2946e-04) (hash(x)=144511718)
+ 2800 val loss 6.9653
+ 2800 val perplexity 1059.2560
+ 2800 train 6.858193 (lr=1.2788e-04) (hash(x)=146019502)
+ 2900 val loss 6.9852
+ 2900 val perplexity 1080.5378
+ 2900 train 6.887365 (lr=1.2626e-04) (hash(x)=146496200)
+ 3000 val loss 6.9667
+ 3000 val perplexity 1060.7450
+ 3000 train 6.898639 (lr=1.2459e-04) (hash(x)=150127281)
+ 3100 val loss 6.9651
+ 3100 val perplexity 1059.0126
+ 3100 train 6.930223 (lr=1.2287e-04) (hash(x)=142022255)
+ 3200 val loss 6.9589
+ 3200 val perplexity 1052.4626
+ 3200 train 7.018985 (lr=1.2112e-04) (hash(x)=154120875)
+ 3300 val loss 6.9767
+ 3300 val perplexity 1071.3512
+ 3300 train 7.028908 (lr=1.1932e-04) (hash(x)=153999717)
+ 3400 val loss 6.9871
+ 3400 val perplexity 1082.5930
+ 3400 train 6.806038 (lr=1.1749e-04) (hash(x)=139694097)
+ 3500 val loss 6.9942
+ 3500 val perplexity 1090.3142
+ 3500 train 7.172703 (lr=1.1562e-04) (hash(x)=162992732)
+ 3600 val loss 7.0057
+ 3600 val perplexity 1102.8688
+ 3600 train 6.995212 (lr=1.1372e-04) (hash(x)=147574101)
+ 3700 val loss 7.0180
+ 3700 val perplexity 1116.4977
+ 3700 train 7.104524 (lr=1.1179e-04) (hash(x)=157763099)
+ 3800 val loss 7.0007
+ 3800 val perplexity 1097.4445
+ 3800 train 7.165614 (lr=1.0982e-04) (hash(x)=170800034)
+ 3900 val loss 6.9947
+ 3900 val perplexity 1090.8093
+ 3900 train 7.080643 (lr=1.0783e-04) (hash(x)=164984528)
+ 4000 val loss 7.0219
+ 4000 val perplexity 1120.9375
+ 4000 train 6.913085 (lr=1.0581e-04) (hash(x)=141743323)
+ 4100 val loss 7.0270
+ 4100 val perplexity 1126.6466
+ 4100 train 7.062133 (lr=1.0377e-04) (hash(x)=153392872)
+ 4200 val loss 7.0313
+ 4200 val perplexity 1131.4791
+ 4200 train 6.917869 (lr=1.0171e-04) (hash(x)=149074933)
+ 4300 val loss 7.0305
+ 4300 val perplexity 1130.6038
+ 4300 train 7.356206 (lr=9.9622e-05) (hash(x)=167823423)
+ 4400 val loss 7.0137
+ 4400 val perplexity 1111.8049
+ 4400 train 6.783382 (lr=9.7520e-05) (hash(x)=141203114)
+ 4500 val loss 7.0465
+ 4500 val perplexity 1148.7822
+ 4500 train 7.041791 (lr=9.5403e-05) (hash(x)=146284780)
+ 4600 val loss 7.0304
+ 4600 val perplexity 1130.5154
+ 4600 train 6.864891 (lr=9.3273e-05) (hash(x)=141126464)
+ 4700 val loss 7.0183
+ 4700 val perplexity 1116.8528
+ 4700 train 7.025716 (lr=9.1132e-05) (hash(x)=154751926)
+ 4800 val loss 7.0227
+ 4800 val perplexity 1121.8369
+ 4800 train 7.103513 (lr=8.8982e-05) (hash(x)=154793198)
+ 4900 val loss 7.0128
+ 4900 val perplexity 1110.7892
+ 4900 train 6.800518 (lr=8.6825e-05) (hash(x)=139406392)
+ 5000 val loss 7.0039
+ 5000 val perplexity 1100.8984
+ 5000 train 6.796443 (lr=8.4663e-05) (hash(x)=153548741)
+ 5100 val loss 6.9985
+ 5100 val perplexity 1095.0018
+ 5100 train 7.045188 (lr=8.2500e-05) (hash(x)=160488568)
+ 5200 val loss 6.9949
+ 5200 val perplexity 1091.0116
+ 5200 train 7.002579 (lr=8.0337e-05) (hash(x)=149645053)
+ 5300 val loss 6.9837
+ 5300 val perplexity 1078.9227
+ 5300 train 7.054364 (lr=7.8175e-05) (hash(x)=155820556)
+ 5400 val loss 7.0153
+ 5400 val perplexity 1113.5081
+ 5400 train 6.957558 (lr=7.6018e-05) (hash(x)=147538134)
+ 5500 val loss 7.0064
+ 5500 val perplexity 1103.7095
+ 5500 train 7.222918 (lr=7.3868e-05) (hash(x)=166889307)
+ 5600 val loss 6.9996
+ 5600 val perplexity 1096.1846
+ 5600 train 6.758119 (lr=7.1727e-05) (hash(x)=139516699)
+ 5700 val loss 6.9948
+ 5700 val perplexity 1090.8945
+ 5700 train 6.732394 (lr=6.9597e-05) (hash(x)=140453511)
+ 5800 val loss 7.0181
+ 5800 val perplexity 1116.6265
+ 5800 train 6.979203 (lr=6.7480e-05) (hash(x)=162964847)
+ 5900 val loss 6.9981
+ 5900 val perplexity 1094.6046
+ 5900 train 7.015715 (lr=6.5378e-05) (hash(x)=150606634)
+ 6000 val loss 6.9796
+ 6000 val perplexity 1074.4781
+ 6000 train 7.086629 (lr=6.3294e-05) (hash(x)=149890857)
+ 6100 val loss 6.9583
+ 6100 val perplexity 1051.8706
+ 6100 train 7.058212 (lr=6.1230e-05) (hash(x)=173884145)
+ 6200 val loss 6.9536
+ 6200 val perplexity 1046.9213
+ 6200 train 6.991053 (lr=5.9188e-05) (hash(x)=151987098)
+ 6300 val loss 6.9523
+ 6300 val perplexity 1045.5223
+ 6300 train 6.878451 (lr=5.7169e-05) (hash(x)=148853562)
+ 6400 val loss 6.9578
+ 6400 val perplexity 1051.3311
+ 6400 train 6.752846 (lr=5.5177e-05) (hash(x)=141530101)
+ 6500 val loss 6.9590
+ 6500 val perplexity 1052.5645
+ 6500 train 6.840059 (lr=5.3213e-05) (hash(x)=142297809)
+ 6600 val loss 6.9567
+ 6600 val perplexity 1050.2007
+ 6600 train 6.832217 (lr=5.1279e-05) (hash(x)=142447782)
+ 6700 val loss 6.9620
+ 6700 val perplexity 1055.7686
+ 6700 train 6.890851 (lr=4.9377e-05) (hash(x)=147004686)
+ 6800 val loss 6.9580
+ 6800 val perplexity 1051.4945
+ 6800 train 6.712971 (lr=4.7509e-05) (hash(x)=133438702)
+ 6900 val loss 6.9632
+ 6900 val perplexity 1057.0248
+ 6900 train 6.953128 (lr=4.5676e-05) (hash(x)=157085143)
+ 7000 val loss 6.9596
+ 7000 val perplexity 1053.2207
+ 7000 train 6.863688 (lr=4.3882e-05) (hash(x)=139437666)
+ 7100 val loss 6.9593
+ 7100 val perplexity 1052.9325
+ 7100 train 7.042618 (lr=4.2128e-05) (hash(x)=159792986)
+ 7200 val loss 6.9537
+ 7200 val perplexity 1046.9686
+ 7200 train 6.918267 (lr=4.0414e-05) (hash(x)=144930687)
+ 7300 val loss 6.9589
+ 7300 val perplexity 1052.4857
+ 7300 train 7.072165 (lr=3.8745e-05) (hash(x)=156242690)
+ 7400 val loss 6.9562
+ 7400 val perplexity 1049.6550
+ 7400 train 6.846870 (lr=3.7120e-05) (hash(x)=148183719)
+ 7500 val loss 6.9638
+ 7500 val perplexity 1057.6418
+ 7500 train 6.969202 (lr=3.5541e-05) (hash(x)=152494758)
+ 7600 val loss 6.9693
+ 7600 val perplexity 1063.5234
+ 7600 train 6.731656 (lr=3.4011e-05) (hash(x)=142485027)
+ 7700 val loss 6.9700
+ 7700 val perplexity 1064.2169
+ 7700 train 6.836930 (lr=3.2531e-05) (hash(x)=147512165)
+ 7800 val loss 6.9714
+ 7800 val perplexity 1065.7069
+ 7800 train 6.926557 (lr=3.1102e-05) (hash(x)=160346994)
+ 7900 val loss 6.9710
+ 7900 val perplexity 1065.2572
+ 7900 train 6.821373 (lr=2.9726e-05) (hash(x)=144488254)
+ 8000 val loss 6.9635
+ 8000 val perplexity 1057.3303
+ 8000 train 6.803780 (lr=2.8405e-05) (hash(x)=147637019)
+ 8100 val loss 6.9626
+ 8100 val perplexity 1056.4025
+ 8100 train 6.837252 (lr=2.7138e-05) (hash(x)=147340534)
+ 8200 val loss 6.9577
+ 8200 val perplexity 1051.2007
+ 8200 train 6.985175 (lr=2.5929e-05) (hash(x)=151630665)
+ 8300 val loss 6.9486
+ 8300 val perplexity 1041.6604
+ 8300 train 6.984809 (lr=2.4778e-05) (hash(x)=149747064)
+ 8400 val loss 6.9489
+ 8400 val perplexity 1042.0428
+ 8400 train 7.122874 (lr=2.3686e-05) (hash(x)=154245770)
+ 8500 val loss 6.9517
+ 8500 val perplexity 1044.9717
+ 8500 train 6.851659 (lr=2.2655e-05) (hash(x)=152559100)
+ 8600 val loss 6.9506
+ 8600 val perplexity 1043.7401
+ 8600 train 7.547762 (lr=2.1685e-05) (hash(x)=181365926)
+ 8700 val loss 6.9468
+ 8700 val perplexity 1039.7740
+ 8700 train 6.828834 (lr=2.0777e-05) (hash(x)=154405991)
+ 8800 val loss 6.9442
+ 8800 val perplexity 1037.1665
+ 8800 train 6.917340 (lr=1.9933e-05) (hash(x)=153755904)
+ 8900 val loss 6.9421
+ 8900 val perplexity 1034.9789
+ 8900 train 6.898244 (lr=1.9153e-05) (hash(x)=152120568)
+ 9000 val loss 6.9374
+ 9000 val perplexity 1030.0432
+ 9000 train 6.777120 (lr=1.8439e-05) (hash(x)=142797279)
+ 9100 val loss 6.9372
+ 9100 val perplexity 1029.9150
+ 9100 train 6.816922 (lr=1.7790e-05) (hash(x)=143037503)
+ 9200 val loss 6.9377
+ 9200 val perplexity 1030.3806
+ 9200 train 6.680987 (lr=1.7208e-05) (hash(x)=113690273)
+ 9300 val loss 6.9365
+ 9300 val perplexity 1029.1438
+ 9300 train 6.874697 (lr=1.6692e-05) (hash(x)=158025077)
+ 9400 val loss 6.9350
+ 9400 val perplexity 1027.6565
+ 9400 train 7.008310 (lr=1.6245e-05) (hash(x)=158251718)
+ 9500 val loss 6.9362
+ 9500 val perplexity 1028.8671
+ 9500 train 6.972598 (lr=1.5865e-05) (hash(x)=154752610)
+ 9600 val loss 6.9330
+ 9600 val perplexity 1025.6161
+ 9600 train 6.812103 (lr=1.5554e-05) (hash(x)=146889093)
+ 9700 val loss 6.9289
+ 9700 val perplexity 1021.3994
+ 9700 train 6.929122 (lr=1.5312e-05) (hash(x)=156906516)
+ 9800 val loss 6.9319
+ 9800 val perplexity 1024.4108
+ 9800 train 6.816188 (lr=1.5139e-05) (hash(x)=153841927)
+ 9900 val loss 6.9264
+ 9900 val perplexity 1018.7877
+ 9900 train 7.127296 (lr=1.5035e-05) (hash(x)=163514334)
+ 9999 val loss 6.9252
+ 9999 val perplexity 1017.6002
attention_kindself_n_heads2_seed1340/model_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d0cb281ffa5b73b76269d698422eae36153d1106009d88bebdb89ad528a8269
+ size 38587970
attention_kindself_n_heads2_seed1340/model_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0120590c4423fcf6b02fe6999d70767574bde22f027fec516ae0888e9c0e667
+ size 38587970
attention_kindself_n_heads2_seed1340/model_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed1f8f8595bc20eb8922496b53fa25c2fbf0d335f0abc3512640f1d56ca26f6d
+ size 38587970
attention_kindself_n_heads2_seed1340/model_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c113c1a8d1636453b9f7aa9539512fd0704d0edce3082f16d260238e3fd59c22
+ size 38587970
attention_kindself_n_heads2_seed1340/optimizer_02500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6db01261ceef93360527719c31ea07ae96d4fc420b42e3db7a8d9fc6d1515c0d
+ size 70895430
attention_kindself_n_heads2_seed1340/optimizer_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a9dc6f53d0f4493aa90ed43b8268caf9b65543c9f042fd964a9cf556254d2ef
+ size 70895430
attention_kindself_n_heads2_seed1340/optimizer_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b29970e210453c390b64db07af5ecd2bb54b8aec0f26a23ae2f4b6b0be5c90f6
+ size 70895430
attention_kindself_n_heads2_seed1340/optimizer_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9abb8d87d2e7fdcfd93cd0d080cfcb97eabecdccb343db7cf720086ab43b9625
+ size 70895430
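
Note: below is a minimal sketch of how these uploaded artifacts could be fetched and inspected, assuming the run directory sits in a Hugging Face model repo (the repo id used here is a placeholder, not part of this commit) and that the .pt files are ordinary torch.save outputs. It relies only on huggingface_hub.hf_hub_download and torch.load.

import json

import torch
from huggingface_hub import hf_hub_download

REPO_ID = "your-username/your-repo"  # placeholder: the repo id is not shown in this commit
RUN_DIR = "attention_kindself_n_heads2_seed1340"

# Run configuration (the JSON shown in the args.json diff above).
args_path = hf_hub_download(repo_id=REPO_ID, filename=f"{RUN_DIR}/args.json")
with open(args_path) as f:
    args = json.load(f)
print(args["n_heads"], args["max_steps"], args["total_batch_size"])

# Final checkpoint; hf_hub_download resolves the Git LFS pointer to the real ~38 MB file.
ckpt_path = hf_hub_download(repo_id=REPO_ID, filename=f"{RUN_DIR}/model_09999.pt")
state = torch.load(ckpt_path, map_location="cpu")
# The checkpoint layout is not documented in this commit, so just inspect the top level.
print(list(state.keys()) if isinstance(state, dict) else type(state))

The dataloader_*.pt and optimizer_*.pt files added in this commit follow the same pattern at steps 2500, 5000, 7500, and 9999.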