andrew-healey committed on
Commit 2891038 · verified · 1 parent: bf35566

Upload folder using huggingface_hub

attention_kindself_n_heads2_seed1338/args.json CHANGED
@@ -1 +1 @@
-{"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindself_n_heads2_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 1000, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1338", "n_embd": 128}
+{"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_3/attention_kindself_n_heads2_seed1338", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_3", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1338, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1338", "n_embd": 128}
attention_kindself_n_heads2_seed1338/dataloader_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8b2ea67f78ff5a7970d0db044ff7ee527b3dc065f295fd30f588df4b44b568d0
+oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875
 size 964
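The .pt hunks in this commit are Git LFS pointer stubs rather than the tensors themselves, which is why each file is three text lines: a retrained checkpoint changes only the sha256 oid while the byte size stays the same. A minimal sketch of parsing such a pointer, assuming the standard LFS pointer layout (parse_lfs_pointer is illustrative):

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer stub into its space-separated key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875
size 964"""
fields = parse_lfs_pointer(pointer)
assert fields["size"] == "964"
assert fields["oid"].startswith("sha256:")
```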
attention_kindself_n_heads2_seed1338/dataloader_05000.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f6df8947c6ab773db1947914387d3db345a84828521d3a64bae9b652e1b0a410
+size 964
attention_kindself_n_heads2_seed1338/dataloader_07500.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:169891a726a7ff746d1a7aa99f459a66d85ceb4e9f2583f790f5b8501f97b6af
+size 964
attention_kindself_n_heads2_seed1338/dataloader_09999.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e685a568a36c792ccbe7b5fcae0b9d630955e589991190bd8902836cea6a91df
+size 964
attention_kindself_n_heads2_seed1338/log2.txt CHANGED
@@ -1,33 +1,303 @@
-max_steps: 1000
+max_steps: 10000
 0 val loss 11.7416
 0 val perplexity 125697.3672
 0 train 11.753315 (lr=7.5000e-07) (hash(x)=150327452)
-100 val loss 9.5851
-100 val perplexity 14546.3086
-100 train 9.568905 (lr=7.5750e-05) (hash(x)=166441190)
-200 val loss 7.8255
-200 val perplexity 2503.6711
-200 train 8.030132 (lr=1.5000e-04) (hash(x)=166780046)
-300 val loss 7.6187
-300 val perplexity 2035.9526
-300 train 7.591647 (lr=1.4486e-04) (hash(x)=159835303)
-400 val loss 7.5894
-400 val perplexity 1977.2133
-400 train 7.367747 (lr=1.3023e-04) (hash(x)=155040610)
-500 val loss 7.5263
-500 val perplexity 1856.2716
-500 train 7.213230 (lr=1.0833e-04) (hash(x)=130190460)
-600 val loss 7.4244
-600 val perplexity 1676.3500
-600 train 7.441990 (lr=8.2500e-05) (hash(x)=155504036)
-700 val loss 7.3556
-700 val perplexity 1564.9131
-700 train 7.212329 (lr=5.6669e-05) (hash(x)=137347213)
-800 val loss 7.3140
-800 val perplexity 1501.1636
-800 train 7.167484 (lr=3.4770e-05) (hash(x)=143823248)
-900 val loss 7.2899
-900 val perplexity 1465.3568
-900 train 7.409986 (lr=2.0138e-05) (hash(x)=156260416)
-999 val loss 7.2806
-999 val perplexity 1451.7960
+100 val loss 9.6201
+100 val perplexity 15064.4131
+100 train 9.604169 (lr=7.5750e-05) (hash(x)=166441190)
+200 val loss 7.8332
+200 val perplexity 2523.0488
+200 train 8.022912 (lr=1.5000e-04) (hash(x)=166780046)
+300 val loss 7.6313
+300 val perplexity 2061.6272
+300 train 7.611700 (lr=1.4997e-04) (hash(x)=159835303)
+400 val loss 7.5769
+400 val perplexity 1952.6234
+400 train 7.347703 (lr=1.4986e-04) (hash(x)=155040610)
+500 val loss 7.4986
+500 val perplexity 1805.4681
+500 train 7.181684 (lr=1.4969e-04) (hash(x)=130190460)
+600 val loss 7.3565
+600 val perplexity 1566.3457
+600 train 7.384015 (lr=1.4945e-04) (hash(x)=155504036)
+700 val loss 7.2681
+700 val perplexity 1433.8582
+700 train 7.153148 (lr=1.4913e-04) (hash(x)=137347213)
+800 val loss 7.1957
+800 val perplexity 1333.6768
+800 train 7.041455 (lr=1.4876e-04) (hash(x)=143823248)
+900 val loss 7.1419
+900 val perplexity 1263.8528
+900 train 7.212487 (lr=1.4831e-04) (hash(x)=156260416)
+1000 val loss 7.1035
+1000 val perplexity 1216.2551
+1000 train 7.062214 (lr=1.4779e-04) (hash(x)=143734685)
+1100 val loss 7.0765
+1100 val perplexity 1183.8506
+1100 train 7.014541 (lr=1.4721e-04) (hash(x)=160013925)
+1200 val loss 7.0683
+1200 val perplexity 1174.1959
+1200 train 6.797521 (lr=1.4656e-04) (hash(x)=150678249)
+1300 val loss 7.0484
+1300 val perplexity 1150.9941
+1300 train 6.830014 (lr=1.4585e-04) (hash(x)=149073315)
+1400 val loss 7.0032
+1400 val perplexity 1100.1003
+1400 train 7.245300 (lr=1.4507e-04) (hash(x)=175802021)
+1500 val loss 6.9704
+1500 val perplexity 1064.6219
+1500 train 7.214554 (lr=1.4422e-04) (hash(x)=171034639)
+1600 val loss 6.9542
+1600 val perplexity 1047.5050
+1600 train 7.139720 (lr=1.4332e-04) (hash(x)=158681215)
+1700 val loss 6.9351
+1700 val perplexity 1027.7344
+1700 train 6.886345 (lr=1.4235e-04) (hash(x)=152116061)
+1800 val loss 6.9281
+1800 val perplexity 1020.5926
+1800 train 6.856886 (lr=1.4131e-04) (hash(x)=146108145)
+1900 val loss 6.9195
+1900 val perplexity 1011.7723
+1900 train 6.746204 (lr=1.4022e-04) (hash(x)=147598108)
+2000 val loss 6.9275
+2000 val perplexity 1019.9300
+2000 train 6.777522 (lr=1.3907e-04) (hash(x)=154996086)
+2100 val loss 6.9437
+2100 val perplexity 1036.5835
+2100 train 6.720708 (lr=1.3786e-04) (hash(x)=153396183)
+2200 val loss 6.8914
+2200 val perplexity 983.7560
+2200 train 6.887761 (lr=1.3660e-04) (hash(x)=153885445)
+2300 val loss 6.8962
+2300 val perplexity 988.5034
+2300 train 6.891620 (lr=1.3527e-04) (hash(x)=159666385)
+2400 val loss 6.8916
+2400 val perplexity 983.9639
+2400 train 6.878953 (lr=1.3390e-04) (hash(x)=142353087)
+2500 val loss 6.8715
+2500 val perplexity 964.4097
+2500 train 6.866309 (lr=1.3247e-04) (hash(x)=146491718)
+2600 val loss 6.8939
+2600 val perplexity 986.2383
+2600 train 6.818327 (lr=1.3099e-04) (hash(x)=150750353)
+2700 val loss 6.8951
+2700 val perplexity 987.3794
+2700 train 6.645774 (lr=1.2946e-04) (hash(x)=129849193)
+2800 val loss 6.9084
+2800 val perplexity 1000.6135
+2800 train 6.779920 (lr=1.2788e-04) (hash(x)=152767913)
+2900 val loss 6.9086
+2900 val perplexity 1000.8000
+2900 train 6.695253 (lr=1.2626e-04) (hash(x)=146531140)
+3000 val loss 6.9069
+3000 val perplexity 999.1417
+3000 train 6.972168 (lr=1.2459e-04) (hash(x)=151562048)
+3100 val loss 6.8928
+3100 val perplexity 985.1803
+3100 train 6.883137 (lr=1.2287e-04) (hash(x)=146001424)
+3200 val loss 6.9080
+3200 val perplexity 1000.2314
+3200 train 6.950475 (lr=1.2112e-04) (hash(x)=166486165)
+3300 val loss 6.8918
+3300 val perplexity 984.2046
+3300 train 6.821273 (lr=1.1932e-04) (hash(x)=150866680)
+3400 val loss 6.8931
+3400 val perplexity 985.4133
+3400 train 6.814807 (lr=1.1749e-04) (hash(x)=143900419)
+3500 val loss 6.8856
+3500 val perplexity 978.0688
+3500 train 6.742932 (lr=1.1562e-04) (hash(x)=148845794)
+3600 val loss 6.8965
+3600 val perplexity 988.8202
+3600 train 6.752052 (lr=1.1372e-04) (hash(x)=145667796)
+3700 val loss 6.8843
+3700 val perplexity 976.8593
+3700 train 6.895640 (lr=1.1179e-04) (hash(x)=163563851)
+3800 val loss 6.8852
+3800 val perplexity 977.6622
+3800 train 6.841506 (lr=1.0982e-04) (hash(x)=147488689)
+3900 val loss 6.8712
+3900 val perplexity 964.0805
+3900 train 6.865108 (lr=1.0783e-04) (hash(x)=148186608)
+4000 val loss 6.8620
+4000 val perplexity 955.2548
+4000 train 6.724196 (lr=1.0581e-04) (hash(x)=142970187)
+4100 val loss 6.8597
+4100 val perplexity 953.1231
+4100 train 6.846410 (lr=1.0377e-04) (hash(x)=141584883)
+4200 val loss 6.8748
+4200 val perplexity 967.6102
+4200 train 6.687943 (lr=1.0171e-04) (hash(x)=145664585)
+4300 val loss 6.8631
+4300 val perplexity 956.3550
+4300 train 6.722301 (lr=9.9622e-05) (hash(x)=143736499)
+4400 val loss 6.8809
+4400 val perplexity 973.4774
+4400 train 6.711131 (lr=9.7520e-05) (hash(x)=151883322)
+4500 val loss 6.8824
+4500 val perplexity 974.9603
+4500 train 6.796933 (lr=9.5403e-05) (hash(x)=153904871)
+4600 val loss 6.8754
+4600 val perplexity 968.1945
+4600 train 6.904389 (lr=9.3273e-05) (hash(x)=154893521)
+4700 val loss 6.8497
+4700 val perplexity 943.6158
+4700 train 6.898540 (lr=9.1132e-05) (hash(x)=152323949)
+4800 val loss 6.8467
+4800 val perplexity 940.7521
+4800 train 6.815135 (lr=8.8982e-05) (hash(x)=154104619)
+4900 val loss 6.8394
+4900 val perplexity 933.9498
+4900 train 6.894418 (lr=8.6825e-05) (hash(x)=146311426)
+5000 val loss 6.8306
+5000 val perplexity 925.7856
+5000 train 6.867855 (lr=8.4663e-05) (hash(x)=156741847)
+5100 val loss 6.8428
+5100 val perplexity 937.1395
+5100 train 6.661076 (lr=8.2500e-05) (hash(x)=142086346)
+5200 val loss 6.8371
+5200 val perplexity 931.8088
+5200 train 6.645678 (lr=8.0337e-05) (hash(x)=150265428)
+5300 val loss 6.8375
+5300 val perplexity 932.1777
+5300 train 6.740306 (lr=7.8175e-05) (hash(x)=151339108)
+5400 val loss 6.8435
+5400 val perplexity 937.7697
+5400 train 6.829397 (lr=7.6018e-05) (hash(x)=154654372)
+5500 val loss 6.8223
+5500 val perplexity 918.0942
+5500 train 6.819933 (lr=7.3868e-05) (hash(x)=150575051)
+5600 val loss 6.8220
+5600 val perplexity 917.8280
+5600 train 6.706783 (lr=7.1727e-05) (hash(x)=140396423)
+5700 val loss 6.8161
+5700 val perplexity 912.3840
+5700 train 6.713886 (lr=6.9597e-05) (hash(x)=144678758)
+5800 val loss 6.8157
+5800 val perplexity 912.0787
+5800 train 6.931098 (lr=6.7480e-05) (hash(x)=151992743)
+5900 val loss 6.8214
+5900 val perplexity 917.2242
+5900 train 6.631341 (lr=6.5378e-05) (hash(x)=144396927)
+6000 val loss 6.8123
+6000 val perplexity 908.9440
+6000 train 6.779160 (lr=6.3294e-05) (hash(x)=165478625)
+6100 val loss 6.8249
+6100 val perplexity 920.5113
+6100 train 6.614169 (lr=6.1230e-05) (hash(x)=147088621)
+6200 val loss 6.8202
+6200 val perplexity 916.1298
+6200 train 6.632910 (lr=5.9188e-05) (hash(x)=140794994)
+6300 val loss 6.8036
+6300 val perplexity 901.0964
+6300 train 6.729023 (lr=5.7169e-05) (hash(x)=134780906)
+6400 val loss 6.7968
+6400 val perplexity 895.0114
+6400 train 6.793316 (lr=5.5177e-05) (hash(x)=149023655)
+6500 val loss 6.7931
+6500 val perplexity 891.6610
+6500 train 6.701253 (lr=5.3213e-05) (hash(x)=147497796)
+6600 val loss 6.8039
+6600 val perplexity 901.3718
+6600 train 6.673730 (lr=5.1279e-05) (hash(x)=152902689)
+6700 val loss 6.7977
+6700 val perplexity 895.8217
+6700 train 6.848180 (lr=4.9377e-05) (hash(x)=153846046)
+6800 val loss 6.7915
+6800 val perplexity 890.2208
+6800 train 6.912618 (lr=4.7509e-05) (hash(x)=158512738)
+6900 val loss 6.7982
+6900 val perplexity 896.2477
+6900 train 7.119676 (lr=4.5676e-05) (hash(x)=156849968)
+7000 val loss 6.7934
+7000 val perplexity 891.9128
+7000 train 6.619100 (lr=4.3882e-05) (hash(x)=142395855)
+7100 val loss 6.7941
+7100 val perplexity 892.5535
+7100 train 6.689756 (lr=4.2128e-05) (hash(x)=147114884)
+7200 val loss 6.7909
+7200 val perplexity 889.7142
+7200 train 6.818041 (lr=4.0414e-05) (hash(x)=156979839)
+7300 val loss 6.7856
+7300 val perplexity 885.0470
+7300 train 6.568993 (lr=3.8745e-05) (hash(x)=145584373)
+7400 val loss 6.7859
+7400 val perplexity 885.2930
+7400 train 6.541038 (lr=3.7120e-05) (hash(x)=141508204)
+7500 val loss 6.7813
+7500 val perplexity 881.1838
+7500 train 6.823518 (lr=3.5541e-05) (hash(x)=148803965)
+7600 val loss 6.7773
+7600 val perplexity 877.6575
+7600 train 6.834270 (lr=3.4011e-05) (hash(x)=151019676)
+7700 val loss 6.7653
+7700 val perplexity 867.2352
+7700 train 6.744458 (lr=3.2531e-05) (hash(x)=143155750)
+7800 val loss 6.7642
+7800 val perplexity 866.2475
+7800 train 6.768767 (lr=3.1102e-05) (hash(x)=152569653)
+7900 val loss 6.7715
+7900 val perplexity 872.6087
+7900 train 6.631834 (lr=2.9726e-05) (hash(x)=143519455)
+8000 val loss 6.7690
+8000 val perplexity 870.4249
+8000 train 6.949303 (lr=2.8405e-05) (hash(x)=161180944)
+8100 val loss 6.7674
+8100 val perplexity 869.0335
+8100 train 6.787279 (lr=2.7138e-05) (hash(x)=154107345)
+8200 val loss 6.7723
+8200 val perplexity 873.3314
+8200 train 6.770579 (lr=2.5929e-05) (hash(x)=152486517)
+8300 val loss 6.7649
+8300 val perplexity 866.9132
+8300 train 6.811017 (lr=2.4778e-05) (hash(x)=156167749)
+8400 val loss 6.7587
+8400 val perplexity 861.5378
+8400 train 6.823242 (lr=2.3686e-05) (hash(x)=149155006)
+8500 val loss 6.7589
+8500 val perplexity 861.7334
+8500 train 6.719562 (lr=2.2655e-05) (hash(x)=147844390)
+8600 val loss 6.7531
+8600 val perplexity 856.7430
+8600 train 6.996366 (lr=2.1685e-05) (hash(x)=165753320)
+8700 val loss 6.7559
+8700 val perplexity 859.0920
+8700 train 6.559268 (lr=2.0777e-05) (hash(x)=146079979)
+8800 val loss 6.7537
+8800 val perplexity 857.1827
+8800 train 6.958503 (lr=1.9933e-05) (hash(x)=172259509)
+8900 val loss 6.7585
+8900 val perplexity 861.3132
+8900 train 6.468870 (lr=1.9153e-05) (hash(x)=145148314)
+9000 val loss 6.7579
+9000 val perplexity 860.8126
+9000 train 6.682094 (lr=1.8439e-05) (hash(x)=144250633)
+9100 val loss 6.7501
+9100 val perplexity 854.1321
+9100 train 6.842451 (lr=1.7790e-05) (hash(x)=157219797)
+9200 val loss 6.7436
+9200 val perplexity 848.6393
+9200 train 6.705823 (lr=1.7208e-05) (hash(x)=142743778)
+9300 val loss 6.7426
+9300 val perplexity 847.7992
+9300 train 6.634955 (lr=1.6692e-05) (hash(x)=139669771)
+9400 val loss 6.7407
+9400 val perplexity 846.1219
+9400 train 6.705801 (lr=1.6245e-05) (hash(x)=145916843)
+9500 val loss 6.7419
+9500 val perplexity 847.1890
+9500 train 6.666107 (lr=1.5865e-05) (hash(x)=150196125)
+9600 val loss 6.7429
+9600 val perplexity 847.9799
+9600 train 7.004552 (lr=1.5554e-05) (hash(x)=160041419)
+9700 val loss 6.7439
+9700 val perplexity 848.8408
+9700 train 6.564997 (lr=1.5312e-05) (hash(x)=139931627)
+9800 val loss 6.7451
+9800 val perplexity 849.9206
+9800 train 6.690317 (lr=1.5139e-05) (hash(x)=150370792)
+9900 val loss 6.7428
+9900 val perplexity 847.9448
+9900 train 6.630862 (lr=1.5035e-05) (hash(x)=153014886)
+9999 val loss 6.7419
+9999 val perplexity 847.1777
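Two sanity checks on this log. First, "val perplexity" is just exp(val loss): exp(6.7419) ≈ 847.17, matching the final lines up to the printed rounding. Second, the logged learning rates are consistent with a linear warmup over warmup_steps=200 followed by cosine decay from max_lr=1.5e-4 down to a floor of 0.1 × max_lr, which is why the old run (max_steps 1000) had decayed to ~2.0e-5 by step 900 while the new run stays near max_lr for thousands of steps. A sketch of that schedule, reconstructed from the logged values rather than taken from the repo:

```python
import math

MAX_LR = 1.5e-4        # "max_lr" in args.json
MIN_LR = 0.1 * MAX_LR  # inferred floor: the last logged lr approaches 1.5e-5
WARMUP_STEPS = 200     # "warmup_steps" in args.json
MAX_STEPS = 10_000     # "max_steps" in the new args.json

def get_lr(step: int) -> float:
    """Linear warmup, then cosine decay from MAX_LR to MIN_LR."""
    if step < WARMUP_STEPS:
        return MAX_LR * (step + 1) / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (MAX_STEPS - WARMUP_STEPS)
    coeff = 0.5 * (1.0 + math.cos(math.pi * progress))  # decays 1 -> 0
    return MIN_LR + coeff * (MAX_LR - MIN_LR)

print(f"{get_lr(0):.4e}")         # 7.5000e-07, matches the step-0 line
print(f"{get_lr(5100):.4e}")      # 8.2500e-05, matches the step-5100 line
print(f"{math.exp(6.7419):.4f}")  # ~847.17, matches the 9999 perplexity up to rounding
```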
attention_kindself_n_heads2_seed1338/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2b8bf63e616b2995a387f712418fa13b0c65b7c4c7db97c18b07262c31afb621
+oid sha256:5b73aa1ac7459fe80fa7b731b144cd02b3c6701ed4b9bfd960608895bfe2749a
 size 38587970
attention_kindself_n_heads2_seed1338/model_05000.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6bcda143c4a6b4349980475f8babe36cfb3615523852f295b762631c8e0ec174
+size 38587970
attention_kindself_n_heads2_seed1338/model_07500.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fca5d36b680500b16fb77ecfb7784d38e08a5e813e40273d3c4f8360fba90c86
+size 38587970
attention_kindself_n_heads2_seed1338/model_09999.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:100a485858bb1e5b4b45958235cfa1485cff4256bc4c97dd275ea02227d29b83
+size 38587970
attention_kindself_n_heads2_seed1338/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:969dbc2e3eeb64f68cda0363e463553aef315cfddffab0e27429ace5fbf80957
+oid sha256:3a87762ff5148cf595c61c3f52de365f5f62b26aae66a126fabe6cc543830491
 size 70895430
attention_kindself_n_heads2_seed1338/optimizer_05000.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8167874cb7ac007e632a1b1085cae22e83522bf0de428a29ce32827be3e50974
+size 70895430
attention_kindself_n_heads2_seed1338/optimizer_07500.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1727f180c4df8f144b9e4a86143d154ce8b5aa77daef829b0413115038e1b132
+size 70895430
attention_kindself_n_heads2_seed1338/optimizer_09999.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8d238c2e252457a15c0e94d3e4ebd740f99e8ff5e7d121b1378b4e47cb57f60
+size 70895430
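Since every oid above is the SHA-256 of the real file, a downloaded checkpoint can be verified against its pointer. A minimal sketch, assuming the file has been fetched locally first (e.g. with huggingface_hub.hf_hub_download; the local path is illustrative):

```python
import hashlib

def sha256_of(path: str) -> str:
    """Stream a file through SHA-256 and return the hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Should print the oid from the optimizer_09999.pt pointer above:
# c8d238c2e252457a15c0e94d3e4ebd740f99e8ff5e7d121b1378b4e47cb57f60
print(sha256_of("attention_kindself_n_heads2_seed1338/optimizer_09999.pt"))
```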