Commit af25225 (verified) by andrew-healey
Parent(s): 2891038

Upload folder using huggingface_hub

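The commit message above is the default produced by `upload_folder` in `huggingface_hub`. A minimal sketch of how a run directory like this one could be pushed; the repo id and local path below are hypothetical placeholders, not taken from this commit:

```python
# Sketch only (not the author's script): push a local run directory to the Hub.
# repo_id and folder_path are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()  # assumes a token is already configured (e.g. `huggingface-cli login`)
api.upload_folder(
    repo_id="andrew-healey/selective-attention-runs",      # placeholder repo id
    folder_path="attention_kindself_n_heads2_seed1339",    # local run directory
    path_in_repo="attention_kindself_n_heads2_seed1339",   # mirror the layout seen in this repo
    commit_message="Upload folder using huggingface_hub",  # the default message shown above
)
```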
attention_kindself_n_heads2_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindself_n_heads2_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 1000, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1339", "n_embd": 128}
 
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_3/attention_kindself_n_heads2_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_3", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": true, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_30720_2_1339", "n_embd": 128}
attention_kindself_n_heads2_seed1339/dataloader_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8b2ea67f78ff5a7970d0db044ff7ee527b3dc065f295fd30f588df4b44b568d0
  size 964
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:db41c5e5513137877487a93451adf8ec4ed2448ab6e9471ebd5595c8e3293875
  size 964
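The .pt files themselves live in Git LFS; the diff above only shows the pointer files (spec version, sha256 oid, byte size). A sketch for checking a locally downloaded blob against such a pointer, with placeholder paths:

```python
# Sketch: verify a downloaded LFS object against its pointer file (paths are placeholders).
import hashlib

def read_pointer(path):
    """Parse a git-lfs pointer file ("key value" per line) into a dict."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = read_pointer("dataloader_02500.pt.pointer")  # the three-line pointer shown above
with open("dataloader_02500.pt", "rb") as f:           # the actual LFS blob
    digest = hashlib.sha256(f.read()).hexdigest()

assert pointer["oid"] == "sha256:" + digest, "checksum mismatch"
assert int(pointer["size"]) == 964, "unexpected size"
```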
attention_kindself_n_heads2_seed1339/dataloader_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6df8947c6ab773db1947914387d3db345a84828521d3a64bae9b652e1b0a410
+ size 964
attention_kindself_n_heads2_seed1339/dataloader_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:169891a726a7ff746d1a7aa99f459a66d85ceb4e9f2583f790f5b8501f97b6af
+ size 964
attention_kindself_n_heads2_seed1339/dataloader_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e685a568a36c792ccbe7b5fcae0b9d630955e589991190bd8902836cea6a91df
+ size 964
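Each dataloader_*.pt file is only 964 bytes, so it presumably holds a small resumption state rather than data. A sketch for inspecting one, assuming a local copy of the file; the structure of the saved object is not documented here, so the code only reports what it finds:

```python
# Sketch: inspect a dataloader checkpoint (path is a placeholder for a local copy).
import torch

state = torch.load("dataloader_09999.pt", map_location="cpu")
print(type(state))
if isinstance(state, dict):
    for key, value in state.items():
        print(f"{key}: {type(value).__name__}")
```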
attention_kindself_n_heads2_seed1339/log2.txt CHANGED
@@ -1,33 +1,303 @@
- max_steps: 1000
  0 val loss 11.7353
  0 val perplexity 124899.7500
  0 train 11.734357 (lr=7.5000e-07) (hash(x)=150724848)
- 100 val loss 9.8470
- 100 val perplexity 18902.1074
- 100 train 9.889466 (lr=7.5750e-05) (hash(x)=149217061)
- 200 val loss 8.0909
- 200 val perplexity 3264.6475
- 200 train 8.076604 (lr=1.5000e-04) (hash(x)=149910534)
- 300 val loss 7.6713
- 300 val perplexity 2145.9644
- 300 train 7.565024 (lr=1.4486e-04) (hash(x)=142185643)
- 400 val loss 7.6198
- 400 val perplexity 2038.1362
- 400 train 7.653818 (lr=1.3023e-04) (hash(x)=148123706)
- 500 val loss 7.5713
- 500 val perplexity 1941.6194
- 500 train 7.423750 (lr=1.0833e-04) (hash(x)=149952383)
- 600 val loss 7.5234
- 600 val perplexity 1850.8201
- 600 train 7.326363 (lr=8.2500e-05) (hash(x)=146678221)
- 700 val loss 7.4437
- 700 val perplexity 1709.0602
- 700 train 7.472978 (lr=5.6669e-05) (hash(x)=156180736)
- 800 val loss 7.3997
- 800 val perplexity 1635.4878
- 800 train 7.385680 (lr=3.4770e-05) (hash(x)=151700982)
- 900 val loss 7.3755
- 900 val perplexity 1596.3323
- 900 train 7.181541 (lr=2.0138e-05) (hash(x)=147288467)
- 999 val loss 7.3566
- 999 val perplexity 1566.4429

+ max_steps: 10000
  0 val loss 11.7353
  0 val perplexity 124899.7500
  0 train 11.734357 (lr=7.5000e-07) (hash(x)=150724848)
+ 100 val loss 9.7726
+ 100 val perplexity 17545.4785
+ 100 train 9.813877 (lr=7.5750e-05) (hash(x)=149217061)
+ 200 val loss 8.0307
+ 200 val perplexity 3073.7659
+ 200 train 8.014332 (lr=1.5000e-04) (hash(x)=149910534)
+ 300 val loss 7.6553
+ 300 val perplexity 2111.8521
+ 300 train 7.533482 (lr=1.4997e-04) (hash(x)=142185643)
+ 400 val loss 7.5761
+ 400 val perplexity 1950.9631
+ 400 train 7.601968 (lr=1.4986e-04) (hash(x)=148123706)
+ 500 val loss 7.4882
+ 500 val perplexity 1786.7975
+ 500 train 7.325721 (lr=1.4969e-04) (hash(x)=149952383)
+ 600 val loss 7.4041
+ 600 val perplexity 1642.7119
+ 600 train 7.203185 (lr=1.4945e-04) (hash(x)=146678221)
+ 700 val loss 7.3024
+ 700 val perplexity 1483.8464
+ 700 train 7.340023 (lr=1.4913e-04) (hash(x)=156180736)
+ 800 val loss 7.2771
+ 800 val perplexity 1446.7216
+ 800 train 7.261787 (lr=1.4876e-04) (hash(x)=151700982)
+ 900 val loss 7.2097
+ 900 val perplexity 1352.5447
+ 900 train 6.998355 (lr=1.4831e-04) (hash(x)=147288467)
+ 1000 val loss 7.1825
+ 1000 val perplexity 1316.2076
+ 1000 train 7.251370 (lr=1.4779e-04) (hash(x)=156182087)
+ 1100 val loss 7.1756
+ 1100 val perplexity 1307.2065
+ 1100 train 7.030709 (lr=1.4721e-04) (hash(x)=147861550)
+ 1200 val loss 7.1420
+ 1200 val perplexity 1263.9703
+ 1200 train 6.998680 (lr=1.4656e-04) (hash(x)=149318660)
+ 1300 val loss 7.1447
+ 1300 val perplexity 1267.4147
+ 1300 train 6.989931 (lr=1.4585e-04) (hash(x)=151631103)
+ 1400 val loss 7.1348
+ 1400 val perplexity 1254.8351
+ 1400 train 7.153018 (lr=1.4507e-04) (hash(x)=150482428)
+ 1500 val loss 7.1031
+ 1500 val perplexity 1215.7200
+ 1500 train 6.968789 (lr=1.4422e-04) (hash(x)=144458617)
+ 1600 val loss 7.0542
+ 1600 val perplexity 1157.7150
+ 1600 train 6.957231 (lr=1.4332e-04) (hash(x)=143268605)
+ 1700 val loss 7.0536
+ 1700 val perplexity 1157.0317
+ 1700 train 6.998997 (lr=1.4235e-04) (hash(x)=150817011)
+ 1800 val loss 7.0532
+ 1800 val perplexity 1156.5905
+ 1800 train 7.054293 (lr=1.4131e-04) (hash(x)=152322423)
+ 1900 val loss 7.0219
+ 1900 val perplexity 1120.9279
+ 1900 train 6.839796 (lr=1.4022e-04) (hash(x)=143574126)
+ 2000 val loss 7.0374
+ 2000 val perplexity 1138.4637
+ 2000 train 6.824109 (lr=1.3907e-04) (hash(x)=147904298)
+ 2100 val loss 7.0565
+ 2100 val perplexity 1160.4254
+ 2100 train 6.824200 (lr=1.3786e-04) (hash(x)=144719400)
+ 2200 val loss 7.0406
+ 2200 val perplexity 1142.0664
+ 2200 train 7.274655 (lr=1.3660e-04) (hash(x)=154343147)
+ 2300 val loss 7.0282
+ 2300 val perplexity 1128.0421
+ 2300 train 7.071688 (lr=1.3527e-04) (hash(x)=154372635)
+ 2400 val loss 7.0393
+ 2400 val perplexity 1140.5922
+ 2400 train 6.903203 (lr=1.3390e-04) (hash(x)=141843115)
+ 2500 val loss 7.0292
+ 2500 val perplexity 1129.1168
+ 2500 train 6.928387 (lr=1.3247e-04) (hash(x)=149417679)
+ 2600 val loss 7.0380
+ 2600 val perplexity 1139.1198
+ 2600 train 6.913923 (lr=1.3099e-04) (hash(x)=145279030)
+ 2700 val loss 7.0337
+ 2700 val perplexity 1134.2703
+ 2700 train 6.934816 (lr=1.2946e-04) (hash(x)=146363911)
+ 2800 val loss 7.0472
+ 2800 val perplexity 1149.6542
+ 2800 train 7.011040 (lr=1.2788e-04) (hash(x)=152507639)
+ 2900 val loss 7.0594
+ 2900 val perplexity 1163.7246
+ 2900 train 6.877071 (lr=1.2626e-04) (hash(x)=139296191)
+ 3000 val loss 7.0639
+ 3000 val perplexity 1169.0483
+ 3000 train 7.075006 (lr=1.2459e-04) (hash(x)=148473774)
+ 3100 val loss 7.0261
+ 3100 val perplexity 1125.5903
+ 3100 train 7.102339 (lr=1.2287e-04) (hash(x)=154130527)
+ 3200 val loss 7.0256
+ 3200 val perplexity 1125.0581
+ 3200 train 6.960464 (lr=1.2112e-04) (hash(x)=151117002)
+ 3300 val loss 7.0234
+ 3300 val perplexity 1122.5508
+ 3300 train 7.141053 (lr=1.1932e-04) (hash(x)=151035383)
+ 3400 val loss 7.0216
+ 3400 val perplexity 1120.6094
+ 3400 train 7.131077 (lr=1.1749e-04) (hash(x)=150614749)
+ 3500 val loss 7.0022
+ 3500 val perplexity 1099.0701
+ 3500 train 7.045189 (lr=1.1562e-04) (hash(x)=151431391)
+ 3600 val loss 7.0122
+ 3600 val perplexity 1110.0785
+ 3600 train 6.969791 (lr=1.1372e-04) (hash(x)=151140797)
+ 3700 val loss 7.0076
+ 3700 val perplexity 1104.9702
+ 3700 train 7.197464 (lr=1.1179e-04) (hash(x)=154608340)
+ 3800 val loss 7.0241
+ 3800 val perplexity 1123.4337
+ 3800 train 7.019115 (lr=1.0982e-04) (hash(x)=169641309)
+ 3900 val loss 7.0132
+ 3900 val perplexity 1111.1599
+ 3900 train 6.957850 (lr=1.0783e-04) (hash(x)=150592479)
+ 4000 val loss 6.9882
+ 4000 val perplexity 1083.7604
+ 4000 train 6.972362 (lr=1.0581e-04) (hash(x)=155495317)
+ 4100 val loss 6.9885
+ 4100 val perplexity 1084.1361
+ 4100 train 7.163860 (lr=1.0377e-04) (hash(x)=170323970)
+ 4200 val loss 6.9760
+ 4200 val perplexity 1070.6144
+ 4200 train 6.787168 (lr=1.0171e-04) (hash(x)=140854360)
+ 4300 val loss 6.9719
+ 4300 val perplexity 1066.2782
+ 4300 train 6.963425 (lr=9.9622e-05) (hash(x)=148926644)
+ 4400 val loss 6.9518
+ 4400 val perplexity 1044.9806
+ 4400 train 6.990151 (lr=9.7520e-05) (hash(x)=149997871)
+ 4500 val loss 6.9349
+ 4500 val perplexity 1027.5555
+ 4500 train 6.909260 (lr=9.5403e-05) (hash(x)=149219289)
+ 4600 val loss 6.9528
+ 4600 val perplexity 1046.0680
+ 4600 train 6.752494 (lr=9.3273e-05) (hash(x)=139836197)
+ 4700 val loss 6.9502
+ 4700 val perplexity 1043.3162
+ 4700 train 6.911660 (lr=9.1132e-05) (hash(x)=150722883)
+ 4800 val loss 6.9306
+ 4800 val perplexity 1023.1166
+ 4800 train 7.115446 (lr=8.8982e-05) (hash(x)=166864648)
+ 4900 val loss 6.9272
+ 4900 val perplexity 1019.5853
+ 4900 train 7.025172 (lr=8.6825e-05) (hash(x)=157746743)
+ 5000 val loss 6.9264
+ 5000 val perplexity 1018.8072
+ 5000 train 6.886820 (lr=8.4663e-05) (hash(x)=153032257)
+ 5100 val loss 6.9071
+ 5100 val perplexity 999.3232
+ 5100 train 6.971920 (lr=8.2500e-05) (hash(x)=151862371)
+ 5200 val loss 6.9168
+ 5200 val perplexity 1009.1252
+ 5200 train 7.182480 (lr=8.0337e-05) (hash(x)=168862995)
+ 5300 val loss 6.9219
+ 5300 val perplexity 1014.2078
+ 5300 train 6.877322 (lr=7.8175e-05) (hash(x)=148243567)
+ 5400 val loss 6.9206
+ 5400 val perplexity 1012.9241
+ 5400 train 6.864289 (lr=7.6018e-05) (hash(x)=145381742)
+ 5500 val loss 6.9204
+ 5500 val perplexity 1012.7020
+ 5500 train 7.048699 (lr=7.3868e-05) (hash(x)=155602174)
+ 5600 val loss 6.9294
+ 5600 val perplexity 1021.8431
+ 5600 train 7.007695 (lr=7.1727e-05) (hash(x)=160637672)
+ 5700 val loss 6.9148
+ 5700 val perplexity 1007.0419
+ 5700 train 6.867439 (lr=6.9597e-05) (hash(x)=152101131)
+ 5800 val loss 6.9122
+ 5800 val perplexity 1004.4901
+ 5800 train 6.883580 (lr=6.7480e-05) (hash(x)=152327089)
+ 5900 val loss 6.9251
+ 5900 val perplexity 1017.4454
+ 5900 train 6.865512 (lr=6.5378e-05) (hash(x)=151094543)
+ 6000 val loss 6.9199
+ 6000 val perplexity 1012.2385
+ 6000 train 6.998046 (lr=6.3294e-05) (hash(x)=160233315)
+ 6100 val loss 6.9092
+ 6100 val perplexity 1001.4684
+ 6100 train 6.989634 (lr=6.1230e-05) (hash(x)=158295337)
+ 6200 val loss 6.9051
+ 6200 val perplexity 997.3576
+ 6200 train 6.864357 (lr=5.9188e-05) (hash(x)=151269817)
+ 6300 val loss 6.9087
+ 6300 val perplexity 1000.9571
+ 6300 train 6.993814 (lr=5.7169e-05) (hash(x)=156912756)
+ 6400 val loss 6.9069
+ 6400 val perplexity 999.1902
+ 6400 train 6.829133 (lr=5.5177e-05) (hash(x)=125779637)
+ 6500 val loss 6.9165
+ 6500 val perplexity 1008.7678
+ 6500 train 6.815157 (lr=5.3213e-05) (hash(x)=148157661)
+ 6600 val loss 6.9218
+ 6600 val perplexity 1014.1851
+ 6600 train 6.967198 (lr=5.1279e-05) (hash(x)=152820049)
+ 6700 val loss 6.9065
+ 6700 val perplexity 998.7644
+ 6700 train 6.869339 (lr=4.9377e-05) (hash(x)=155818717)
+ 6800 val loss 6.9026
+ 6800 val perplexity 994.8915
+ 6800 train 6.959887 (lr=4.7509e-05) (hash(x)=158833881)
+ 6900 val loss 6.8969
+ 6900 val perplexity 989.2281
+ 6900 train 6.941722 (lr=4.5676e-05) (hash(x)=164984909)
+ 7000 val loss 6.8960
+ 7000 val perplexity 988.3021
+ 7000 train 6.765996 (lr=4.3882e-05) (hash(x)=151172493)
+ 7100 val loss 6.8978
+ 7100 val perplexity 990.1153
+ 7100 train 6.869718 (lr=4.2128e-05) (hash(x)=154023633)
+ 7200 val loss 6.9033
+ 7200 val perplexity 995.5060
+ 7200 train 6.771121 (lr=4.0414e-05) (hash(x)=139351250)
+ 7300 val loss 6.8981
+ 7300 val perplexity 990.3571
+ 7300 train 6.861828 (lr=3.8745e-05) (hash(x)=158719458)
+ 7400 val loss 6.8909
+ 7400 val perplexity 983.3278
+ 7400 train 7.006841 (lr=3.7120e-05) (hash(x)=162250810)
+ 7500 val loss 6.8890
+ 7500 val perplexity 981.3918
+ 7500 train 6.747966 (lr=3.5541e-05) (hash(x)=142827557)
+ 7600 val loss 6.8887
+ 7600 val perplexity 981.1364
+ 7600 train 6.809592 (lr=3.4011e-05) (hash(x)=147830801)
+ 7700 val loss 6.8842
+ 7700 val perplexity 976.6763
+ 7700 train 6.998799 (lr=3.2531e-05) (hash(x)=165233314)
+ 7800 val loss 6.8781
+ 7800 val perplexity 970.7328
+ 7800 train 6.888107 (lr=3.1102e-05) (hash(x)=160654717)
+ 7900 val loss 6.8828
+ 7900 val perplexity 975.3890
+ 7900 train 6.760226 (lr=2.9726e-05) (hash(x)=140712586)
+ 8000 val loss 6.8813
+ 8000 val perplexity 973.8698
+ 8000 train 6.973397 (lr=2.8405e-05) (hash(x)=150309061)
+ 8100 val loss 6.8759
+ 8100 val perplexity 968.6202
+ 8100 train 6.917377 (lr=2.7138e-05) (hash(x)=158805466)
+ 8200 val loss 6.8768
+ 8200 val perplexity 969.5352
+ 8200 train 6.721768 (lr=2.5929e-05) (hash(x)=142814078)
+ 8300 val loss 6.8779
+ 8300 val perplexity 970.5875
+ 8300 train 6.822116 (lr=2.4778e-05) (hash(x)=148340417)
+ 8400 val loss 6.8837
+ 8400 val perplexity 976.2219
+ 8400 train 6.829408 (lr=2.3686e-05) (hash(x)=157132639)
+ 8500 val loss 6.8814
+ 8500 val perplexity 973.9942
+ 8500 train 6.841990 (lr=2.2655e-05) (hash(x)=152994240)
+ 8600 val loss 6.8780
+ 8600 val perplexity 970.6819
+ 8600 train 6.828690 (lr=2.1685e-05) (hash(x)=152210154)
+ 8700 val loss 6.8737
+ 8700 val perplexity 966.5657
+ 8700 train 6.898990 (lr=2.0777e-05) (hash(x)=149631247)
+ 8800 val loss 6.8730
+ 8800 val perplexity 965.8230
+ 8800 train 6.727984 (lr=1.9933e-05) (hash(x)=145617784)
+ 8900 val loss 6.8711
+ 8900 val perplexity 964.0148
+ 8900 train 6.975079 (lr=1.9153e-05) (hash(x)=155984970)
+ 9000 val loss 6.8723
+ 9000 val perplexity 965.1191
+ 9000 train 6.648523 (lr=1.8439e-05) (hash(x)=143433013)
+ 9100 val loss 6.8737
+ 9100 val perplexity 966.4919
+ 9100 train 6.767800 (lr=1.7790e-05) (hash(x)=152507533)
+ 9200 val loss 6.8703
+ 9200 val perplexity 963.2626
+ 9200 train 6.991444 (lr=1.7208e-05) (hash(x)=162400650)
+ 9300 val loss 6.8667
+ 9300 val perplexity 959.7634
+ 9300 train 6.878949 (lr=1.6692e-05) (hash(x)=148419068)
+ 9400 val loss 6.8682
+ 9400 val perplexity 961.1745
+ 9400 train 6.837451 (lr=1.6245e-05) (hash(x)=145025169)
+ 9500 val loss 6.8684
+ 9500 val perplexity 961.3817
+ 9500 train 6.826965 (lr=1.5865e-05) (hash(x)=145104235)
+ 9600 val loss 6.8702
+ 9600 val perplexity 963.1565
+ 9600 train 6.798606 (lr=1.5554e-05) (hash(x)=150499797)
+ 9700 val loss 6.8707
+ 9700 val perplexity 963.6577
+ 9700 train 6.689303 (lr=1.5312e-05) (hash(x)=139690022)
+ 9800 val loss 6.8689
+ 9800 val perplexity 961.9287
+ 9800 train 6.852724 (lr=1.5139e-05) (hash(x)=145645279)
+ 9900 val loss 6.8711
+ 9900 val perplexity 964.0272
+ 9900 train 6.785625 (lr=1.5035e-05) (hash(x)=150103432)
+ 9999 val loss 6.8705
+ 9999 val perplexity 963.4500
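With the longer schedule the run now trains for 10000 steps, and validation loss falls from 11.74 at step 0 to about 6.87 (perplexity ≈ 963) at step 9999, versus 7.36 after the earlier 1000-step run. The logged perplexity is simply exp(val loss), which a quick parse of the log confirms (the path is a placeholder for a local copy):

```python
# Sketch: parse log2.txt (placeholder path) and check that the logged perplexity
# matches exp(val loss) at every evaluation step.
import math
import re

val_loss, val_ppl = {}, {}
with open("log2.txt") as f:
    for line in f:
        if m := re.match(r"(\d+) val loss ([\d.]+)", line):
            val_loss[int(m[1])] = float(m[2])
        elif m := re.match(r"(\d+) val perplexity ([\d.]+)", line):
            val_ppl[int(m[1])] = float(m[2])

for step, loss in sorted(val_loss.items()):
    assert abs(math.exp(loss) - val_ppl[step]) / val_ppl[step] < 1e-3

last = max(val_loss)
print(f"step {last}: val loss {val_loss[last]:.4f}, perplexity {val_ppl[last]:.1f}")
```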
attention_kindself_n_heads2_seed1339/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:526a65c0524517edd3cafd2f637f997cf653665a3f310fa034b04ee1dfc3f921
  size 38587970
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:3fc1b63627195057a4c942a3e917c1a8efa782e2aedfdaf127b672b7c6ea3c51
  size 38587970
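A sketch for pulling one of these model checkpoints from the Hub and inspecting it; the repo id is a placeholder, and since the diff does not document whether the file is a bare state_dict or a wrapper dict, the code only reports what it finds:

```python
# Sketch: download and inspect a model checkpoint. repo_id is a hypothetical placeholder;
# the layout of the saved object is unknown, so this only prints its top-level keys.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="andrew-healey/selective-attention-runs",  # placeholder
    filename="attention_kindself_n_heads2_seed1339/model_09999.pt",
)
ckpt = torch.load(path, map_location="cpu")
print(type(ckpt))
if isinstance(ckpt, dict):
    for key in list(ckpt)[:10]:
        print(key)
```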
attention_kindself_n_heads2_seed1339/model_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43e11c87f8189366a1a631654e8af438fc6c9400b004304dc7c83f448e482398
+ size 38587970
attention_kindself_n_heads2_seed1339/model_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04d5c4a169b8e36ad6535c9f1ce1eb1b912a5c2f4f09b0d7aa76882590689750
+ size 38587970
attention_kindself_n_heads2_seed1339/model_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a9e52db128e16a735f111343b41231db5a987c26b72b31b76645b4e54fc6da37
+ size 38587970
attention_kindself_n_heads2_seed1339/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:48a5746ce89866de6d5e633cc5c9dc61607046c68d5f9936c4ae9c2f173da070
  size 70895430
 
  version https://git-lfs.github.com/spec/v1
+ oid sha256:86f627707e6c065305b2c0d7bfdab20334ff5e0a3584495015ef8c448e8f144d
  size 70895430
attention_kindself_n_heads2_seed1339/optimizer_05000.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d61fa85f6c376914274be629bc0eac5a59a534791989f372a8e8bba92dc40281
+ size 70895430
attention_kindself_n_heads2_seed1339/optimizer_07500.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:225354c08248313f0cc7697fc63972e363f6756c00b49b8bd7c7a75e39ec2b27
+ size 70895430
attention_kindself_n_heads2_seed1339/optimizer_09999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac44cf9be14c262abd9dcaf8a209310ee9701b655acefbd539f2f83073d65709
+ size 70895430
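Every checkpointed step (2500, 5000, 7500, 9999) now has a matching model, optimizer, and dataloader file, which is what the resume_checkpoint / resume_optimizer flags in args.json would need to restart the run. A sketch that lists the triples stored in this folder of the repo; the repo id is a hypothetical placeholder:

```python
# Sketch: list which checkpoint files exist per step in this run folder.
# repo_id is a hypothetical placeholder.
import re
from collections import defaultdict

from huggingface_hub import HfApi

files = HfApi().list_repo_files("andrew-healey/selective-attention-runs")  # placeholder
steps = defaultdict(set)
pattern = r"attention_kindself_n_heads2_seed1339/(model|optimizer|dataloader)_(\d+)\.pt$"
for name in files:
    if m := re.search(pattern, name):
        steps[int(m[2])].add(m[1])

for step in sorted(steps):
    print(step, sorted(steps[step]))  # expect dataloader/model/optimizer at 2500, 5000, 7500, 9999
```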