andrew-healey committed
Commit 20ed9a6 · verified · 1 parent: 7a6177e

Upload folder using huggingface_hub

attention_kindselective_n_heads2_seed1340/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindselective_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 100, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 80, "total_batch_size": 20480, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_20480_2_1340", "n_embd": 128}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindselective_n_heads2_seed1340", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 1000, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1340, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 80, "total_batch_size": 20480, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_20480_2_1340", "n_embd": 128}
attention_kindselective_n_heads2_seed1340/dataloader_00999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8513131d2bf732f8f6d97ff82d32c505b2a2f3f9f2247bac315bb09ce0e1f97
+ size 964
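The .pt artifacts in this commit are stored via Git LFS, so the repository tracks only a small pointer file: the spec version, a sha256 object id, and the byte size. A sketch of verifying a downloaded object against this pointer, assuming the file sits at the path shown above (the check itself is not part of the original code):

```python
import hashlib

# Pointer values copied from the diff above.
expected_oid = "a8513131d2bf732f8f6d97ff82d32c505b2a2f3f9f2247bac315bb09ce0e1f97"
expected_size = 964

with open("attention_kindselective_n_heads2_seed1340/dataloader_00999.pt", "rb") as f:
    data = f.read()

# A Git LFS pointer resolves to an object matching both the size and the oid.
assert len(data) == expected_size
assert hashlib.sha256(data).hexdigest() == expected_oid
```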
attention_kindselective_n_heads2_seed1340/log2.txt CHANGED
@@ -1,8 +1,53 @@
- max_steps: 100
+ max_steps: 1000
  0 val loss 11.8215
  0 val perplexity 136141.5156
  0 train 11.828733 (lr=7.5000e-07) (hash(x)=108792797)
- 50 val loss 10.2382
- 50 val perplexity 27951.8145
- 99 val loss 9.2469
- 99 val perplexity 10372.1592
+ 50 val loss 10.2379
+ 50 val perplexity 27942.2988
+ 100 val loss 9.3592
+ 100 val perplexity 11605.1504
+ 100 train 9.345304 (lr=7.5750e-05) (hash(x)=98274409)
+ 150 val loss 8.3020
+ 150 val perplexity 4031.8477
+ 200 val loss 7.7993
+ 200 val perplexity 2438.8237
+ 200 train 8.310523 (lr=1.5000e-04) (hash(x)=124454966)
+ 250 val loss 7.6909
+ 250 val perplexity 2188.3867
+ 300 val loss 7.6519
+ 300 val perplexity 2104.7246
+ 300 train 7.570929 (lr=1.4486e-04) (hash(x)=100355155)
+ 350 val loss 7.6552
+ 350 val perplexity 2111.5752
+ 400 val loss 7.6471
+ 400 val perplexity 2094.6055
+ 400 train 7.469439 (lr=1.3023e-04) (hash(x)=97953247)
+ 450 val loss 7.6306
+ 450 val perplexity 2060.3564
+ 500 val loss 7.6261
+ 500 val perplexity 2050.9602
+ 500 train 7.449285 (lr=1.0833e-04) (hash(x)=91758530)
+ 550 val loss 7.6088
+ 550 val perplexity 2015.9412
+ 600 val loss 7.5876
+ 600 val perplexity 1973.5529
+ 600 train 7.642048 (lr=8.2500e-05) (hash(x)=111869413)
+ 650 val loss 7.5746
+ 650 val perplexity 1948.1482
+ 700 val loss 7.5607
+ 700 val perplexity 1921.2594
+ 700 train 7.497701 (lr=5.6669e-05) (hash(x)=102988617)
+ 750 val loss 7.5448
+ 750 val perplexity 1890.8264
+ 800 val loss 7.5325
+ 800 val perplexity 1867.7794
+ 800 train 7.752382 (lr=3.4770e-05) (hash(x)=112812937)
+ 850 val loss 7.5270
+ 850 val perplexity 1857.4615
+ 900 val loss 7.5220
+ 900 val perplexity 1848.2378
+ 900 train 7.442432 (lr=2.0138e-05) (hash(x)=100208804)
+ 950 val loss 7.5186
+ 950 val perplexity 1841.9471
+ 999 val loss 7.5168
+ 999 val perplexity 1838.6222
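The extended log shows the 1000-step run reaching a val loss of 7.5168 (perplexity 1838.6) at step 999, versus 9.2469 (perplexity 10372.2) at step 99 in the old 100-step run. The perplexity column is just exp(val loss), e.g. exp(7.5168) ≈ 1838.6. A sketch that parses the log and checks this relationship, assuming the line format shown above:

```python
import math
import re

# Match "<step> val loss <x>" and "<step> val perplexity <y>" lines;
# train lines and the max_steps header are ignored by these patterns.
loss_re = re.compile(r"^(\d+) val loss ([\d.]+)$")
ppl_re = re.compile(r"^(\d+) val perplexity ([\d.]+)$")

losses, ppls = {}, {}
with open("attention_kindselective_n_heads2_seed1340/log2.txt") as f:
    for line in f:
        line = line.strip()
        if m := loss_re.match(line):
            losses[int(m.group(1))] = float(m.group(2))
        elif m := ppl_re.match(line):
            ppls[int(m.group(1))] = float(m.group(2))

for step, loss in losses.items():
    # e.g. exp(7.5168) ~= 1838.6, matching the logged value at step 999;
    # tolerance covers the 4-decimal rounding of the logged loss.
    assert abs(math.exp(loss) - ppls[step]) / ppls[step] < 1e-3
```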
attention_kindselective_n_heads2_seed1340/model_00999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2094dcacf8344e63dadb4e18ca3ae58ab1a24c10b6f4e7d6f3a6d6d1ffaed3fe
+ size 38587970
attention_kindselective_n_heads2_seed1340/optimizer_00999.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d69ecb649f12afbc8a21ce3af93dc0599693b4426086bffd21536252414b269b
+ size 70895430
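Together, the three LFS objects (dataloader, model, and optimizer state at step 999) are the artifacts that the config's `resume_checkpoint` / `resume_optimizer` fields would consume to continue training. A sketch of loading them, with the caveat that the internal structure of these .pt files is an assumption not confirmed by this diff:

```python
import torch

# File names come from this commit; the assumption that each .pt file is a
# state_dict consumable by load_state_dict is NOT confirmed here and may
# differ from the training code's actual checkpoint format.
prefix = "attention_kindselective_n_heads2_seed1340"
model_state = torch.load(f"{prefix}/model_00999.pt", map_location="cpu")
optim_state = torch.load(f"{prefix}/optimizer_00999.pt", map_location="cpu")

# model.load_state_dict(model_state)      # hypothetical model instance
# optimizer.load_state_dict(optim_state)  # hypothetical optimizer instance
```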