andrew-healey committed
Commit 4f6c1ce · verified · 1 Parent(s): 5923e79

Upload folder using huggingface_hub

attention_kindselective_n_heads2_seed1339/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_0/attention_kindselective_n_heads2_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10, "warmup_steps": 200, "group": "wider_is_better_0", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 40, "total_batch_size": 10240, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_10240_2_1339", "n_embd": 128}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_2/attention_kindselective_n_heads2_seed1339", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 100, "warmup_steps": 200, "group": "wider_is_better_2", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 80, "total_batch_size": 20480, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.00015, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1.5e-4_20480_2_1339", "n_embd": 128}
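Compared with the previous wider_is_better_0 config, the substantive changes here are max_steps 10 → 100, batch_size 40 → 80, total_batch_size 10240 → 20480, plus the matching updates to log_dir, group, and key. A minimal sketch for diffing two such args.json files follows; the paths are illustrative, since the old config is not part of this commit:

```python
import json

# Illustrative paths: only the new args.json ships in this commit.
old_path = "wider_is_better_0/attention_kindselective_n_heads2_seed1339/args.json"
new_path = "attention_kindselective_n_heads2_seed1339/args.json"

with open(old_path) as f:
    old_args = json.load(f)
with open(new_path) as f:
    new_args = json.load(f)

# Print every key whose value differs between the two runs.
for key in sorted(old_args.keys() | new_args.keys()):
    if old_args.get(key) != new_args.get(key):
        print(f"{key}: {old_args.get(key)!r} -> {new_args.get(key)!r}")
```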
attention_kindselective_n_heads2_seed1339/dataloader_00099.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:580e5e81f112aa98b56a5810a4962c6f47cc749a08325f59a6f3df76f5120431
+ size 964
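The ADDED .pt files in this commit are Git LFS pointer stubs: the repository tracks only the spec version, the sha256 oid, and the byte size, while the binary payload is stored out of band. A small sketch for checking a locally fetched artifact against its pointer fields; the local path is an assumption about where the file ends up after download:

```python
import hashlib
from pathlib import Path

def matches_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the local file matches the Git LFS pointer's oid and size."""
    data = Path(path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Values copied from the pointer above; the path assumes the file was pulled locally.
print(matches_lfs_pointer(
    "attention_kindselective_n_heads2_seed1339/dataloader_00099.pt",
    "580e5e81f112aa98b56a5810a4962c6f47cc749a08325f59a6f3df76f5120431",
    964,
))
```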
attention_kindselective_n_heads2_seed1339/log2.txt CHANGED
@@ -1,15 +1,8 @@
- max_steps: 10
- 0 val loss 11.7444
- 0 val perplexity 126046.6875
- 0 train 11.719157 (lr=7.5000e-07) (hash(x)=55241167)
- 1 train 11.739908 (lr=1.5000e-06) (hash(x)=51881965)
- 2 train 11.748451 (lr=2.2500e-06) (hash(x)=43601716)
- 3 train 11.765677 (lr=3.0000e-06) (hash(x)=58724315)
- 4 train 11.734903 (lr=3.7500e-06) (hash(x)=51093844)
- 5 train 11.750270 (lr=4.5000e-06) (hash(x)=49464291)
- 6 train 11.713595 (lr=5.2500e-06) (hash(x)=48476334)
- 7 train 11.673182 (lr=6.0000e-06) (hash(x)=60510992)
- 8 train 11.695110 (lr=6.7500e-06) (hash(x)=60540681)
- 9 val loss 11.6731
- 9 val perplexity 117374.5391
- 9 train 11.689736 (lr=7.5000e-06) (hash(x)=52990419)
+ max_steps: 100
+ 0 val loss 11.7471
+ 0 val perplexity 126386.8516
+ 0 train 11.738260 (lr=7.5000e-07) (hash(x)=107123132)
+ 50 val loss 10.5025
+ 50 val perplexity 36404.6875
+ 99 val loss 9.5862
+ 99 val perplexity 14562.5625
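The logged numbers are internally consistent under the usual conventions: the reported perplexity is exp of the validation loss (exp(11.7471) ≈ 126,386, matching the step-0 line up to rounding), and the logged learning rates are reproduced by a linear warmup lr = max_lr · (step + 1) / warmup_steps with max_lr = 1.5e-4 and warmup_steps = 200 from args.json. The warmup formula is inferred from the log values, not stated in the log itself:

```python
import math

max_lr, warmup_steps = 1.5e-4, 200  # from args.json

# exp(cross-entropy loss) reproduces the logged perplexity:
# exp(11.7471) ~= 1.264e5, matching "0 val perplexity 126386.8516" up to rounding.
print(math.exp(11.7471))

# A linear warmup lr = max_lr * (step + 1) / warmup_steps matches the logged rates,
# e.g. step 0 -> 7.5000e-07 and step 9 -> 7.5000e-06, as in the train lines above.
for step in (0, 9):
    print(step, max_lr * (step + 1) / warmup_steps)
```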
attention_kindselective_n_heads2_seed1339/model_00099.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8037cdd10518e93ccbac434724937b225f6aba951f343f7f0ad9532c7ded094e
+ size 38587970
attention_kindselective_n_heads2_seed1339/optimizer_00099.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96ecef86622cd57c05c4cac45de3d1be84dff46c663b619611de5b43367ba984
+ size 70895430
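The model and optimizer checkpoints pair with the resume_checkpoint / resume_optimizer options in args.json. Below is a minimal sketch of fetching and inspecting them via huggingface_hub; the repo_id is a placeholder (it is not stated in this diff), and the internal checkpoint layout is not documented here, so the code only loads the files and lists their top-level keys:

```python
import torch
from huggingface_hub import hf_hub_download

REPO_ID = "andrew-healey/REPO_NAME"  # placeholder: the actual repo id is not given in this diff
PREFIX = "attention_kindselective_n_heads2_seed1339"

model_path = hf_hub_download(repo_id=REPO_ID, filename=f"{PREFIX}/model_00099.pt")
optim_path = hf_hub_download(repo_id=REPO_ID, filename=f"{PREFIX}/optimizer_00099.pt")

# weights_only=False because the checkpoints may hold non-tensor objects; the layout is
# unknown, so just report what is inside each file.
for name, path in [("model", model_path), ("optimizer", optim_path)]:
    ckpt = torch.load(path, map_location="cpu", weights_only=False)
    print(name, list(ckpt.keys()) if isinstance(ckpt, dict) else type(ckpt))
```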