andrew-healey committed
Commit 0ee702e · verified · 1 parent: e6b1362

Upload folder using huggingface_hub

logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug/args.json ADDED
@@ -0,0 +1 @@
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 12, "n_embd": 264, "head_dim": 22, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 200, "warmup_steps": 250, "group": "fix_1_latent_mask", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1339, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "n_latent_masks", "selection_head_linear_combo_scale": 1.0, "disable_selection_head_linear_combo_bias": false, "assert_latent_matches_no_head": false, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": false, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 32, "total_batch_size": 131072, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": 2, "init_latent_masks_to_identity": true, "latent_mask_scale": null, "latent_mask_sigmoid": false, "S_layernorm": false, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 0.003, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "1_latent_mask_lr_30e-4_n_latent_masks_2"}
logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug/dataloader_00199.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d24eb2fd8d34a7930e7eb5aa4a87c4f658a7d3991f09d63a958e3e005f97b7e2
+ size 964
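
The .pt files in this commit are stored with Git LFS, so the diff shows only a pointer (spec version, sha256 oid, size in bytes). A sketch for checking that a downloaded artifact matches the pointer's oid (the local file name is assumed):

```python
import hashlib

def sha256_of(path: str) -> str:
    # Stream the file through SHA-256; this is the hash Git LFS records as the oid.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected oid from the pointer above.
expected = "d24eb2fd8d34a7930e7eb5aa4a87c4f658a7d3991f09d63a958e3e005f97b7e2"
assert sha256_of("dataloader_00199.pt") == expected
```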
logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug/log2.txt ADDED
@@ -0,0 +1,27 @@
+ max_steps: 200
+ 0 val loss 11.3204
+ 0 val perplexity 82489.9453
+ 0 train 11.303893 (lr=1.2000e-05) (hash(x)=44992657)
+ 10 train 9.654329 (lr=1.3200e-04) (hash(x)=33468021)
+ 20 train 8.357780 (lr=2.5200e-04) (hash(x)=40941803)
+ 30 train 7.772999 (lr=3.7200e-04) (hash(x)=36715902)
+ 40 train 7.644985 (lr=4.9200e-04) (hash(x)=32710993)
+ 50 train 7.588551 (lr=6.1200e-04) (hash(x)=43839896)
+ 60 train 7.487532 (lr=7.3200e-04) (hash(x)=40675468)
+ 70 train 7.052003 (lr=8.5200e-04) (hash(x)=34592631)
+ 80 train 7.059515 (lr=9.7200e-04) (hash(x)=44444845)
+ 90 train 6.858137 (lr=1.0920e-03) (hash(x)=41965258)
+ 100 val loss 6.7722
+ 100 val perplexity 873.2101
+ 100 train 6.624137 (lr=1.2120e-03) (hash(x)=41284750)
+ 110 train 6.641196 (lr=1.3320e-03) (hash(x)=41118734)
+ 120 train 6.478888 (lr=1.4520e-03) (hash(x)=37537547)
+ 130 train 6.362991 (lr=1.5720e-03) (hash(x)=43625179)
+ 140 train 6.364320 (lr=1.6920e-03) (hash(x)=41940760)
+ 150 train 6.416962 (lr=1.8120e-03) (hash(x)=39210431)
+ 160 train 6.395526 (lr=1.9320e-03) (hash(x)=41128294)
+ 170 train 6.268544 (lr=2.0520e-03) (hash(x)=41590227)
+ 180 train 6.216915 (lr=2.1720e-03) (hash(x)=38084871)
+ 190 train 6.136973 (lr=2.2920e-03) (hash(x)=34534333)
+ 199 val loss 6.0325
+ 199 val perplexity 416.7637
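
Two sanity checks on this log, as a sketch: the perplexity lines are exp(val loss), and the logged lr values are consistent with linear warmup to max_lr=0.003 over warmup_steps=250 from args.json (the schedule code itself is not in this commit, so that warmup rule is inferred from the printed values):

```python
import math

# Perplexity lines match exp(val loss), up to the log rounding loss to 4 dp:
for step, loss in [(0, 11.3204), (100, 6.7722), (199, 6.0325)]:
    print(step, math.exp(loss))  # ~82487, ~873.2, ~416.8

# Inferred warmup rule: lr(step) = max_lr * (step + 1) / warmup_steps.
# With max_steps=200 < warmup_steps=250, this run never leaves warmup.
lr = lambda step: 0.003 * (step + 1) / 250
print(lr(0), lr(10), lr(190))  # 1.2e-05 0.000132 0.002292
```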
logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug/model_00199.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:767ffd6ffdc428b96f1471686623f3e326aeb66d9ad4f15b9fc262c780dc177b
+ size 97707314
logs/fix_1_latent_mask/1_latent_mask_lr_30e-4_n_latent_masks_2_seed_1339_debug/optimizer_00199.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f08e7511e62e0d53ce71e24e79411a80513ba8fbeb57fbcea1b55e32f709222b
+ size 189136950
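
Once the LFS files are fetched, the step-199 model and optimizer checkpoints can be opened with torch.load; their internal layout (bare state_dict vs. wrapped dict) isn't shown in this commit, so this sketch just inspects them:

```python
import torch

# Load on CPU; map_location avoids requiring the training GPU.
# On PyTorch >= 2.6 you may need weights_only=False if the files
# contain pickled non-tensor objects.
model_ckpt = torch.load("model_00199.pt", map_location="cpu")
optim_ckpt = torch.load("optimizer_00199.pt", map_location="cpu")

# Inspect the top-level structure before using either checkpoint.
for name, obj in [("model", model_ckpt), ("optimizer", optim_ckpt)]:
    preview = list(obj)[:5] if isinstance(obj, dict) else type(obj)
    print(name, preview)
```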