farpluto committed on
Commit
0134c04
·
verified ·
1 Parent(s): 5dbd45b

Add config

Browse files
Files changed (1) hide show
  1. config.json +24 -0
config.json ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model_name": "Qwen/Qwen3-1.7B",
3
+ "hf_repo_id": "farpluto/doc-to-lora-niah",
4
+ "lora_r": 8,
5
+ "lora_alpha": 8.0,
6
+ "target_module": "down_proj",
7
+ "latent_dim": 512,
8
+ "n_percv_blocks": 8,
9
+ "lr": 4e-05,
10
+ "weight_decay": 0.01,
11
+ "grad_accum": 8,
12
+ "max_steps": 8000,
13
+ "warmup_steps": 200,
14
+ "max_grad_norm": 1.0,
15
+ "l1_coef": 0.05,
16
+ "eval_every": 500,
17
+ "save_every": 2000,
18
+ "ctx_min_len": 32,
19
+ "ctx_max_len": 256,
20
+ "n_train": 80000,
21
+ "n_eval": 300,
22
+ "seed": 42,
23
+ "out_dir": "d2l_ckpts"
24
+ }