idhant
committed on
Update config.json
Browse files- config.json +3 -2
config.json
CHANGED
|
@@ -9,7 +9,7 @@
|
|
| 9 |
"warmup_steps": 1000,
|
| 10 |
"resample_steps": null,
|
| 11 |
"device": "cuda",
|
| 12 |
-
"
|
| 13 |
"lm_name": "tiny-gpt-27M-mixtral-5l-active-27M",
|
| 14 |
"wandb_name": "L3-mu3.0e-02-lr1e-03-64-mixed",
|
| 15 |
"submodule_name": null,
|
|
@@ -17,6 +17,7 @@
|
|
| 17 |
"code_normalization": "MIXED",
|
| 18 |
"code_normalization_alpha_sae": 0.7,
|
| 19 |
"code_normalization_alpha_cc": 0.3,
|
| 20 |
-
"target_rms": 1.0
|
|
|
|
| 21 |
}
|
| 22 |
}
|
|
|
|
| 9 |
"warmup_steps": 1000,
|
| 10 |
"resample_steps": null,
|
| 11 |
"device": "cuda",
|
| 12 |
+
"layer": 3,
|
| 13 |
"lm_name": "tiny-gpt-27M-mixtral-5l-active-27M",
|
| 14 |
"wandb_name": "L3-mu3.0e-02-lr1e-03-64-mixed",
|
| 15 |
"submodule_name": null,
|
|
|
|
| 17 |
"code_normalization": "MIXED",
|
| 18 |
"code_normalization_alpha_sae": 0.7,
|
| 19 |
"code_normalization_alpha_cc": 0.3,
|
| 20 |
+
"target_rms": 1.0,
|
| 21 |
+
"num_layers": 2
|
| 22 |
}
|
| 23 |
}
|