NeelNanda commited on
Commit
8771271
·
1 Parent(s): 94c27fd

Create config.json

Browse files
Files changed (1) hide show
  1. config.json +34 -0
config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "n_layers": 8,
+ "d_model": 1024,
+ "n_ctx": 1024,
+ "d_head": 64,
+ "model_name": "custom",
+ "n_heads": 16,
+ "d_mlp": 4096,
+ "act_fn": "solu_ln",
+ "d_vocab": 50278,
+ "eps": 1e-05,
+ "use_attn_result": false,
+ "use_attn_scale": true,
+ "use_local_attn": false,
+ "model_family": null,
+ "checkpoint": null,
+ "tokenizer_name": "EleutherAI/gpt-neox-20b",
+ "window_size": null,
+ "attn_types": null,
+ "init_mode": "gpt2",
+ "normalization_type": "LNPre",
+ "device": "cuda",
+ "attention_dir": "causal",
+ "attn_only": false,
+ "seed": 42,
+ "initializer_range": 0.025,
+ "init_weights": true,
+ "scale_attn_by_inverse_layer_idx": false,
+ "positional_embedding_type": "standard",
+ "final_rms": false,
+ "d_vocab_out": 50278,
+ "parallel_attn_mlp": false,
+ "rotary_dim": 64
+ }