LilPhat23 committed (verified)
Commit 6de64de · 1 Parent(s): 7af71be

Upload config.json

Files changed (1)
config.json +30 -0
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "model_type": "phogpt",
+   "architectures": ["PhoGPTForCausalLM"],
+   "hidden_size": 4096,
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "vocab_size": 51200,
+   "max_seq_len": 4096,
+   "torch_dtype": "float16",
+   "attn_config": {
+     "attn_impl": "triton",
+     "attn_pdrop": 0.0,
+     "attn_type": "multihead_attention",
+     "alibi": true,
+     "alibi_bias_max": 8,
+     "prefix_lm": false,
+     "qk_ln": false
+   },
+   "ffn_config": {
+     "ffn_type": "phogpt_mlp"
+   },
+   "init_config": {
+     "name": "kaiming_normal_",
+     "fan_mode": "fan_in",
+     "init_nonlinearity": "relu",
+     "init_div_is_residual": true
+   },
+   "use_cache": false
+ }
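The config declares a custom model_type ("phogpt") with MPT-style sub-blocks (attn_config, ffn_config, init_config), so loading it through the transformers library would rely on the modeling code shipped in the repository. Below is a minimal loading sketch, not the author's documented usage: the repository id is a hypothetical placeholder, and the attn_impl override to "torch" is an assumption for environments where Triton is not installed (the config ships with "attn_impl": "triton").

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id for illustration only; substitute the repository
# that actually contains this config.json.
model_id = "LilPhat23/phogpt-model"

# "phogpt" is not a built-in transformers model_type, so the custom
# modeling code from the repository has to be allowed to run.
config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)

# Assumption: fall back to the plain PyTorch attention path when Triton
# is unavailable; attn_config is treated as a plain dict here.
config.attn_config["attn_impl"] = "torch"

model = AutoModelForCausalLM.from_pretrained(
    model_id,
    config=config,
    torch_dtype="auto",  # config declares float16 weights
    trust_remote_code=True,
)
model.eval()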