Caplin43 committed
Commit 60a5405 · verified · Parent(s): 3be8c5f

Create config.json

Files changed (1):
  config.json +12 -0
config.json ADDED
@@ -0,0 +1,12 @@
+{
+  "model_type": "causal_lm",
+  "architecture": "Transformer",
+  "hidden_size": 1024,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 12,
+  "vocab_size": 32000,
+  "max_position_embeddings": 2048,
+  "activation_function": "gelu",
+  "layer_norm_eps": 1e-12,
+  "initializer_range": 0.02
+}
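
For context, the file describes a small GPT-style causal language model. Below is a minimal sketch of how such a config might be consumed: plain JSON loading plus a rough parameter-count estimate. The 4x MLP expansion and learned position embeddings are assumptions of the sketch, not values stated in the file, and the field names used are exactly those in config.json.

import json

# Load the config added in this commit and estimate model size.
with open("config.json") as f:
    cfg = json.load(f)

h = cfg["hidden_size"]
layers = cfg["num_hidden_layers"]
vocab = cfg["vocab_size"]
max_pos = cfg["max_position_embeddings"]

embed = vocab * h          # token embedding matrix
pos_embed = max_pos * h    # learned position embeddings (assumed)
attn = 4 * h * h           # Q, K, V, and output projections per layer
mlp = 2 * h * (4 * h)      # up/down projections, 4x expansion (assumed)
per_layer = attn + mlp

total = embed + pos_embed + layers * per_layer
print(f"~{total / 1e6:.0f}M parameters")  # roughly 186M with these values

With hidden_size 1024, 12 layers, and a 32000-token vocabulary, the estimate lands around 186M parameters, ignoring biases and layer-norm weights, which contribute comparatively little.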