{
  "model_type": "lizard",
  "architectures": [
    "LizardForCausalLM"
  ],
  "vocab_size": 24005,
  "d_model": 256,
  "n_heads": 8,
  "n_layers": 6,
  "max_length": 128,
  "pad_token_id": 0
}