{
    "vocab_size": 128256,
    "d_model": 384,
    "n_layers": 8,
    "n_heads": 6,
    "d_ff": 1536,
    "seq_len": 1024,
    "architectures": [
        "Model"
    ],
    "model_type": "llama",
    "torch_dtype": "bfloat16"
}