{
"vocab_size": 128256,
"d_model": 384,
"n_layers": 8,
"n_heads": 6,
"d_ff": 1536,
"seq_len": 1024,
"architectures": [
"Model"
],
"model_type": "llama",
"torch_dtype": "bfloat16"
}