got-slm-4.5m / config.json
{
  "architectures": [
    "SLM"
  ],
  "model_type": "custom",
  "auto_map": {
    "AutoConfig": "model.SLMConfig",
    "AutoModel": "model.SLM",
    "AutoModelForCausalLM": "model.SLM"
  },
  "vocab_size": 8000,
  "block_size": 256,
  "n_positions": 256,
  "n_embd": 256,
  "n_layer": 3,
  "n_head": 4,
  "dropout": 0.1,
  "bias": true,
  "torch_dtype": "float32",
  "transformers_version": "4.36.0"
}
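
Because "auto_map" points at classes defined in the repo's model.py (SLMConfig and SLM), the config and model can be loaded through the transformers Auto* classes once remote code execution is allowed. A minimal loading sketch, assuming the repo id is aman0419/got-slm-4.5m (inferred, not confirmed) and that model.py ships alongside this config:

```python
# Sketch: load the custom SLM model via its auto_map entries.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "aman0419/got-slm-4.5m"  # assumption: user/repo inferred from the page

# trust_remote_code=True lets transformers import model.SLMConfig / model.SLM
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.n_embd, config.n_layer, config.n_head)  # 256, 3, 4

model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
```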