LLM-124M / config.json
{
  "architectures": [
    "MyGPT"
  ],
  "bias": false,
  "block_size": 256,
  "dropout": 0.2,
  "model_type": "my-gpt",
  "n_embd": 384,
  "n_head": 6,
  "n_layer": 6,
  "torch_dtype": "float32",
  "transformers_version": "4.42.3",
  "vocab_size": 65
}
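
For reference, below is a minimal Python sketch of how these fields might be consumed. It loads the JSON into a plain dataclass and gives a back-of-envelope parameter count, assuming the custom "MyGPT" architecture follows a standard GPT-2-style decoder layout (token plus learned positional embeddings, n_layer blocks with 4x MLPs). MyGPTConfig, load_config, and estimate_params are hypothetical helpers written for illustration, not part of this repository.

import json
from dataclasses import dataclass

@dataclass
class MyGPTConfig:
    # Hypothetical helper mirroring the fields in config.json.
    block_size: int   # maximum context length in tokens
    vocab_size: int   # number of distinct tokens (65 suggests a character-level vocabulary)
    n_layer: int      # number of transformer blocks
    n_head: int       # attention heads per block
    n_embd: int       # embedding / hidden dimension
    dropout: float    # dropout probability
    bias: bool        # whether linear / layer-norm layers carry bias terms

def load_config(path: str = "config.json") -> MyGPTConfig:
    # Read the raw JSON and keep only the dataclass fields,
    # skipping metadata like "architectures" and "transformers_version".
    with open(path) as f:
        raw = json.load(f)
    fields = MyGPTConfig.__dataclass_fields__
    return MyGPTConfig(**{k: raw[k] for k in fields})

def estimate_params(cfg: MyGPTConfig) -> int:
    # Rough count for a GPT-2-style decoder with these settings;
    # assumes a tied LM head and ignores bias and layer-norm terms.
    tok_emb = cfg.vocab_size * cfg.n_embd        # token embedding
    pos_emb = cfg.block_size * cfg.n_embd        # learned positional embedding
    per_block = 12 * cfg.n_embd ** 2             # attention (4*d^2) + MLP (8*d^2)
    return tok_emb + pos_emb + cfg.n_layer * per_block

if __name__ == "__main__":
    cfg = load_config()
    print(cfg)
    print(f"~{estimate_params(cfg) / 1e6:.1f}M parameters")

Run against this file, the estimate comes out to roughly 10.7M parameters for the configuration above.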