FaseehGPT / config.json
rohmab's picture
Upload folder using huggingface_hub
22c4b61 verified
raw
history blame
283 Bytes
{
  "model_type": "arabic-gpt",
  "architectures": [
    "ArabicGPTModel"
  ],
  "vocab_size": 32000,
  "max_seq_len": 256,
  "embed_dim": 512,
  "num_layers": 12,
  "num_heads": 8,
  "ff_dim": 2048,
  "dropout": 0.1,
  "torch_dtype": "float32",
  "transformers_version": "4.51.3"
}