HCaLM / config.json
{
"architectures": [
"CaLMModel"
],
"attention_dropout": 0.0,
"attention_heads": 12,
"cls_token_id": 0,
"embed_dim": 768,
"eos_token_id": 2,
"ffn_embed_dim": 3072,
"logit_bias": false,
"mask_token_id": 68,
"max_positions": 1024,
"model_type": "calm",
"num_layers": 12,
"pad_token_id": 1,
"rope_embedding": true,
"torch_dtype": "float32",
"transformers_version": "4.46.3",
"vocab_size": 69
}
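
Since "calm" is not a built-in transformers model type, loading this config requires the repo's own modeling code. Below is a minimal sketch of loading it; the repo id "xiaoyangch/HCaLM" is an assumption inferred from this page, and the sketch assumes the repo actually ships custom code importable via trust_remote_code.

# Minimal sketch: load the CaLM config and instantiate the model.
# Assumptions: repo id "xiaoyangch/HCaLM" (inferred from the page) and
# custom modeling code in the repo, needed because "calm" is not a
# built-in transformers model_type.
from transformers import AutoConfig, AutoModel

# trust_remote_code=True lets transformers import the repo's own
# CaLMModel class, named in the "architectures" field above.
config = AutoConfig.from_pretrained("xiaoyangch/HCaLM", trust_remote_code=True)
model = AutoModel.from_config(config, trust_remote_code=True)

print(config.embed_dim)   # 768
print(config.num_layers)  # 12
print(config.vocab_size)  # 69, likely 64 codons plus special tokens

Note the small vocabulary: with mask_token_id at 68 (the last index) and cls/pad/eos at 0/1/2, the 69 entries are consistent with a codon-level tokenizer (64 codons plus a handful of special tokens), though this file alone does not confirm it.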