Eso-LM-B-alpha-0_25 / config.json
{
  "_name_or_path": "sahoo-diffusion/Eso-LM-B-alpha-0_25",
  "architectures": [
    "EsoLM"
  ],
  "auto_map": {
    "AutoConfig": "config.EsoLMConfig",
    "AutoModelForMaskedLM": "model.EsoLM"
  },
  "cond_dim": 128,
  "dropout": 0.1,
  "hidden_dim": 768,
  "hidden_size": 768,
  "mask_index": 50257,
  "model_length": 1024,
  "model_type": "EsoLM",
  "n_blocks": 12,
  "n_heads": 12,
  "return_dict": false,
  "torch_dtype": "float32",
  "transformers_version": "4.49.0",
  "vocab_size": 50258
}
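
A minimal sketch of loading this checkpoint through the auto_map entries above. It assumes the repo id matches "_name_or_path" ("sahoo-diffusion/Eso-LM-B-alpha-0_25"); adjust it if you are using a mirror. trust_remote_code=True is required because EsoLMConfig and EsoLM are defined in the repo's config.py and model.py rather than in transformers itself.

# Sketch: load the custom EsoLM config and model via the auto_map above.
# repo_id is taken from "_name_or_path"; swap in your own copy if needed.
from transformers import AutoConfig, AutoModelForMaskedLM

repo_id = "sahoo-diffusion/Eso-LM-B-alpha-0_25"

# trust_remote_code pulls config.EsoLMConfig / model.EsoLM from the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)

# Values from the config above: mask token at index 50257, vocab size 50258.
print(config.mask_index, config.vocab_size)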