{
"architectures": [
"NeoLLMForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.1,
"auto_map": {
"AutoConfig": "configuration_neollm.NeoLLMConfig",
"AutoModel": "modeling_neollm.NeoLLMModel",
"AutoModelForCausalLM": "modeling_neollm.NeoLLMForCausalLM"
},
"dropout_rate": 0.1,
"dtype": "bfloat16",
"eos_token_id": 151643,
"fan_ratio": 0.125,
"fan_ratio_ffn": 0.0625,
"head_dim": 64,
"hidden_act": "xielu",
"hidden_size": 512,
"initializer_range": 0.02,
"intermediate_size": 1536,
"max_position_embeddings": 512,
"model_type": "neollm",
"num_attention_heads": 8,
"num_hidden_layers": 12,
"num_key_value_heads": 2,
"num_stack_heads": 4,
"pad_token_id": 151643,
"partial_rotary_factor": 0.25,
"rms_norm_eps": 1e-06,
"rope_parameters": {
"partial_rotary_factor": 0.25,
"rope_theta": 10000.0,
"rope_type": "default"
},
"rope_theta": 10000.0,
"stack_d_model": 16,
"stack_slots": 24,
"tie_word_embeddings": true,
"transformers_version": "5.0.0",
"use_cache": false,
"use_stack": true,
"vocab_size": 151665
}