SykoLLM-V5.5-Beta / config.json
{
  "architectures": [
    "Phi3ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "dtype": "float32",
  "embd_pdrop": 0.0,
  "eos_token_id": 0,
  "hidden_act": "silu",
  "hidden_size": 1024,
  "ignore_keys_at_rope_validation": null,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "max_position_embeddings": 1024,
  "model_type": "phi3",
  "num_attention_heads": 8,
  "num_hidden_layers": 28,
  "num_key_value_heads": 8,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 4,
  "partial_rotary_factor": 1.0,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_parameters": {
    "partial_rotary_factor": 1.0,
    "rope_theta": 10000.0,
    "rope_type": "default"
  },
  "sliding_window": null,
  "tie_word_embeddings": false,
  "transformers_version": "5.0.0",
  "use_cache": false,
  "vocab_size": 32000
}
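
As a minimal sketch (not part of the repo), the config above can be loaded from disk to build a randomly initialized Phi-3 model with these shapes. The local path `config.json` and the parameter-count printout are illustrative assumptions; also note the file was written by transformers 5.0.0, so older versions may simply carry v5-only keys such as "rope_parameters" as extra attributes.

```python
# Sketch: instantiate an (untrained) model from this config, assuming
# config.json sits in the current directory and a transformers install
# with Phi-3 support is available.
from transformers import Phi3Config, Phi3ForCausalLM

config = Phi3Config.from_json_file("config.json")  # load the raw JSON above
model = Phi3ForCausalLM(config)                    # fresh, untrained weights

# hidden_size=1024, 28 layers, vocab 32000, untied embeddings
print(f"{model.num_parameters() / 1e6:.1f}M parameters")
```

Because the config sets "use_cache": false and "dtype": "float32", generation with this model skips the KV cache and runs in full precision unless those defaults are overridden at load time.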