{
"architecture": "Phi3ForCausalLM",
"dtype": "float16",
"num_hidden_layers": 32,
"num_attention_heads": 32,
"rope_theta": 10000.0,
"hidden_size": 3072,
"intermediate_size": 8192,
"vocab_size": 32064,
"max_position_embeddings": 4096,
"hidden_act": "swiglu",
"share_embedding_table": false,
"norm_epsilon": 1e-05
}