| {"architectures":["LlamaForCausalLM"],"hidden_size":2048,"num_hidden_layers":4,"num_attention_heads":64,"num_key_value_heads":32,"intermediate_size":4096,"vocab_size":256,"max_position_embeddings":2048,"rope_theta":10000.0,"rms_norm_eps":1e-6,"hidden_act":"silu","tie_word_embeddings":true} |