{
"architectures": [
"Qwen2ForCausalLM"
],
"hidden_size": 640,
"intermediate_size": 1536,
"max_position_embeddings": 768,
"model_type": "qwen2",
"num_attention_heads": 10,
"num_hidden_layers": 12,
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.47.1",
"vocab_size": 151936
}
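
A minimal sketch of how this configuration could be used, assuming the JSON above is saved as ./config.json (the path and filename are assumptions) and that the transformers and torch libraries are installed. AutoModelForCausalLM.from_config builds a randomly initialized Qwen2 model from the architecture fields; it does not download any weights.

# Minimal sketch: instantiate a randomly initialized model from this config.
# Assumes the JSON above is saved as ./config.json (path is an assumption).
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("./config.json")
model = AutoModelForCausalLM.from_config(config)  # random init, no weights fetched

# Rough parameter count implied by hidden_size=640, num_hidden_layers=12,
# and vocab_size=151936.
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")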