Chronos-1.5B / config.json
squ11z1's picture
Create config.json
eea3c3c verified
raw
history blame contribute delete
580 Bytes
{
  "architectures": ["Qwen2ForCausalLM"],
  "model_type": "qwen2",
  "vocab_size": 151936,
  "hidden_size": 1536,
  "intermediate_size": 8960,
  "num_hidden_layers": 28,
  "num_attention_heads": 12,
  "num_key_value_heads": 2,
  "max_position_embeddings": 32768,
  "torch_dtype": "float16",
  "transformers_version": "4.37.0",
  "_name_or_path": "WeiboAI/VibeThinker-1.5B",
  "use_cache": true,
  "tie_word_embeddings": false,
  "rope_theta": 1000000.0
}