{
"architectures": ["MistralForCausalLM"],
"model_type": "mistral",
"auto_map": {
"AutoModelForCausalLM": "mistralai/Mistral-7B-Instruct-v0.3"
},
"base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.3"
}