{
  "architectures": ["PhiForCausalLM"],
  "model_type": "phi-2",
  "torch_dtype": "float16",
  "auto_map": {
    "AutoModelForCausalLM": "microsoft/phi-2--modeling_phi.PhiForCausalLM"
  }
}