{
  "model_type": "transformer",
  "architecture": "mistral",
  "task_type": "CAUSAL_LM",
  "peft_config": {
    "lora_alpha": 16,
    "lora_dropout": 0.1,
    "r": 64,
    "bias": "none",
    "task_type": "CAUSAL_LM",
    "target_modules": [
      "q_proj", "k_proj", "v_proj", "o_proj", "gate_proj",
      "up_proj", "down_proj"
    ]
  },
  "load_in_4bit": true,
  "torch_dtype": "torch.bfloat16",
  "device_map": "auto",
  "trust_remote_code": true,
  "quantization_config": {
    "load_in_4bit": true
  }
}