EpiMistral-7B / config.json
jattokatarratto's picture
Upload config.json
17a4b30 verified
raw
history blame contribute delete
616 Bytes
{
  "model_type": "causal_lm",
  "base_model": "Open-Orca/Mistral-7B-OpenOrca",
  "quantization": {
    "load_in_8bit": true,
    "quantization_type": "standard_8bit",
    "compute_dtype": "bfloat16"
  },
  "fine_tuning": {
    "method": "PEFT",
    "technique": "LoRA",
    "lora_rank": 16,
    "lora_alpha": 16,
    "target_modules": [
      "q_proj",
      "k_proj",
      "v_proj",
      "o_proj",
      "gate_proj",
      "up_proj",
      "down_proj"
    ],
    "dropout": 0.05
  },
  "device_map": "auto",
  "trust_remote_code": true,
  "tokenizer_options": {
    "use_fast": true
  }
}