EpiLLaMA-3.3-70B / config.json
jattokatarratto's picture
Upload config.json
b9d3f51 verified
raw
history blame contribute delete
620 Bytes
{
"model_type": "causal_lm",
"base_model": "meta-llama/Llama-3.3-70B-Instruct",
"quantization": {
"load_in_8bit": true,
"quantization_type": "standard_8bit",
"compute_dtype": "bfloat16"
},
"fine_tuning": {
"method": "PEFT",
"technique": "LoRA",
"lora_rank": 16,
"lora_alpha": 16,
"target_modules": [
"q_proj",
"k_proj",
"v_proj",
"o_proj",
"gate_proj",
"up_proj",
"down_proj"
],
"dropout": 0.05
},
"device_map": "auto",
"trust_remote_code": true,
"tokenizer_options": {
"use_fast": true
}
}