vicunaft / config.json
{
  "auto_mapping": null,
  "base_model_name_or_path": "lmsys/vicuna-7b-v1.3",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "revision": null,
  "target_modules": [
    "q_proj",
    "v_proj"
  ],
  "model_type": "llama",
  "task_type": "CAUSAL_LM"
}
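
For reference, below is a minimal sketch of using this adapter with the Hugging Face PEFT library. It assumes the LoRA weights (adapter_model.bin or .safetensors) sit alongside this config, and the repo id "siciai/vicunaft" is inferred from this page, not confirmed; substitute the actual path. Note that PEFT normally reads these fields from a file named adapter_config.json, and "model_type" is a transformers key rather than a LoraConfig field.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel, LoraConfig

# Load the base model named in base_model_name_or_path.
base = AutoModelForCausalLM.from_pretrained(
    "lmsys/vicuna-7b-v1.3",
    torch_dtype=torch.float16,
)
tokenizer = AutoTokenizer.from_pretrained("lmsys/vicuna-7b-v1.3")

# Wrap the base model with the LoRA adapter. PEFT applies rank-8 updates
# (lora_alpha=32, dropout 0.05) to the q_proj and v_proj attention
# projections, for an effective scaling of lora_alpha / r = 4.
# "siciai/vicunaft" is an assumed repo id -- replace with the real one.
model = PeftModel.from_pretrained(base, "siciai/vicunaft")
model.eval()  # matches "inference_mode": true

# To recreate the same adapter configuration from scratch (e.g. for training):
lora_config = LoraConfig(
    r=8,
    lora_alpha=32,
    lora_dropout=0.05,
    target_modules=["q_proj", "v_proj"],
    bias="none",
    task_type="CAUSAL_LM",
)

Because only the q_proj and v_proj matrices are targeted at rank 8, the adapter trains a small fraction of the 7B base model's parameters, which is the usual trade-off this kind of LoRA config makes between fine-tuning cost and capacity.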