RyanDev1st committed on
Commit
02e33e3
verified
1 Parent(s): bd710a5

Update adapter_config.json

Browse files
Files changed (1) hide show
  1. adapter_config.json +3 -11
adapter_config.json CHANGED
@@ -2,18 +2,10 @@
2
  "base_model_name_or_path": "google/gemma-3-27b-it",
3
  "peft_type": "LORA",
4
  "task_type": "CAUSAL_LM",
5
- "r": 32,
6
- "lora_alpha": 64,
7
  "lora_dropout": 0.05,
8
  "bias": "none",
9
- "target_modules": [
10
- "q_proj",
11
- "k_proj",
12
- "v_proj",
13
- "o_proj",
14
- "gate_proj",
15
- "up_proj",
16
- "down_proj"
17
- ],
18
  "inference_mode": true
19
  }
 
2
  "base_model_name_or_path": "google/gemma-3-27b-it",
3
  "peft_type": "LORA",
4
  "task_type": "CAUSAL_LM",
5
+ "r": 4,
6
+ "lora_alpha": 8,
7
  "lora_dropout": 0.05,
8
  "bias": "none",
9
+ "target_modules": "model.language_model.layers.[\\d]+.(mlp|cross_attn|self_attn).(up|down|gate|q|k|v|o)_proj",
 
 
 
 
 
 
 
 
10
  "inference_mode": true
11
  }