pminhtamnb committed
Commit cccc280 · 1 Parent(s): 076d8d4

Upload 2 files

Files changed (2)
  1. adapter_config.json +5 -7
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -1,27 +1,25 @@
 {
   "auto_mapping": null,
-  "base_model_name_or_path": "ckpt/mistralai/Mistral-7B-v0.1/",
+  "base_model_name_or_path": "ckpt/mistralai/Mistral-7B-v0.1",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
+  "lora_alpha": 32,
   "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 16,
+  "r": 64,
   "revision": null,
   "target_modules": [
     "q_proj",
-    "v_proj",
     "k_proj",
-    "o_proj",
+    "v_proj",
     "gate_proj",
     "up_proj",
-    "down_proj",
-    "lm_head"
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
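
For context, here is a minimal sketch, assuming the peft library (which produces this file format), of how the updated config could be regenerated in code. The values mirror the new adapter_config.json above; the output directory name is illustrative:

from peft import LoraConfig

config = LoraConfig(
    base_model_name_or_path="ckpt/mistralai/Mistral-7B-v0.1",
    r=64,                      # rank, raised from 16 in this commit
    lora_alpha=32,             # scaling numerator, raised from 16
    lora_dropout=0.05,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[           # o_proj and lm_head dropped from the old list
        "q_proj", "k_proj", "v_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)
config.save_pretrained("adapter_out")  # writes adapter_config.json

Note that peft scales LoRA updates by lora_alpha / r, so this change lowers the effective scale from 16/16 = 1.0 to 32/64 = 0.5 even as the rank quadruples.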
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:52a9b49d532045f399c525a2ff9e30b5c0382740900e9576263502f009ffb3e5
-size 85203666
+oid sha256:c5be6569ab8145f22f230f48b0431bf1c0607f45834b8e462957ba997942c58c
+size 604118538
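
The weight file grows from roughly 85 MB to roughly 604 MB. As a back-of-the-envelope check (not part of the commit; Mistral-7B-v0.1 shapes assumed from its public config: hidden size 4096, KV dimension 1024, MLP size 14336, 32 layers), the new size is consistent with rank-64 adapters on the six listed modules stored as 32-bit floats:

# Hypothetical sanity check; shapes are assumptions, not read from the repo.
HIDDEN, KV, MLP, LAYERS = 4096, 1024, 14336, 32

def lora_params(r, shapes):
    # Each LoRA pair A/B adds r * (d_in + d_out) parameters per module.
    return LAYERS * sum(r * (d_in + d_out) for d_in, d_out in shapes)

new_shapes = [
    (HIDDEN, HIDDEN),  # q_proj
    (HIDDEN, KV),      # k_proj
    (HIDDEN, KV),      # v_proj
    (HIDDEN, MLP),     # gate_proj
    (HIDDEN, MLP),     # up_proj
    (MLP, HIDDEN),     # down_proj
]
n = lora_params(64, new_shapes)
print(n, n * 4)  # ~151M params, ~604 MB in fp32

By the same arithmetic, the previous ~85 MB is consistent with rank 16 over the old eight-module list (including o_proj and lm_head) in 16-bit precision.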