hsenussi committed on
Commit
08f91ca
·
verified ·
1 Parent(s): 5f2da41

Upload folder using huggingface_hub

Browse files
adapter_config.json CHANGED
@@ -16,7 +16,7 @@
16
  "layers_pattern": null,
17
  "layers_to_transform": null,
18
  "loftq_config": {},
19
- "lora_alpha": 32,
20
  "lora_bias": false,
21
  "lora_dropout": 0.05,
22
  "megatron_config": null,
@@ -25,14 +25,17 @@
25
  "peft_type": "LORA",
26
  "peft_version": "0.18.1",
27
  "qalora_group_size": 16,
28
- "r": 16,
29
  "rank_pattern": {},
30
  "revision": null,
31
  "target_modules": [
32
- "k_proj",
33
- "v_proj",
34
  "q_proj",
35
- "o_proj"
 
 
 
 
36
  ],
37
  "target_parameters": null,
38
  "task_type": "CAUSAL_LM",
 
16
  "layers_pattern": null,
17
  "layers_to_transform": null,
18
  "loftq_config": {},
19
+ "lora_alpha": 64,
20
  "lora_bias": false,
21
  "lora_dropout": 0.05,
22
  "megatron_config": null,
 
25
  "peft_type": "LORA",
26
  "peft_version": "0.18.1",
27
  "qalora_group_size": 16,
28
+ "r": 32,
29
  "rank_pattern": {},
30
  "revision": null,
31
  "target_modules": [
32
+ "gate_proj",
 
33
  "q_proj",
34
+ "k_proj",
35
+ "down_proj",
36
+ "o_proj",
37
+ "up_proj",
38
+ "v_proj"
39
  ],
40
  "target_parameters": null,
41
  "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:237791eb3529bc580a03693c7fdf20ecb4c8726134e0224c7c90c7b121d6705b
3
- size 2968160
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cdc1bf9fd5fbf9794f8a9ccf76700758cf144dbaaca90ac2cd350855aec1a784
3
+ size 15221224
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:73b22a6230302c12446c7022d1c3d69412c32f41acef419377e13a197bbde7b0
3
  size 5585
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b5d604b5c3e08afee7f04d0efe92c26124e77f1f5fe952f305a25fe9f1c970f6
3
  size 5585