robertou2 committed
Commit 8e65050 · verified · 1 Parent(s): c26126f

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -10,23 +10,28 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 48,
-  "lora_dropout": 0.46,
+  "lora_alpha": 92,
+  "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
-  "modules_to_save": null,
+  "modules_to_save": [
+    "embed_tokens",
+    "input_layernorm",
+    "post_attention_layernorm",
+    "norm",
+    "lm_head"
+  ],
   "peft_type": "LORA",
   "r": 92,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "ffn",
+    "gate_up_proj",
     "qkv_proj",
-    "o_proj",
-    "gate_up_proj"
+    "down_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
-  "use_rslora": true
+  "use_rslora": false
 }
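
For reference, the updated adapter_config.json maps one-to-one onto a `peft.LoraConfig`. Below is a minimal sketch assuming a recent version of the `peft` library; all values come from the diff above, and the base model is not named in this commit, so only the adapter configuration is shown.

```python
# A minimal sketch, assuming a recent peft release: the new
# adapter_config.json expressed as a LoraConfig. Every value is taken
# from the diff above; nothing here is specific to a base model.
from peft import LoraConfig

config = LoraConfig(
    r=92,
    lora_alpha=92,      # was 48; alpha now equals r
    lora_dropout=0.05,  # was 0.46
    target_modules=[    # "ffn" removed; the remaining entries reordered
        "gate_up_proj",
        "qkv_proj",
        "down_proj",
        "o_proj",
    ],
    modules_to_save=[   # previously null; these modules are now trained
        "embed_tokens", # in full and stored with the adapter, consistent
        "input_layernorm",           # with the adapter file growing from
        "post_attention_layernorm",  # ~579 MB to ~974 MB below
        "norm",
        "lm_head",
    ],
    use_rslora=False,   # was true; scaling is alpha/r rather than alpha/sqrt(r)
    use_dora=False,
    task_type="CAUSAL_LM",
)
```

With rsLoRA disabled and alpha equal to r, the effective LoRA scaling alpha/r works out to exactly 1.0.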
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:534964e82302b3d244afd3df4cf6cffb6eb845bddda6e70500dfb77213f01b6f
-size 578848856
+oid sha256:e0baaf54bda52828060ee556ab71aff06409da79a11706550881639c1e205402
+size 973658648
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ce70071ff3c5729f40645852e777635faa561bfe18fb634517e946ceab689486
-size 5688
+oid sha256:b98132aa1f253acb655870a8cb84f62644bb769f47852c6799357af35133273f
+size 5624