BKM1804 committed on
Commit
724ff9b
·
verified ·
1 Parent(s): 3916442

Upload model

Browse files
Files changed (2) hide show
  1. adapter_config.json +7 -7
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
- "base_model_class": "LlamaForCausalLM",
5
- "parent_library": "transformers.models.llama.modeling_llama"
6
  },
7
- "base_model_name_or_path": "unsloth/llama-3-8b",
8
  "bias": "none",
9
  "corda_config": null,
10
  "eva_config": null,
@@ -28,13 +28,13 @@
28
  "rank_pattern": {},
29
  "revision": null,
30
  "target_modules": [
31
- "gate_proj",
32
  "down_proj",
33
  "up_proj",
34
- "k_proj",
35
- "q_proj",
36
  "o_proj",
37
- "v_proj"
 
38
  ],
39
  "target_parameters": null,
40
  "task_type": null,
 
1
  {
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
+ "base_model_class": "Qwen2ForCausalLM",
5
+ "parent_library": "transformers.models.qwen2.modeling_qwen2"
6
  },
7
+ "base_model_name_or_path": "Qwen/Qwen1.5-7B",
8
  "bias": "none",
9
  "corda_config": null,
10
  "eva_config": null,
 
28
  "rank_pattern": {},
29
  "revision": null,
30
  "target_modules": [
31
+ "q_proj",
32
  "down_proj",
33
  "up_proj",
34
+ "gate_proj",
 
35
  "o_proj",
36
+ "v_proj",
37
+ "k_proj"
38
  ],
39
  "target_parameters": null,
40
  "task_type": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6ef7f1d9a0b116dccbbf571b9f72563500ccd38e2b611f2fdbf6afd7590e1d77
3
- size 1342238560
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f437b52beab44248d5a29fea21450f22d53178ec22a17b209f1026d443b23435
3
+ size 1279323952