Kelson9 committed (verified)
Commit c464d77
1 Parent(s): 4eac927

Upload LlamaForCausalLM

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
     "down_proj",
-    "gate_proj",
-    "k_proj",
     "o_proj",
+    "up_proj",
+    "k_proj",
     "v_proj",
-    "q_proj"
+    "q_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82a2d92ec846f6a5132547aa7a0316e4372b02c9558b371c9b1860ffc2725e62
+size 166062760
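The new file is a Git LFS pointer for the LoRA adapter weights (about 166 MB of safetensors). A minimal loading sketch, assuming a hypothetical base model ID and adapter repo path (neither is named in this commit):

```python
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Both IDs below are placeholders; the commit does not name the base model
# or the full repo path of this adapter.
base_id = "meta-llama/Llama-3.1-8B-Instruct"  # assumed Llama base model
adapter_id = "Kelson9/<this-adapter-repo>"    # hypothetical adapter repo path

base = AutoModelForCausalLM.from_pretrained(base_id)
model = PeftModel.from_pretrained(base, adapter_id)  # loads adapter_model.safetensors
tokenizer = AutoTokenizer.from_pretrained(base_id)
```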
generation_config.json CHANGED
@@ -7,5 +7,5 @@
   "pad_token_id": 128004,
   "temperature": 0.6,
   "top_p": 0.95,
-  "transformers_version": "4.48.3"
+  "transformers_version": "4.50.0"
 }
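The only change in this file is the `transformers_version` stamp (4.48.3 to 4.50.0), which records the library version used to save the config; the sampling settings themselves are untouched. For reference, the visible fields expressed as a `GenerationConfig` (other fields of the file are not shown in this hunk):

```python
from transformers import GenerationConfig

# Only the fields visible in the hunk above; the rest of the file is not shown.
gen_config = GenerationConfig(
    pad_token_id=128004,
    temperature=0.6,
    top_p=0.95,
)
```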