Yeonjun committed
Commit 4655d3d · verified · 1 Parent(s): 5ab3951

Upload LlamaForCausalLM

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "gate_proj",
     "v_proj",
-    "k_proj",
     "down_proj",
+    "q_proj",
+    "k_proj",
     "o_proj",
-    "q_proj"
+    "up_proj",
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bce33ae54c614676f2dab782cbab9916dea9ec141a61bea591bb0639a67f674b
+oid sha256:ec8b15f748307f2f5286bf0d1d3356711597c87794e6283c1800fdd998f0b09c
 size 167832240
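
This file is a Git LFS pointer: the `oid` is the SHA-256 of the actual weights, so a download can be verified against it. A small sketch, assuming the file has been pulled locally:

```python
# Sketch: verify a downloaded adapter_model.safetensors against the
# sha256 oid recorded in the LFS pointer after this commit.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "ec8b15f748307f2f5286bf0d1d3356711597c87794e6283c1800fdd998f0b09c"
# assert sha256_of("adapter_model.safetensors") == expected
```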
generation_config.json ADDED
@@ -0,0 +1,11 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 128000,
+  "do_sample": true,
+  "eos_token_id": 128001,
+  "max_length": 131072,
+  "pad_token_id": 128004,
+  "temperature": 0.6,
+  "top_p": 0.95,
+  "transformers_version": "4.49.0"
+}
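
The new file enables nucleus sampling by default (temperature 0.6, top-p 0.95). A minimal sketch reproducing it in code, for use with a loaded model; model and tokenizer loading is elided:

```python
# Sketch: the same settings as the uploaded generation_config.json,
# built in code and passed to generate().
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=128000,
    eos_token_id=128001,
    pad_token_id=128004,
    do_sample=True,       # sample instead of greedy decoding
    temperature=0.6,
    top_p=0.95,
    max_length=131072,
)
# With a loaded model:
# output = model.generate(**inputs, generation_config=gen_config)
```

When the config file is present in the repo, `model.generate()` picks these values up automatically; passing a `GenerationConfig` explicitly, as above, overrides them per call.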