AshanGimhana committed
Commit 530e9d7 · 1 Parent(s): 3a66e98

Upload LlamaForCausalLM

adapter_config.json CHANGED
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1024318ca732d7a7e5fdab02dc17c8882baf9eb59a0e6bc02b9560b95078cb4c
+oid sha256:7b58fb71a2981c24785c29215feb49c6dc972b0cc3066957560db6a7cfbb0c5b
 size 134235048
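
The adapter weights were replaced: the LFS pointer's oid changed while the size stayed at 134,235,048 bytes, which is consistent with re-uploaded weights of the same tensor shapes. A quick sketch for checking a local download against the new object id; the file path is an assumption about where the file was saved:

```python
# Sketch: verify a locally downloaded adapter_model.safetensors against the
# new LFS object id from the diff above.
import hashlib

EXPECTED_SHA256 = "7b58fb71a2981c24785c29215feb49c6dc972b0cc3066957560db6a7cfbb0c5b"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so large checkpoints don't need to fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of("adapter_model.safetensors") == EXPECTED_SHA256
```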
generation_config.json CHANGED
@@ -3,6 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.36.1",
-  "use_cache": false
+  "transformers_version": "4.36.2"
 }
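
This hunk bumps transformers_version from 4.36.1 to 4.36.2 and drops "use_cache": false; with the key absent, transformers falls back to its default of use_cache=True, so KV caching is enabled during generation. A sketch of reading the updated config from a local checkout of this repo (the local path is an assumption):

```python
# Sketch: load the updated generation config from a local clone of the repo.
# "./" assumes this runs from the repo root; with no "use_cache" key in the
# JSON, the transformers default (use_cache=True) applies.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./")
print(gen_config.bos_token_id, gen_config.eos_token_id, gen_config.pad_token_id)
print(gen_config.use_cache)  # True: the library default, since the key was removed
```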