boringblobking committed on
Commit 12b735e · verified · 1 Parent(s): 1de0499

Upload model trained with Unsloth 2x faster

Files changed (2)
  1. adapter_config.json +9 -5
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -1,6 +1,10 @@
 {
   "alpha_pattern": {},
-  "auto_mapping": null,
+  "auto_mapping": {
+    "base_model_class": "LlamaForCausalLM",
+    "parent_library": "transformers.models.llama.modeling_llama",
+    "unsloth_fixed": true
+  },
   "base_model_name_or_path": "unsloth/meta-llama-3.1-8b-unsloth-bnb-4bit",
   "bias": "none",
   "corda_config": null,
@@ -25,13 +29,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
     "v_proj",
     "q_proj",
     "o_proj",
-    "down_proj",
-    "up_proj",
-    "gate_proj",
-    "k_proj"
+    "gate_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:94bf81489f727dcba5be8305cdd08a2e25117c29955b525d3a81b4c39a00a31d
+oid sha256:66e6a554330ec7ea32163fb864b5f941f41c01d702a02ad7f89a87068a8285b3
 size 167832240
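
A downloaded copy of the weights can be checked against the new Git LFS oid with a short sketch like the one below; the local file path is an assumption, and the expected digest is the sha256 recorded in this commit's pointer file.

import hashlib

# Hypothetical local path to the downloaded adapter weights.
path = "adapter_model.safetensors"

# Stream the file and compute its SHA-256 digest.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

expected = "66e6a554330ec7ea32163fb864b5f941f41c01d702a02ad7f89a87068a8285b3"
print(h.hexdigest() == expected)  # True if the file matches this commit's LFS object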