Schinchann committed
Commit 707c73b · verified · 1 Parent(s): 68f8269

Upload model trained with Unsloth

Upload model trained with Unsloth 2x faster
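
The "2x faster" wording refers to Unsloth's patched QLoRA training path. A minimal sketch of how an adapter like this one is typically produced, assuming the unsloth package; the LoRA rank and other hyperparameters are illustrative guesses, with only the base model and target modules taken from the diff below:

    # Minimal sketch, not the author's exact script: hyperparameters are assumptions.
    from unsloth import FastLanguageModel

    model, tokenizer = FastLanguageModel.from_pretrained(
        model_name="unsloth/tinyllama-bnb-4bit",  # base model named in the new config
        load_in_4bit=True,
    )
    model = FastLanguageModel.get_peft_model(
        model,
        r=16,  # assumed rank; the diff excerpt does not show "r"
        target_modules=["q_proj", "k_proj", "v_proj", "o_proj",
                        "gate_proj", "up_proj", "down_proj"],
    )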

Files changed (2)
  1. adapter_config.json +8 -8
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,11 +1,11 @@
 {
   "alpha_pattern": {},
   "auto_mapping": {
-    "base_model_class": "Qwen3ForCausalLM",
-    "parent_library": "transformers.models.qwen3.modeling_qwen3",
+    "base_model_class": "LlamaForCausalLM",
+    "parent_library": "transformers.models.llama.modeling_llama",
     "unsloth_fixed": true
   },
-  "base_model_name_or_path": "unsloth/qwen3-14b-unsloth-bnb-4bit",
+  "base_model_name_or_path": "unsloth/tinyllama-bnb-4bit",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -29,13 +29,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "up_proj",
-    "gate_proj",
     "v_proj",
-    "q_proj",
     "down_proj",
-    "o_proj"
+    "up_proj",
+    "q_proj",
+    "k_proj",
+    "o_proj",
+    "gate_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a69c0d515122512d85bbdd59552f009d53f6048ab50adf871cc06a4275e106fb
-size 513877864
+oid sha256:93c5d386849a9ddadbbfb9b9ebb7bec8f502a51a89c1cfdcd4a457b776cc13aa
+size 100966336
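
The weights file is stored via Git LFS, so the repository tracks only this pointer; the sha256 oid identifies the actual blob, whose size drops from ~514 MB to ~101 MB with the smaller base model. A minimal sketch for verifying a downloaded copy against the pointer, assuming the file sits in the working directory:

    # Minimal sketch: verify a downloaded adapter_model.safetensors against the
    # sha256 oid in the Git LFS pointer above. The local path is an assumption.
    import hashlib

    EXPECTED = "93c5d386849a9ddadbbfb9b9ebb7bec8f502a51a89c1cfdcd4a457b776cc13aa"

    digest = hashlib.sha256()
    with open("adapter_model.safetensors", "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    assert digest.hexdigest() == EXPECTED, "file does not match the LFS pointer"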