mahmoudOmar03 committed on
Commit
e1a2e75
·
verified ·
1 Parent(s): 1e1546c

Upload model trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

Files changed (2) hide show
  1. adapter_config.json +9 -8
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -2,7 +2,8 @@
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
  "base_model_class": "Qwen3ForCausalLM",
5
- "parent_library": "transformers.models.qwen3.modeling_qwen3"
 
6
  },
7
  "base_model_name_or_path": "unsloth/Qwen3-14B-unsloth-bnb-4bit",
8
  "bias": "none",
@@ -28,16 +29,16 @@
28
  "rank_pattern": {},
29
  "revision": null,
30
  "target_modules": [
31
- "o_proj",
32
- "k_proj",
33
- "up_proj",
34
- "q_proj",
35
- "gate_proj",
36
  "down_proj",
37
- "v_proj"
 
 
 
 
38
  ],
39
  "target_parameters": null,
40
- "task_type": null,
41
  "trainable_token_indices": null,
42
  "use_dora": false,
43
  "use_qalora": false,
 
2
  "alpha_pattern": {},
3
  "auto_mapping": {
4
  "base_model_class": "Qwen3ForCausalLM",
5
+ "parent_library": "transformers.models.qwen3.modeling_qwen3",
6
+ "unsloth_fixed": true
7
  },
8
  "base_model_name_or_path": "unsloth/Qwen3-14B-unsloth-bnb-4bit",
9
  "bias": "none",
 
29
  "rank_pattern": {},
30
  "revision": null,
31
  "target_modules": [
32
+ "v_proj",
 
 
 
 
33
  "down_proj",
34
+ "gate_proj",
35
+ "q_proj",
36
+ "up_proj",
37
+ "k_proj",
38
+ "o_proj"
39
  ],
40
  "target_parameters": null,
41
+ "task_type": "CAUSAL_LM",
42
  "trainable_token_indices": null,
43
  "use_dora": false,
44
  "use_qalora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e6f0f5de78f1f8511d22041f75877b084b3c5d7a2c3760bbba0217e1aeabdd37
3
  size 513877864
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5cd57c96690453749091e12edb884b303855897fb4f42d2df94d0f32eff0b0f5
3
  size 513877864