Idrinth committed
Commit 1bafa96 · verified · 1 Parent(s): 495eca5

Upload model trained with Unsloth

Upload model trained with Unsloth 2x faster
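
The adapter uploaded here is the usual output of an Unsloth LoRA fine-tune. Below is a minimal sketch of that kind of workflow; the rank, alpha, sequence length, and the use of the text-only FastLanguageModel entry point are assumptions (the commit records no training hyperparameters, and this multimodal checkpoint may instead require Unsloth's FastModel/FastVisionModel path). The target_modules list mirrors the updated adapter_config.json shown further down.

from unsloth import FastLanguageModel

# Base model taken from the updated adapter_config.json below.
model, tokenizer = FastLanguageModel.from_pretrained(
    model_name="unsloth/mistral-small-3.2-24b-instruct-2506-unsloth-bnb-4bit",
    max_seq_length=2048,   # assumption: not recorded in this commit
    load_in_4bit=True,
)

# Attach LoRA adapters. target_modules matches the new config;
# r and lora_alpha are placeholders.
model = FastLanguageModel.get_peft_model(
    model,
    r=16,
    lora_alpha=16,
    target_modules=[
        "up_proj", "v_proj", "gate_proj",
        "down_proj", "k_proj", "q_proj", "o_proj",
    ],
)

# ... train (e.g. with trl's SFTTrainer), then save the adapter ...
model.save_pretrained("adapter")
tokenizer.save_pretrained("adapter")

Saving or pushing the trained adapter produces exactly the two adapter files changed in this commit.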

Files changed (2)
  1. adapter_config.json +11 -7
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,7 +1,11 @@
 {
   "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/mistral-7b-instruct-v0.3-bnb-4bit",
+  "auto_mapping": {
+    "base_model_class": "Mistral3ForConditionalGeneration",
+    "parent_library": "transformers.models.mistral3.modeling_mistral3",
+    "unsloth_fixed": true
+  },
+  "base_model_name_or_path": "unsloth/mistral-small-3.2-24b-instruct-2506-unsloth-bnb-4bit",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,13 +29,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "o_proj",
-    "down_proj",
-    "v_proj",
     "up_proj",
+    "v_proj",
     "gate_proj",
-    "k_proj"
+    "down_proj",
+    "k_proj",
+    "q_proj",
+    "o_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b12730a8ddb75eccc583adbe6c1860ac90b3631adcbfce0426347a04d192c95b
-size 83945296
+oid sha256:5c9d6e56b79808cc3c63ff4d1ca88f61f8cbe63a74b0eacca19d40dfbf883b38
+size 203037224
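
adapter_model.safetensors is stored via Git LFS, so the diff only shows the pointer: the sha256 oid and the byte size (about 194 MiB now, up from roughly 80 MiB, consistent with adapting a 24B rather than a 7B base). A quick sketch for checking that a downloaded copy matches the new pointer:

import hashlib
import os

# Values copied from the new LFS pointer in this commit.
EXPECTED_OID = "5c9d6e56b79808cc3c63ff4d1ca88f61f8cbe63a74b0eacca19d40dfbf883b38"
EXPECTED_SIZE = 203037224  # bytes

path = "adapter_model.safetensors"  # local copy pulled from the repo

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("adapter_model.safetensors matches the LFS pointer")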