Update adapter_config.json
Edit to remove `auto_mapping` and set `task_type` to `CAUSAL_LM` to work with the Unsloth autoloader
- adapter_config.json +2 -5
adapter_config.json
CHANGED
|
@@ -1,9 +1,6 @@
|
|
| 1 |
{
|
| 2 |
"alpha_pattern": {},
|
| 3 |
-
"auto_mapping":
|
| 4 |
-
"base_model_class": "LlamaForCausalLM",
|
| 5 |
-
"parent_library": "transformers.models.llama.modeling_llama"
|
| 6 |
-
},
|
| 7 |
"base_model_name_or_path": "HuggingFaceTB/SmolLM2-1.7B-Instruct",
|
| 8 |
"bias": "none",
|
| 9 |
"fan_in_fan_out": false,
|
|
@@ -34,7 +31,7 @@
|
|
| 34 |
"o_proj",
|
| 35 |
"q_proj"
|
| 36 |
],
|
| 37 |
-
"task_type":
|
| 38 |
"use_dora": false,
|
| 39 |
"use_rslora": false
|
| 40 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"alpha_pattern": {},
|
| 3 |
+
"auto_mapping": null,
|
|
|
|
|
|
|
|
|
|
| 4 |
"base_model_name_or_path": "HuggingFaceTB/SmolLM2-1.7B-Instruct",
|
| 5 |
"bias": "none",
|
| 6 |
"fan_in_fan_out": false,
|
|
|
|
| 31 |
"o_proj",
|
| 32 |
"q_proj"
|
| 33 |
],
|
| 34 |
+
"task_type": "CAUSAL_LM",
|
| 35 |
"use_dora": false,
|
| 36 |
"use_rslora": false
|
| 37 |
}
|