mark-22 committed on
Commit
08ca093
·
verified ·
1 Parent(s): e70f724

Update adapter_config.json

Browse files
Files changed (1) hide show
  1. adapter_config.json +0 -13
adapter_config.json CHANGED
@@ -1,7 +1,5 @@
1
  {
2
- "alora_invocation_tokens": null,
3
  "alpha_pattern": {},
4
- "arrow_config": null,
5
  "auto_mapping": {
6
  "base_model_class": "Qwen3ForCausalLM",
7
  "parent_library": "transformers.models.qwen3.modeling_qwen3",
@@ -9,29 +7,20 @@
9
  },
10
  "base_model_name_or_path": "unsloth/qwen3-4b-instruct-2507-unsloth-bnb-4bit",
11
  "bias": "none",
12
- "corda_config": null,
13
  "ensure_weight_tying": false,
14
- "eva_config": null,
15
- "exclude_modules": null,
16
  "fan_in_fan_out": false,
17
  "inference_mode": true,
18
  "init_lora_weights": true,
19
- "layer_replication": null,
20
- "layers_pattern": null,
21
- "layers_to_transform": null,
22
  "loftq_config": {},
23
  "lora_alpha": 128,
24
  "lora_bias": false,
25
  "lora_dropout": 0.0,
26
- "megatron_config": null,
27
  "megatron_core": "megatron.core",
28
- "modules_to_save": null,
29
  "peft_type": "LORA",
30
  "peft_version": "0.18.1",
31
  "qalora_group_size": 16,
32
  "r": 64,
33
  "rank_pattern": {},
34
- "revision": null,
35
  "target_modules": [
36
  "down_proj",
37
  "k_proj",
@@ -41,9 +30,7 @@
41
  "q_proj",
42
  "gate_proj"
43
  ],
44
- "target_parameters": null,
45
  "task_type": "CAUSAL_LM",
46
- "trainable_token_indices": null,
47
  "use_dora": false,
48
  "use_qalora": false,
49
  "use_rslora": false
 
1
  {
 
2
  "alpha_pattern": {},
 
3
  "auto_mapping": {
4
  "base_model_class": "Qwen3ForCausalLM",
5
  "parent_library": "transformers.models.qwen3.modeling_qwen3",
 
7
  },
8
  "base_model_name_or_path": "unsloth/qwen3-4b-instruct-2507-unsloth-bnb-4bit",
9
  "bias": "none",
 
10
  "ensure_weight_tying": false,
 
 
11
  "fan_in_fan_out": false,
12
  "inference_mode": true,
13
  "init_lora_weights": true,
 
 
 
14
  "loftq_config": {},
15
  "lora_alpha": 128,
16
  "lora_bias": false,
17
  "lora_dropout": 0.0,
 
18
  "megatron_core": "megatron.core",
 
19
  "peft_type": "LORA",
20
  "peft_version": "0.18.1",
21
  "qalora_group_size": 16,
22
  "r": 64,
23
  "rank_pattern": {},
 
24
  "target_modules": [
25
  "down_proj",
26
  "k_proj",
 
30
  "q_proj",
31
  "gate_proj"
32
  ],
 
33
  "task_type": "CAUSAL_LM",
 
34
  "use_dora": false,
35
  "use_qalora": false,
36
  "use_rslora": false