fumiama committed
Commit e3e99c8 · verified · 1 Parent(s): 7af2e31

revert: to older version

Files changed (1)
  1. adapter_config.json +5 -18
adapter_config.json CHANGED
@@ -1,16 +1,8 @@
 {
-  "alora_invocation_tokens": null,
   "alpha_pattern": {},
-  "arrow_config": null,
-  "auto_mapping": {
-    "base_model_class": "LlamaForCausalLM",
-    "parent_library": "transformers.models.llama.modeling_llama",
-    "unsloth_fixed": true
-  },
+  "auto_mapping": null,
   "base_model_name_or_path": "meta-llama/Meta-Llama-3.1-8B-Instruct",
   "bias": "none",
-  "corda_config": null,
-  "ensure_weight_tying": false,
   "eva_config": null,
   "exclude_modules": null,
   "fan_in_fan_out": false,
@@ -27,24 +19,19 @@
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "peft_version": "0.18.0",
-  "qalora_group_size": 16,
   "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
+    "v_proj",
     "up_proj",
-    "q_proj",
-    "k_proj",
     "gate_proj",
     "down_proj",
-    "v_proj"
+    "o_proj",
+    "q_proj",
+    "k_proj"
   ],
-  "target_parameters": null,
   "task_type": "CAUSAL_LM",
-  "trainable_token_indices": null,
   "use_dora": false,
-  "use_qalora": false,
   "use_rslora": false
 }
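
For reference, the reverted adapter_config.json describes a plain PEFT LoRA adapter. Below is a minimal sketch of an equivalent LoraConfig, assuming the Hugging Face peft library and using only the fields visible in this diff; fields in the unchanged middle of the file (lora_alpha, lora_dropout, etc.) are omitted.

# Minimal sketch, assuming the Hugging Face peft library.
# Only fields visible in this diff are set; values mirror the reverted file.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # "r": 16
    bias="none",               # "bias": "none"
    task_type="CAUSAL_LM",     # "task_type": "CAUSAL_LM"
    target_modules=[           # order as in the reverted "target_modules"
        "v_proj",
        "up_proj",
        "gate_proj",
        "down_proj",
        "o_proj",
        "q_proj",
        "k_proj",
    ],
    use_dora=False,            # "use_dora": false
    use_rslora=False,          # "use_rslora": false
)
# "base_model_name_or_path" ("meta-llama/Meta-Llama-3.1-8B-Instruct") is recorded
# in adapter_config.json automatically when the adapter is attached to a base
# model and saved; it is not passed to the constructor here.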