AmrBelal021 committed
Commit 744e04c · verified · 1 Parent(s): 7d5f913

Upload adapter_config.json

Files changed (1)
  1. adapter_config.json +8 -42
adapter_config.json CHANGED
@@ -1,47 +1,13 @@
 {
-  "alora_invocation_tokens": null,
-  "alpha_pattern": {},
-  "arrow_config": null,
-  "auto_mapping": {
-    "base_model_class": "LlamaForCausalLM",
-    "parent_library": "transformers.models.llama.modeling_llama",
-    "unsloth_fixed": true
-  },
   "base_model_name_or_path": "unsloth/meta-llama-3.1-8b-instruct-bnb-4bit",
-  "bias": "none",
-  "corda_config": null,
-  "ensure_weight_tying": false,
-  "eva_config": null,
-  "exclude_modules": null,
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_bias": false,
-  "lora_dropout": 0,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
   "peft_type": "LORA",
-  "peft_version": "0.18.1",
-  "qalora_group_size": 16,
-  "r": 16,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "q_proj",
-    "v_proj",
-    "k_proj",
-    "o_proj"
-  ],
-  "target_parameters": null,
   "task_type": "CAUSAL_LM",
-  "trainable_token_indices": null,
-  "use_dora": false,
-  "use_qalora": false,
-  "use_rslora": false
+  "r": 16,
+  "target_modules": ["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"],
+  "lora_alpha": 32,
+  "lora_dropout": 0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "modules_to_save": null,
+  "init_lora_weights": true
 }
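The trimmed file keeps only the fields that describe the LoRA adapter itself: rank 16, alpha 32, no dropout, and adapters on every attention and MLP projection of the base model. For reference, the following is a minimal sketch of building and saving an equivalent config with the peft library; the output directory name is illustrative, and the exact set of keys peft writes back to adapter_config.json depends on the installed peft version.

from peft import LoraConfig

# LoRA hyperparameters mirroring the updated adapter_config.json:
# rank 16, alpha 32, no dropout, adapters on all attention and MLP projections.
lora_config = LoraConfig(
    r=16,
    lora_alpha=32,
    lora_dropout=0.0,
    bias="none",
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"],
    modules_to_save=None,
    init_lora_weights=True,
    task_type="CAUSAL_LM",
    base_model_name_or_path="unsloth/meta-llama-3.1-8b-instruct-bnb-4bit",
)

# Serializes the config as adapter_config.json in the given directory
# ("./adapter" is an illustrative path, not one from this repository).
lora_config.save_pretrained("./adapter")

Assuming the base model is already loaded (for example via transformers or unsloth), a trained adapter saved alongside such a config can then be attached with peft.PeftModel.from_pretrained(base_model, "./adapter") for inference.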