Annie0430 committed
Commit 443f436 · verified · 1 Parent(s): b30ded0

Upload test_whole_process/server/ckpt/client_client_1_round_2_model_0/adapter_config.json with huggingface_hub
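For context, this is the default commit message that huggingface_hub generates for a programmatic upload ("Upload <path_in_repo> with huggingface_hub"). A minimal sketch of how such a commit can be produced; the repo id below is a hypothetical placeholder, since the target repository is not shown on this page:

# Sketch only: repo_id is hypothetical; paths mirror the file shown in this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="test_whole_process/server/ckpt/client_client_1_round_2_model_0/adapter_config.json",
    path_in_repo="test_whole_process/server/ckpt/client_client_1_round_2_model_0/adapter_config.json",
    repo_id="Annie0430/test_whole_process",  # hypothetical repo id
)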

test_whole_process/server/ckpt/client_client_1_round_2_model_0/adapter_config.json CHANGED
@@ -1,34 +1,17 @@
 {
-  "alpha_pattern": {},
-  "auto_mapping": null,
   "base_model_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
   "bias": "none",
-  "corda_config": null,
-  "eva_config": null,
-  "exclude_modules": null,
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
   "lora_alpha": 16,
-  "lora_bias": false,
   "lora_dropout": 0.05,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 8,
-  "rank_pattern": {},
-  "revision": null,
   "target_modules": [
     "q_proj",
     "v_proj"
   ],
-  "task_type": "CAUSAL_LM",
-  "trainable_token_indices": null,
-  "use_dora": false,
-  "use_rslora": false
+  "task_type": "CAUSAL_LM"
 }
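After the change, the config keeps only the core LoRA fields: rank r=8, lora_alpha=16, dropout 0.05 on the q_proj and v_proj projections of meta-llama/Llama-3.1-8B-Instruct. Every removed key held its PEFT default value (e.g. "use_dora": false, "loftq_config": {}), so dropping them does not alter the adapter's behavior; absent fields fall back to the library defaults on load. A minimal sketch of reading the trimmed file back with PEFT, assuming the checkpoint directory is available locally at the same relative path:

# Sketch only, assuming the checkpoint directory exists at this relative path.
# PeftConfig.from_pretrained dispatches on "peft_type" and returns a LoraConfig here.
from peft import PeftConfig

cfg = PeftConfig.from_pretrained(
    "test_whole_process/server/ckpt/client_client_1_round_2_model_0"
)
print(cfg.base_model_name_or_path)  # meta-llama/Llama-3.1-8B-Instruct
print(cfg.r, cfg.lora_alpha, cfg.target_modules)  # rank 8, alpha 16, q_proj/v_proj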