Update config.json
config.json  CHANGED  +0 -35
@@ -32,39 +32,4 @@
   "transformers_version": "4.37.0",
   "use_cache": true,
   "vocab_size": 32000
-
-
-  "lora":{
-    "_num_labels": 2,
-    "alpha_pattern": {},
-    "auto_mapping": null,
-    "base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
-    "bias": "none",
-    "fan_in_fan_out": false,
-    "inference_mode": true,
-    "init_lora_weights": true,
-    "layers_pattern": null,
-    "layers_to_transform": null,
-    "loftq_config": {},
-    "lora_alpha": 32,
-    "lora_dropout": 0.05,
-    "megatron_config": null,
-    "megatron_core": "megatron.core",
-    "modules_to_save": null,
-    "peft_type": "LORA",
-    "r": 8,
-    "rank_pattern": {},
-    "revision": null,
-    "target_modules": [
-      "down_proj",
-      "q_proj",
-      "k_proj",
-      "up_proj",
-      "score",
-      "gate_proj",
-      "o_proj",
-      "v_proj"
-    ],
-    "task_type": "SEQ_CLS"
-  }
 }
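For context, the removed "lora" block mirrors a PEFT adapter configuration, which normally lives in a separate adapter_config.json rather than inside the base model's config.json; note also that, as embedded, the block followed "vocab_size": 32000 with no comma, so the file was not valid JSON, which may be why it was removed. Below is a minimal sketch of the equivalent configuration in Python, assuming the peft library; the values are taken verbatim from the diff above.

# Minimal sketch (assumption: the `peft` library is installed); this rebuilds
# the removed "lora" block as a LoraConfig object.
from peft import LoraConfig

lora_config = LoraConfig(
    base_model_name_or_path="mistralai/Mistral-7B-v0.1",
    task_type="SEQ_CLS",    # sequence classification, matching the diff
    r=8,                    # LoRA rank
    lora_alpha=32,          # scaling factor (effective scale alpha / r = 4)
    lora_dropout=0.05,
    bias="none",
    inference_mode=True,
    target_modules=[
        "down_proj", "q_proj", "k_proj", "up_proj",
        "score", "gate_proj", "o_proj", "v_proj",
    ],
)

# Saving the config writes adapter_config.json, the file where these
# settings would normally be stored alongside the adapter weights:
lora_config.save_pretrained("./adapter")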