ingeol committed
Commit d97328d · verified · 1 Parent(s): d6665bc

Training in progress, step 195

adapter_config.json CHANGED
@@ -1,20 +1,30 @@
  {
+ "alpha_pattern": {},
  "auto_mapping": null,
- "base_model_name_or_path": "EleutherAI/polyglot-ko-12.8b",
+ "base_model_name_or_path": "LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
+ "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
+ "loftq_config": {},
  "lora_alpha": 32,
  "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
+ "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "query_key_value"
+ "k_proj",
+ "v_proj",
+ "q_proj"
  ],
- "task_type": "CAUSAL_LM"
+ "task_type": "CAUSAL_LM",
+ "use_dora": false,
+ "use_rslora": false
  }
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e4d5ef9672c72bba1b7da5aace3c1fa233c946d226fa9840e372ab9f2ea9933
+ size 18901008
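adapter_model.safetensors is the adapter checkpoint itself; at roughly 19 MB it holds only the low-rank LoRA matrices, not the 7.8B base weights. A minimal sketch of loading it on top of the base model, assuming a placeholder path for this repository:

```python
# Sketch only: the adapter path below is a placeholder, not the actual repo id.
import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct"
adapter_path = "path/to/this-adapter-repo"  # placeholder: repo id or local checkout

tokenizer = AutoTokenizer.from_pretrained(base_id, trust_remote_code=True)
base = AutoModelForCausalLM.from_pretrained(
    base_id,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
)

# adapter_model.safetensors supplies only the LoRA deltas; base weights stay frozen.
model = PeftModel.from_pretrained(base, adapter_path)
model.eval()
```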
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:16da61c2e485cd70fb7912d66e349db28c669677de03162a495efe976d2bfb9e
- size 4027
+ oid sha256:a99b8ec467340e3cee8ec04127bcd577140b811f2a872eca66bf6eb406a1020c
+ size 5176