texanrangee committed
Commit 519c670 · verified · Parent(s): 5f60b4a

Best model so far - eval_loss: 0.5032 (Trained with Unsloth)

Files changed (2):
  1. adapter_config.json +7 -7
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/Qwen2-7B-Instruct",
+  "base_model_name_or_path": "unsloth/gemma-1.1-2b-it",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -11,7 +11,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 128,
-  "lora_dropout": 0.05,
+  "lora_dropout": 0.1,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": [
@@ -22,13 +22,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "gate_proj",
     "up_proj",
-    "o_proj",
-    "down_proj",
     "v_proj",
-    "q_proj"
+    "down_proj",
+    "o_proj",
+    "q_proj",
+    "gate_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:13d7145a7782881bac14be188dcf333e2abefb739484b81b972e7fe4981408e3
-size 1735970568
+oid sha256:6d84613a15020c2a788bb9b7d8b3ad1e0bee4bafacf2c3c61db1b40282ea3db6
+size 1362396360
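For anyone pulling this revision, a hedged sketch of attaching the updated adapter weights to the swapped-in base model via PEFT is below; "<user>/<adapter-repo>" is a placeholder for this repository's Hub id.

# Sketch: load the new base model and attach this adapter with PEFT.
# Assumption: "<user>/<adapter-repo>" is a placeholder for this repo's Hub id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("unsloth/gemma-1.1-2b-it")
tokenizer = AutoTokenizer.from_pretrained("unsloth/gemma-1.1-2b-it")
model = PeftModel.from_pretrained(base, "<user>/<adapter-repo>")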