Lacax committed on
Commit
0f741e0
·
verified ·
1 Parent(s): d725174

Upload model trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

Files changed (2) hide show
  1. adapter_config.json +2 -10
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -32,15 +32,7 @@
32
  "r": 32,
33
  "rank_pattern": {},
34
  "revision": null,
35
- "target_modules": [
36
- "o_proj",
37
- "k_proj",
38
- "v_proj",
39
- "up_proj",
40
- "q_proj",
41
- "gate_proj",
42
- "down_proj"
43
- ],
44
  "target_parameters": [
45
  "mlp.experts.gate_up_proj",
46
  "mlp.experts.down_proj"
@@ -49,5 +41,5 @@
49
  "trainable_token_indices": null,
50
  "use_dora": false,
51
  "use_qalora": false,
52
- "use_rslora": true
53
  }
 
32
  "r": 32,
33
  "rank_pattern": {},
34
  "revision": null,
35
+ "target_modules": "(?:.*?(?:vision|image|visual|patch|language|text).*?(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense).*?(?:q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj|qkv|proj|lin1|lin2).*?)|(?:\\bmodel\\.layers\\.[\\d]{1,}\\.(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense)\\.(?:(?:q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj|qkv|proj|lin1|lin2)))",
 
 
 
 
 
 
 
 
36
  "target_parameters": [
37
  "mlp.experts.gate_up_proj",
38
  "mlp.experts.down_proj"
 
41
  "trainable_token_indices": null,
42
  "use_dora": false,
43
  "use_qalora": false,
44
+ "use_rslora": false
45
  }
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a5d3513547d7b037bd1e080743c78163731dccbdec112af539f935f4cb9e0687
3
- size 691141456
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44827d1a99a988512419a1a29e2aae9a17cfbe121938595950c0144c4b03acba
3
+ size 18863392