Mouhamedamar committed
Commit b256a2e · verified · 1 Parent(s): e842738

Upload model trained with Unsloth

Upload model trained with Unsloth 2x faster

Files changed (2)
  1. adapter_config.json +5 -13
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -3,11 +3,11 @@
   "alpha_pattern": {},
   "arrow_config": null,
   "auto_mapping": {
-    "base_model_class": "DeepseekOCRForCausalLM",
-    "parent_library": "transformers_modules.deepseek_ocr.modeling_deepseekocr",
+    "base_model_class": "Lfm2VlForConditionalGeneration",
+    "parent_library": "transformers.models.lfm2_vl.modeling_lfm2_vl",
     "unsloth_fixed": true
   },
-  "base_model_name_or_path": "./deepseek_ocr",
+  "base_model_name_or_path": "LiquidAI/LFM2.5-VL-1.6B",
   "bias": "none",
   "corda_config": null,
   "ensure_weight_tying": false,
@@ -27,20 +27,12 @@
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "peft_version": "0.18.0",
+  "peft_version": "0.18.1",
   "qalora_group_size": 16,
   "r": 16,
   "rank_pattern": {},
   "revision": null,
-  "target_modules": [
-    "down_proj",
-    "k_proj",
-    "q_proj",
-    "o_proj",
-    "gate_proj",
-    "up_proj",
-    "v_proj"
-  ],
+  "target_modules": "(?:.*?(?:language|text).*?(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense).*?(?:k_proj|v_proj|q_proj|out_proj|fc1|fc2|in_proj|w1|w3|w2).*?)|(?:\\bmodel\\.layers\\.[\\d]{1,}\\.(?:self_attn|attention|attn|mlp|feed_forward|ffn|dense)\\.(?:(?:k_proj|v_proj|q_proj|out_proj|fc1|fc2|in_proj|w1|w3|w2)))",
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7b294f324ff48957841f7ee1071271c1e9cc117910ab6c5ddb0a90cc0a29d310
-size 310662536
+oid sha256:ebc4c03793468d36917217223b3b16a799ae5b400dca10665b7f7a04ff1224e1
+size 36590480
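For context, a minimal sketch of loading the updated adapter onto its new base model with PEFT. The adapter repo id is a placeholder (the commit page does not show it), and the Auto classes are assumptions based on the Lfm2VlForConditionalGeneration base class recorded in auto_mapping; only "LiquidAI/LFM2.5-VL-1.6B" comes from the config itself.

```python
from transformers import AutoProcessor, AutoModelForImageTextToText
from peft import PeftModel

BASE_ID = "LiquidAI/LFM2.5-VL-1.6B"        # from base_model_name_or_path
ADAPTER_ID = "Mouhamedamar/<adapter-repo>"  # hypothetical: actual repo id not shown here

# Load the base vision-language model, then apply the ~36 MB LoRA adapter on top.
processor = AutoProcessor.from_pretrained(BASE_ID)
base = AutoModelForImageTextToText.from_pretrained(BASE_ID)  # assumed Auto class for the Lfm2Vl* model
model = PeftModel.from_pretrained(base, ADAPTER_ID)
model.eval()
```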