ZTYikes committed on
Commit
94675b5
·
verified ·
1 Parent(s): 3b30b47

Upload nanoVLM using push_to_hub

Browse files
Files changed (2) hide show
  1. config.json +9 -9
  2. model.safetensors +2 -2
config.json CHANGED
@@ -9,29 +9,29 @@
9
  "vit_ln_eps": 1e-06,
10
  "vit_cls_flag": false,
11
  "vit_model_type": "google/siglip2-base-patch16-512",
12
- "lm_hidden_dim": 576,
13
- "lm_inter_dim": 1536,
14
  "lm_rms_eps": 1e-05,
15
  "lm_re_base": 100000,
16
  "lm_max_position_embeddings": 8192,
17
  "lm_base_vocab_size": 49152,
18
  "extra_token_amount": 66,
19
  "lm_vocab_size": 49218,
20
- "lm_n_heads": 9,
21
- "lm_n_kv_heads": 3,
22
  "lm_dropout": 0.0,
23
- "lm_n_blocks": 30,
24
  "lm_attn_scaling": 1.0,
25
- "lm_max_length": 256,
26
  "lm_use_tokens": false,
27
  "lm_tie_weights": true,
28
- "lm_model_type": "HuggingFaceTB/SmolLM2-135M",
29
  "lm_tokenizer": "HuggingFaceTB/SmolLM2-360M-Instruct",
30
  "lm_chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
31
  "mp_pixel_shuffle_factor": 4,
32
  "mp_image_token_length": 64,
33
- "max_img_size": 512,
34
- "resize_to_max_side_len": false,
35
  "vlm_extra_tokens": {
36
  "image_token": "<|image|>",
37
  "global_image_token": "<|global_image|>",
 
9
  "vit_ln_eps": 1e-06,
10
  "vit_cls_flag": false,
11
  "vit_model_type": "google/siglip2-base-patch16-512",
12
+ "lm_hidden_dim": 960,
13
+ "lm_inter_dim": 2560,
14
  "lm_rms_eps": 1e-05,
15
  "lm_re_base": 100000,
16
  "lm_max_position_embeddings": 8192,
17
  "lm_base_vocab_size": 49152,
18
  "extra_token_amount": 66,
19
  "lm_vocab_size": 49218,
20
+ "lm_n_heads": 15,
21
+ "lm_n_kv_heads": 5,
22
  "lm_dropout": 0.0,
23
+ "lm_n_blocks": 32,
24
  "lm_attn_scaling": 1.0,
25
+ "lm_max_length": 4096,
26
  "lm_use_tokens": false,
27
  "lm_tie_weights": true,
28
+ "lm_model_type": "HuggingFaceTB/SmolLM2-360M-Instruct",
29
  "lm_tokenizer": "HuggingFaceTB/SmolLM2-360M-Instruct",
30
  "lm_chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
31
  "mp_pixel_shuffle_factor": 4,
32
  "mp_image_token_length": 64,
33
+ "max_img_size": 2048,
34
+ "resize_to_max_side_len": true,
35
  "vlm_extra_tokens": {
36
  "image_token": "<|image|>",
37
  "global_image_token": "<|global_image|>",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4f71bb4db4cb339669078ad8820ab3f1fe580ebeacd637f493102baacc5f18b6
3
- size 912301840
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a3191fce850b9854588ebf562719fb19517bbd2b818d9d0d81f178cda6559db3
3
+ size 1840504504