Upload DualTowerVLM using push_to_hub
Browse files
- config.json +1 -1
- model.safetensors +1 -1
config.json
CHANGED
|
@@ -23,7 +23,7 @@
|
|
| 23 |
"lm_n_blocks": 30,
|
| 24 |
"lm_attn_scaling": 1.0,
|
| 25 |
"lm_pad_aware_rope": false,
|
| 26 |
- "lm_max_length":
|
| 27 |
"lm_use_tokens": false,
|
| 28 |
"lm_tie_weights": true,
|
| 29 |
"lm_model_type": "HuggingFaceTB/SmolLM2-135M-Instruct",
|
|
|
|
| 23 |
"lm_n_blocks": 30,
|
| 24 |
"lm_attn_scaling": 1.0,
|
| 25 |
"lm_pad_aware_rope": false,
|
| 26 |
+ "lm_max_length": 4096,
|
| 27 |
"lm_use_tokens": false,
|
| 28 |
"lm_tie_weights": true,
|
| 29 |
"lm_model_type": "HuggingFaceTB/SmolLM2-135M-Instruct",
|
model.safetensors
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
- oid sha256:
|
| 3 |
size 1450549888
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+ oid sha256:e932d9b4dc987854108fcee178119ebcb293575f6cef981b4dc7f0ba9296da3c
|
| 3 |
size 1450549888
|