# Commit f296584 (verified) by msluszniak:
# fix(v0.9.0): split text vs VL into <size>/ and vl_<size>/; colocate tokenizers per-size; drop Qwen3.5-* legacy dirs
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
qwen3_5_0_8b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
qwen3_5_2b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
Qwen3.5-0.8B/tokenizer.json filter=lfs diff=lfs merge=lfs -text
Qwen3.5-2B/tokenizer.json filter=lfs diff=lfs merge=lfs -text
Qwen3.5-0.8B/qwen3_5_vl_0_8b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
Qwen3.5-2B/qwen3_5_vl_2b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
0_8b/xnnpack/qwen_3_5_0_8b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
2b/xnnpack/qwen_3_5_2b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
0_8b/tokenizer.json filter=lfs diff=lfs merge=lfs -text
2b/tokenizer.json filter=lfs diff=lfs merge=lfs -text
vl_0_8b/xnnpack/qwen_3_5_vl_0_8b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
vl_0_8b/tokenizer.json filter=lfs diff=lfs merge=lfs -text
vl_2b/xnnpack/qwen_3_5_vl_2b_xnnpack_8da4w.pte filter=lfs diff=lfs merge=lfs -text
vl_2b/tokenizer.json filter=lfs diff=lfs merge=lfs -text