[
  {
    "id": "mlc-ai/Llama-3.1-8B-Instruct-q4f32_1-MLC",
    "name": "Llama 3.1 8B Instruct q4f32",
    "model_type": "llama",
    "architectures": ["llama"],
    "classification": "gen",
    "confidence": "high",
    "size_hint": "58MB",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": true,
    "isTransformersJsReady": true
  },
  {
    "id": "/models/google/gemma-2b/resolve/main",
    "name": "Gemma 2B (local /models/google/gemma-2b/resolve/main)",
    "model_type": "gemma",
    "architectures": ["gemma"],
    "classification": "gen",
    "confidence": "medium",
    "size_hint": "~2GB",
    "fetchStatus": "pending-local",
    "hasTokenizer": true,
    "hasOnnxModel": false,
    "isTransformersJsReady": false,
    "requiresAuth": false,
    "info": {
      "local_url": "/models/google/gemma-2b/resolve/main",
      "notes": "Place downloaded HF-style repo under ./models/google/gemma-2b so files resolve at /models/google/gemma-2b/resolve/main/..."
    }
  },
  {
    "id": "Xenova/llama2.c-stories15M",
    "name": "Llama2.c Stories 15M",
    "model_type": "llama",
    "architectures": ["llama"],
    "classification": "gen",
    "confidence": "high",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": true,
    "isTransformersJsReady": true,
    "info": {
      "display_name": "Llama2.c Stories 15M (Xenova)",
      "params": "15M",
      "params_count": 15000000,
      "architecture": "LLaMA-family (tiny conversion for stories use via Xenova toolchain)",
      "context_window": 2048,
      "quantization": "likely float16/uint8/packed formats supported by Xenova runtime",
      "hf_url": "https://huggingface.co/Xenova/llama2.c-stories15M",
      "recommended_runtime": "transformers.js / Xenova runtime (wasm/webgpu)",
      "is_mobile_capable": true,
      "verified": false,
      "assumed": true,
      "notes": "Original curated metadata preserved; tokenizer/ONNX flags assumed for Xenova-converted tiny model."
    }
  },
  {
    "id": "Xenova/phi-3-mini-4k-instruct",
    "name": "phi-3-mini-4k-instruct",
    "model_type": "phi3",
    "architectures": ["phi3"],
    "classification": "gen",
    "confidence": "high",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": true,
    "isTransformersJsReady": true,
    "info": {
      "display_name": "Phi-3 Mini 4k Instruct (Xenova)",
      "params": "mini (estimate)",
      "params_count": null,
      "architecture": "Phi family (Phi-3 mini conversion)",
      "context_window": 4096,
      "hf_url": "https://huggingface.co/Xenova/phi-3-mini-4k-instruct",
      "recommended_runtime": "transformers.js / Xenova runtime (wasm/webgpu)",
      "is_mobile_capable": true,
      "verified": false,
      "assumed": true,
      "notes": "Parameter count and asset layout should be verified against HF API."
    }
  },
  {
    "id": "Xenova/distilgpt2",
    "name": "distilgpt2",
    "model_type": "gpt2",
    "architectures": ["gpt2"],
    "classification": "gen",
    "confidence": "high",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": true,
    "isTransformersJsReady": true,
    "info": {
      "display_name": "DistilGPT-2 (Xenova mirror)",
      "params": "≈82M",
      "params_count": 82000000,
      "architecture": "GPT-2 distilled",
      "context_window": 1024,
      "hf_url": "https://huggingface.co/Xenova/distilgpt2",
      "recommended_runtime": "transformers.js",
      "is_mobile_capable": true,
      "verified": false,
      "assumed": true
    }
  },
  {
    "id": "Xenova/gpt2",
    "name": "gpt2",
    "model_type": "gpt2",
    "architectures": ["gpt2"],
    "classification": "gen",
    "confidence": "high",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": true,
    "size_hint": "500MB",
    "isTransformersJsReady": true,
    "info": {
      "display_name": "GPT-2 (Xenova mirror)",
      "params": "117M (base)",
      "params_count": 117000000,
      "architecture": "GPT-2",
      "context_window": 1024,
      "hf_url": "https://huggingface.co/Xenova/gpt2",
      "recommended_runtime": "transformers.js",
      "is_mobile_capable": true,
      "verified": false,
      "assumed": true
    }
  },
  {
    "id": "Xenova/qwen-2.5-small-instruct",
    "name": "qwen-2.5-small-instruct",
    "model_type": "qwen",
    "architectures": ["qwen"],
    "classification": "gen",
    "confidence": "medium",
    "fetchStatus": "ok",
    "hasTokenizer": true,
    "hasOnnxModel": false,
    "isTransformersJsReady": false,
    "requiresAuth": true,
    "info": {
      "display_name": "Qwen-2.5 Small Instruct (Xenova mirror)",
      "params": "≈2.5B (estimate)",
      "params_count": null,
      "architecture": "Qwen family",
      "context_window": 8192,
      "hf_url": "https://huggingface.co/Xenova/qwen-2.5-small-instruct",
      "recommended_runtime": "Xenova runtime / transformers.js (quantized)",
      "is_mobile_capable": false,
      "verified": false,
      "assumed": true,
      "notes": "Marked conservatively as not transformers.js-ready / no ONNX in HF assets."
    }
  }
]