{
"models": [
{
"alias": "chatgpt-5",
"hf_id": "ScottzillaSystems/ChatGPT-5",
"name": "ChatGPT-5",
"params": "494M",
"arch": "qwen2",
"task": "text-generation",
"capabilities": ["chat", "fast"],
"router_model": "Qwen/Qwen2.5-7B-Instruct",
"local_space": "ScottzillaSystems/ChatGPT-5-Chat",
"size_gb": 0.9
},
{
"alias": "cydonia-24b",
"hf_id": "ScottzillaSystems/Cydonia-24B-v4.1",
"name": "Cydonia 24B v4.1",
"params": "23.6B",
"arch": "mistral",
"task": "text-generation",
"capabilities": ["chat", "creative-writing", "roleplay"],
"router_model": "Qwen/Qwen3.5-27B",
"local_space": "ScottzillaSystems/Cydonia-24B-Chat",
"size_gb": 43.9
},
{
"alias": "qwen3.5-9b",
"hf_id": "ScottzillaSystems/Qwen3.5-9B",
"name": "Qwen3.5 9B",
"params": "9.7B",
"arch": "qwen3_5",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "multimodal", "262k-context"],
"router_model": "Qwen/Qwen3.5-9B",
"local_space": "ScottzillaSystems/Qwen3.5-9B-Chat",
"size_gb": 18.0
},
{
"alias": "qwen3.5-40b-uncensored",
"hf_id": "ScottzillaSystems/Qwen3.5-40B-Claude-4.6-Opus-Deckard-Heretic-Uncensored-Thinking",
"name": "Qwen3.5 40B Uncensored Thinking",
"params": "40B",
"arch": "qwen3_5",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "uncensored", "thinking", "reasoning"],
"size_gb": 80.0
},
{
"alias": "gemma4-31b-abliterated",
"hf_id": "ScottzillaSystems/Gemma-4-31B-JANG_4M-CRACK",
"name": "Gemma 4 31B Abliterated (MLX)",
"params": "31B",
"arch": "gemma4",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "abliterated", "uncensored", "mlx"],
"size_gb": 62.0
},
{
"alias": "gemma4-31b-gguf",
"hf_id": "ScottzillaSystems/Gemma-4-31B-it-abliterated",
"name": "Gemma 4 31B Abliterated (GGUF)",
"params": "31B",
"arch": "gemma4",
"task": "text-generation",
"capabilities": ["chat", "abliterated", "uncensored", "gguf", "ollama-compatible"],
"size_gb": 20.0
},
{
"alias": "qwen3-coder-abliterated",
"hf_id": "ScottzillaSystems/Huihui-Qwen3-Coder-Next-Opus-4.6-Reasoning-Distilled-abliterated",
"name": "Qwen3 Coder Next Abliterated",
"arch": "qwen3_next",
"task": "text-generation",
"capabilities": ["code", "reasoning", "abliterated", "uncensored"]
},
{
"alias": "qwen3-vl-8b-abliterated",
"hf_id": "ScottzillaSystems/Huihui-Qwen3-VL-8B-Instruct-abliterated",
"name": "Qwen3 VL 8B Abliterated",
"params": "8B",
"arch": "qwen3_vl",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "multimodal", "abliterated", "uncensored"],
"size_gb": 16.0
},
{
"alias": "qwen3.5-9b-abliterated",
"hf_id": "ScottzillaSystems/Huihui-Qwen3.5-9B-Claude-4.6-Opus-abliterated",
"name": "Qwen3.5 9B Claude Opus Abliterated",
"params": "9B",
"arch": "qwen3_5",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "abliterated", "uncensored", "opus-distilled"],
"size_gb": 18.0
},
{
"alias": "qwen3.6-27b-abliterated",
"hf_id": "ScottzillaSystems/Huihui-Qwen3.6-27B-abliterated",
"name": "Qwen3.6 27B Abliterated",
"params": "27B",
"arch": "qwen3_5",
"task": "image-text-to-text",
"capabilities": ["chat", "vision", "abliterated", "uncensored"],
"size_gb": 54.0
},
{
"alias": "qwen3.6-35b-abliterated",
"hf_id": "ScottzillaSystems/Huihui-Qwen3.6-35B-A3B-Claude-4.7-Opus-abliterated",
"name": "Qwen3.6 35B MoE Claude 4.7 Opus Abliterated",
"params": "35B (3B active)",
"arch": "qwen3_5_moe",
"task": "text-generation",
"capabilities": ["chat", "abliterated", "uncensored", "moe", "lora", "opus-distilled"],
"size_gb": 20.0
},
{
"alias": "qwen3.6-35b-mlx",
"hf_id": "ScottzillaSystems/Huihui-Qwen3.6-35B-A3B-Claude-4.7-Opus-abliterated-mlx-8bit",
"name": "Qwen3.6 35B MoE MLX 8-bit",
"params": "35B (3B active)",
"arch": "qwen3_5_moe",
"task": "text-generation",
"capabilities": ["chat", "abliterated", "mlx", "moe", "apple-silicon"],
"size_gb": 20.0
},
{
"alias": "supergemma4-abliterated",
"hf_id": "ScottzillaSystems/supergemma4-e4b-abliterated",
"name": "SuperGemma4 E4B Abliterated",
"arch": "gemma4",
"task": "text-generation",
"capabilities": ["chat", "abliterated"]
},
{
"alias": "fallen-command-111b",
"hf_id": "ScottzillaSystems/Fallen-Command-A-111B-v1_Compresses-Tensors",
"name": "Fallen Command-A 111B",
"params": "111B",
"arch": "command-a",
"task": "text-generation",
"capabilities": ["chat", "tool-use", "rag", "multilingual", "256k-context", "awq"],
"router_model": "CohereLabs/c4ai-command-a-03-2025",
"local_space": "ScottzillaSystems/Fallen-Command-A-111B-Chat"
},
{
"alias": "qwen-image-edit-nsfw",
"hf_id": "ScottzillaSystems/qwen-image-edit-plus-nsfw-lora",
"name": "Qwen Image Edit NSFW LoRA",
"arch": "qwen-image-edit",
"task": "image-to-image",
"capabilities": ["image-edit", "nsfw", "lora"],
"base_model": "Qwen/Qwen-Image-Edit-2511"
},
{
"alias": "qwen-image-edit-nsfw-v2",
"hf_id": "ScottzillaSystems/qwen-image-edit-plus-nsfw-lora2",
"name": "Qwen Image Edit NSFW LoRA v2",
"arch": "qwen-image-edit",
"task": "image-to-image",
"capabilities": ["image-edit", "nsfw", "lora"],
"base_model": "Qwen/Qwen-Image-Edit-2511"
}
],
"infrastructure": {
"searxng": "https://scottzillasystems-searxng.hf.space",
"ollama_server": "ScottzillaSystems/ollama-server",
"chat_spaces": [
"ScottzillaSystems/Cydonia-24B-Chat",
"ScottzillaSystems/ChatGPT-5-Chat",
"ScottzillaSystems/Qwen3.5-9B-Chat",
"ScottzillaSystems/Fallen-Command-A-111B-Chat"
]
}
}