{
  "models": [
    {
      "alias": "chatgpt-5",
      "hf_id": "ScottzillaSystems/ChatGPT-5",
      "name": "ChatGPT-5",
      "params": "494M",
      "arch": "qwen2",
      "task": "text-generation",
      "capabilities": ["chat", "fast"],
      "router_model": "Qwen/Qwen2.5-7B-Instruct",
      "local_space": "ScottzillaSystems/ChatGPT-5-Chat",
      "size_gb": 0.9
    },
    {
      "alias": "cydonia-24b",
      "hf_id": "ScottzillaSystems/Cydonia-24B-v4.1",
      "name": "Cydonia 24B v4.1",
      "params": "23.6B",
      "arch": "mistral",
      "task": "text-generation",
      "capabilities": ["chat", "creative-writing", "roleplay"],
      "router_model": "Qwen/Qwen3.5-27B",
      "local_space": "ScottzillaSystems/Cydonia-24B-Chat",
      "size_gb": 43.9
    },
    {
      "alias": "qwen3.5-9b",
      "hf_id": "ScottzillaSystems/Qwen3.5-9B",
      "name": "Qwen3.5 9B",
      "params": "9.7B",
      "arch": "qwen3_5",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "multimodal", "262k-context"],
      "router_model": "Qwen/Qwen3.5-9B",
      "local_space": "ScottzillaSystems/Qwen3.5-9B-Chat",
      "size_gb": 18.0
    },
    {
      "alias": "qwen3.5-40b-uncensored",
      "hf_id": "ScottzillaSystems/Qwen3.5-40B-Claude-4.6-Opus-Deckard-Heretic-Uncensored-Thinking",
      "name": "Qwen3.5 40B Uncensored Thinking",
      "params": "40B",
      "arch": "qwen3_5",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "uncensored", "thinking", "reasoning"],
      "size_gb": 80.0
    },
    {
      "alias": "gemma4-31b-abliterated",
      "hf_id": "ScottzillaSystems/Gemma-4-31B-JANG_4M-CRACK",
      "name": "Gemma 4 31B Abliterated (MLX)",
      "params": "31B",
      "arch": "gemma4",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "abliterated", "uncensored", "mlx"],
      "size_gb": 62.0
    },
    {
      "alias": "gemma4-31b-gguf",
      "hf_id": "ScottzillaSystems/Gemma-4-31B-it-abliterated",
      "name": "Gemma 4 31B Abliterated (GGUF)",
      "params": "31B",
      "arch": "gemma4",
      "task": "text-generation",
      "capabilities": ["chat", "abliterated", "uncensored", "gguf", "ollama-compatible"],
      "size_gb": 20.0
    },
    {
      "alias": "qwen3-coder-abliterated",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3-Coder-Next-Opus-4.6-Reasoning-Distilled-abliterated",
      "name": "Qwen3 Coder Next Abliterated",
      "arch": "qwen3_next",
      "task": "text-generation",
      "capabilities": ["code", "reasoning", "abliterated", "uncensored"]
    },
    {
      "alias": "qwen3-vl-8b-abliterated",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3-VL-8B-Instruct-abliterated",
      "name": "Qwen3 VL 8B Abliterated",
      "params": "8B",
      "arch": "qwen3_vl",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "multimodal", "abliterated", "uncensored"],
      "size_gb": 16.0
    },
    {
      "alias": "qwen3.5-9b-abliterated",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3.5-9B-Claude-4.6-Opus-abliterated",
      "name": "Qwen3.5 9B Claude Opus Abliterated",
      "params": "9B",
      "arch": "qwen3_5",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "abliterated", "uncensored", "opus-distilled"],
      "size_gb": 18.0
    },
    {
      "alias": "qwen3.6-27b-abliterated",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3.6-27B-abliterated",
      "name": "Qwen3.6 27B Abliterated",
      "params": "27B",
      "arch": "qwen3_5",
      "task": "image-text-to-text",
      "capabilities": ["chat", "vision", "abliterated", "uncensored"],
      "size_gb": 54.0
    },
    {
      "alias": "qwen3.6-35b-abliterated",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3.6-35B-A3B-Claude-4.7-Opus-abliterated",
      "name": "Qwen3.6 35B MoE Claude 4.7 Opus Abliterated",
      "params": "35B (3B active)",
      "arch": "qwen3_5_moe",
      "task": "text-generation",
      "capabilities": ["chat", "abliterated", "uncensored", "moe", "lora", "opus-distilled"],
      "size_gb": 20.0
    },
    {
      "alias": "qwen3.6-35b-mlx",
      "hf_id": "ScottzillaSystems/Huihui-Qwen3.6-35B-A3B-Claude-4.7-Opus-abliterated-mlx-8bit",
      "name": "Qwen3.6 35B MoE MLX 8-bit",
      "params": "35B (3B active)",
      "arch": "qwen3_5_moe",
      "task": "text-generation",
      "capabilities": ["chat", "abliterated", "mlx", "moe", "apple-silicon"],
      "size_gb": 20.0
    },
    {
      "alias": "supergemma4-abliterated",
      "hf_id": "ScottzillaSystems/supergemma4-e4b-abliterated",
      "name": "SuperGemma4 E4B Abliterated",
      "arch": "gemma4",
      "task": "text-generation",
      "capabilities": ["chat", "abliterated"]
    },
    {
      "alias": "fallen-command-111b",
      "hf_id": "ScottzillaSystems/Fallen-Command-A-111B-v1_Compresses-Tensors",
      "name": "Fallen Command-A 111B",
      "params": "111B",
      "arch": "command-a",
      "task": "text-generation",
      "capabilities": ["chat", "tool-use", "rag", "multilingual", "256k-context", "awq"],
      "router_model": "CohereLabs/c4ai-command-a-03-2025",
      "local_space": "ScottzillaSystems/Fallen-Command-A-111B-Chat"
    },
    {
      "alias": "qwen-image-edit-nsfw",
      "hf_id": "ScottzillaSystems/qwen-image-edit-plus-nsfw-lora",
      "name": "Qwen Image Edit NSFW LoRA",
      "arch": "qwen-image-edit",
      "task": "image-to-image",
      "capabilities": ["image-edit", "nsfw", "lora"],
      "base_model": "Qwen/Qwen-Image-Edit-2511"
    },
    {
      "alias": "qwen-image-edit-nsfw-v2",
      "hf_id": "ScottzillaSystems/qwen-image-edit-plus-nsfw-lora2",
      "name": "Qwen Image Edit NSFW LoRA v2",
      "arch": "qwen-image-edit",
      "task": "image-to-image",
      "capabilities": ["image-edit", "nsfw", "lora"],
      "base_model": "Qwen/Qwen-Image-Edit-2511"
    }
  ],
  "infrastructure": {
    "searxng": "https://scottzillasystems-searxng.hf.space",
    "ollama_server": "ScottzillaSystems/ollama-server",
    "chat_spaces": [
      "ScottzillaSystems/Cydonia-24B-Chat",
      "ScottzillaSystems/ChatGPT-5-Chat",
      "ScottzillaSystems/Qwen3.5-9B-Chat",
      "ScottzillaSystems/Fallen-Command-A-111B-Chat"
    ]
  }
}