Spaces:
Running
Running
Update app/providers.py
Browse files- app/providers.py +39 -0
app/providers.py
CHANGED
|
@@ -106,6 +106,45 @@ class BaseProvider:
|
|
| 106 |
# SECTION 2 — LLM Provider Implementations
|
| 107 |
# Only the API-specific parsing logic differs per provider.
|
| 108 |
# =============================================================================
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 109 |
|
| 110 |
class AnthropicProvider(BaseProvider):
|
| 111 |
"""Anthropic Claude API — Messages endpoint."""
|
|
|
|
| 106 |
# SECTION 2 — LLM Provider Implementations
|
| 107 |
# Only the API-specific parsing logic differs per provider.
|
| 108 |
# =============================================================================
|
| 109 |
+
# --- SmolLM2 (Custom Assistant Space) ----------------------------------------
class SmolLMProvider(BaseProvider):
    """
    SmolLM2 Custom Assistant Space — OpenAI-compatible, ADI routing included.
    Free tier on HF Spaces (CPU). Falls back to next provider on 503.
    Response includes extra 'adi' field with score + decision (ignored by hub).
    Deploy: https://github.com/VolkanSah/Multi-LLM-API-Gateway (smollm-space/)

    .pyfun block:
    [LLM_PROVIDER.smollm]
    active = "true"
    base_url = "https://codey-lab-SmolLM2-customs.hf.space/v1"
    env_key = "HF_TOKEN"
    default_model = "smollm2-360m"
    models = "smollm2-360m, codey-lab/model.universal-mcp-hub"
    fallback_to = "anthropic"
    [LLM_PROVIDER.smollm_END]
    """

    async def complete(self, prompt: str, model: str | None = None, max_tokens: int = 150) -> str:
        """Send a single-turn chat completion to the Space's OpenAI-compatible endpoint.

        Args:
            prompt: User message text, sent as one ``user``-role message.
            model: Model identifier; falls back to ``self.model`` when None.
            max_tokens: Completion length cap forwarded to the API.

        Returns:
            The assistant message text from the first choice.

        Raises:
            KeyError/IndexError if the response lacks the expected
            ``choices[0].message.content`` shape (propagated unhandled).
            Transport errors propagate from ``self._post`` — presumably the
            base class maps 503 to the configured fallback provider; confirm
            against BaseProvider.
        """
        # NOTE(review): fixed `model: str = None` → `str | None = None`
        # (implicit Optional is disallowed by PEP 484); runtime behavior
        # is unchanged.
        data = await self._post(
            f"{self.base_url}/chat/completions",
            headers={
                "Authorization": f"Bearer {self.key}",
                "content-type": "application/json",
            },
            payload={
                "model": model or self.model,
                "max_tokens": max_tokens,
                "messages": [{"role": "user", "content": prompt}],
            },
        )
        # OpenAI-compatible response shape: choices[0].message.content.
        return data["choices"][0]["message"]["content"]
| 148 |
|
| 149 |
class AnthropicProvider(BaseProvider):
|
| 150 |
"""Anthropic Claude API — Messages endpoint."""
|