import requests
from typing import List, Dict, Optional
from config import (
OPENROUTER_API_KEY,
OPENROUTER_BASE_URL,
TEACHER_MODEL,
TEACHER_FALLBACK_MODELS,
TEACHER_TEMPERATURE,
TEACHER_MAX_TOKENS,
)
class TeacherModel:
    """
    OpenRouter teacher client with:
    - Primary model
    - Fallback models
    - Detailed error logging

    Diagnostics from the most recent attempt are kept on the ``last_*``
    attributes so callers can inspect why a request failed.
    """

    def __init__(self):
        self.api_key = OPENROUTER_API_KEY
        self.base_url = OPENROUTER_BASE_URL
        # One Session so connections are reused across primary/fallback attempts.
        self.session = requests.Session()
        # Per-attempt diagnostics, updated on every call.
        self.last_status_code: Optional[int] = None
        self.last_error: str = ""
        self.last_model_tried: str = ""
        self.last_response_text: str = ""

    def is_available(self) -> bool:
        """Return True when an OpenRouter API key is configured."""
        return bool(self.api_key)

    def _call_model(
        self,
        model_id: str,
        messages: List[Dict],
        temperature: float,
        max_tokens: int
    ) -> Optional[str]:
        """
        Make a single non-streaming chat-completion request to one model.

        Returns the assistant message content, or None on any failure
        (transport error, non-200 status, API error payload, empty or
        missing content). Failure details are recorded in ``last_error``,
        ``last_status_code`` and ``last_response_text``.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
            "Accept": "application/json",
            # recommended by OpenRouter
            "HTTP-Referer": "https://huggingface.co/spaces/vedaco/veda-programming",
            "X-Title": "Veda Programming Assistant",
        }
        payload = {
            "model": model_id,
            "messages": messages,
            "temperature": float(temperature),
            "max_tokens": int(max_tokens),
            "stream": False
        }
        try:
            r = self.session.post(self.base_url, headers=headers, json=payload, timeout=60)
        except requests.RequestException as e:
            # Bug fix: a timeout/connection error previously propagated out of
            # ask()'s loop, so fallback models were never tried.
            self.last_status_code = None
            self.last_response_text = ""
            self.last_error = f"Request exception: {e}"
            return None
        self.last_status_code = r.status_code
        self.last_response_text = r.text[:2000]  # truncated to keep diagnostics small
        if r.status_code != 200:
            try:
                err = r.json()
            except Exception:
                err = {"raw": r.text}
            self.last_error = f"HTTP {r.status_code}: {err}"
            return None
        try:
            data = r.json()
        except Exception as e:
            # Bug fix: a 200 response with a non-JSON body used to raise here.
            self.last_error = f"Invalid JSON in response: {e}"
            return None
        if "error" in data:
            # OpenRouter can return HTTP 200 with an error payload.
            self.last_error = f"OpenRouter error field: {data['error']}"
            return None
        choices = data.get("choices") or []
        if not choices:
            self.last_error = f"No choices in response: {data}"
            return None
        msg = choices[0].get("message") or {}
        content = msg.get("content", "")
        if not content:
            self.last_error = f"Empty content: {data}"
            return None
        self.last_error = ""
        return content

    def ask(
        self,
        user_message: str,
        conversation_history: Optional[List[Dict[str, str]]] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None
    ) -> Optional[str]:
        """
        Ask the teacher, trying the primary model then each fallback.

        Args:
            user_message: The new user prompt.
            conversation_history: Optional prior turns as
                ``{"role": ..., "content": ...}`` dicts; only the last 10
                non-empty entries are forwarded to bound prompt size.
            temperature: Sampling temperature; defaults to TEACHER_TEMPERATURE.
            max_tokens: Completion token cap; defaults to TEACHER_MAX_TOKENS.

        Returns:
            The assistant reply, or None if no API key is configured or
            every candidate model failed (details in ``last_error``).
        """
        if not self.is_available():
            self.last_error = "OPENROUTER_API_KEY missing"
            print("[Teacher] Missing OPENROUTER_API_KEY")
            return None
        temperature = TEACHER_TEMPERATURE if temperature is None else float(temperature)
        max_tokens = TEACHER_MAX_TOKENS if max_tokens is None else int(max_tokens)
        messages = [
            {
                "role": "system",
                "content": (
                    "You are a helpful programming assistant. "
                    "Answer clearly. When asked to write code, output correct Python code. "
                    "Use markdown code blocks like ```python ... ```."
                )
            }
        ]
        if conversation_history:
            # Forward only the most recent turns; skip entries with no content.
            for m in conversation_history[-10:]:
                role = m.get("role", "user")
                content = m.get("content", "")
                if content:
                    messages.append({"role": role, "content": content})
        messages.append({"role": "user", "content": user_message})
        candidates = [TEACHER_MODEL] + list(TEACHER_FALLBACK_MODELS)
        for model_id in candidates:
            self.last_model_tried = model_id
            out = self._call_model(model_id, messages, temperature, max_tokens)
            if out:
                return out
            print(f"[Teacher] Failed model {model_id}: {self.last_error}")
        return None
teacher = TeacherModel() |