# NOTE(review): the lines below were page-scrape residue from the Hugging Face
# Space listing (status banner "Spaces: Sleeping"); kept here as a comment.
| import requests | |
| from typing import List, Dict, Optional | |
| from config import ( | |
| OPENROUTER_API_KEY, | |
| OPENROUTER_BASE_URL, | |
| TEACHER_MODEL, | |
| TEACHER_FALLBACK_MODELS, | |
| TEACHER_TEMPERATURE, | |
| TEACHER_MAX_TOKENS, | |
| ) | |
class TeacherModel:
    """
    OpenRouter teacher client with:
      - Primary model
      - Fallback models
      - Detailed error logging

    Diagnostics from the most recent request attempt are recorded on the
    ``last_*`` attributes so callers can inspect why a call failed.
    """

    def __init__(self):
        self.api_key = OPENROUTER_API_KEY
        self.base_url = OPENROUTER_BASE_URL
        # One shared Session gives connection pooling across calls.
        self.session = requests.Session()
        # Diagnostics for the most recent attempt.
        self.last_status_code: Optional[int] = None
        self.last_error: str = ""
        self.last_model_tried: str = ""
        self.last_response_text: str = ""

    def is_available(self) -> bool:
        """Return True when an OpenRouter API key is configured."""
        return bool(self.api_key)

    def _call_model(
        self,
        model_id: str,
        messages: List[Dict],
        temperature: float,
        max_tokens: int,
    ) -> Optional[str]:
        """
        POST a single chat-completion request for ``model_id``.

        Returns the assistant message content on success, or None on any
        failure (network error, non-200 status, API ``error`` field,
        missing choices, empty content). Failure details are stored in
        ``self.last_*`` for the caller to log.
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.api_key}",
            "Accept": "application/json",
            # recommended by OpenRouter for app attribution
            "HTTP-Referer": "https://huggingface.co/spaces/vedaco/veda-programming",
            "X-Title": "Veda Programming Assistant",
        }
        payload = {
            "model": model_id,
            "messages": messages,
            "temperature": float(temperature),
            "max_tokens": int(max_tokens),
            "stream": False,
        }
        # BUG FIX: network-level failures (timeout, DNS, connection reset)
        # previously propagated out of ask()'s fallback loop, so fallback
        # models were never tried. Catch and report them like other errors.
        try:
            r = self.session.post(
                self.base_url, headers=headers, json=payload, timeout=60
            )
        except requests.RequestException as exc:
            self.last_status_code = None
            self.last_response_text = ""
            self.last_error = f"Request failed: {exc}"
            return None
        self.last_status_code = r.status_code
        self.last_response_text = r.text[:2000]
        if r.status_code != 200:
            try:
                err = r.json()
            except Exception:
                err = {"raw": r.text}
            self.last_error = f"HTTP {r.status_code}: {err}"
            return None
        # ROBUSTNESS: a 200 with a non-JSON body previously raised here.
        try:
            data = r.json()
        except Exception:
            self.last_error = f"Non-JSON 200 response: {r.text[:500]}"
            return None
        if "error" in data:
            self.last_error = f"OpenRouter error field: {data['error']}"
            return None
        choices = data.get("choices") or []
        if not choices:
            self.last_error = f"No choices in response: {data}"
            return None
        msg = choices[0].get("message") or {}
        content = msg.get("content", "")
        if not content:
            self.last_error = f"Empty content: {data}"
            return None
        self.last_error = ""
        return content

    def ask(
        self,
        user_message: str,
        conversation_history: Optional[List[Dict[str, str]]] = None,
        temperature: Optional[float] = None,  # fixed annotation: None allowed
        max_tokens: Optional[int] = None,  # fixed annotation: None allowed
    ) -> Optional[str]:
        """
        Send ``user_message`` to the primary teacher model, falling back
        through TEACHER_FALLBACK_MODELS in order.

        Args:
            user_message: The user's prompt.
            conversation_history: Optional prior turns as role/content
                dicts; only the last 10 are forwarded.
            temperature: Sampling temperature; defaults to
                TEACHER_TEMPERATURE when None.
            max_tokens: Completion token cap; defaults to
                TEACHER_MAX_TOKENS when None.

        Returns:
            The assistant's reply text, or None when no API key is set or
            every candidate model failed (see ``last_error``).
        """
        if not self.is_available():
            self.last_error = "OPENROUTER_API_KEY missing"
            print("[Teacher] Missing OPENROUTER_API_KEY")
            return None
        temperature = TEACHER_TEMPERATURE if temperature is None else float(temperature)
        max_tokens = TEACHER_MAX_TOKENS if max_tokens is None else int(max_tokens)
        messages = [
            {
                "role": "system",
                "content": (
                    "You are a helpful programming assistant. "
                    "Answer clearly. When asked to write code, output correct Python code. "
                    "Use markdown code blocks like ```python ... ```."
                ),
            }
        ]
        if conversation_history:
            # Cap history at the last 10 turns to bound prompt size.
            for m in conversation_history[-10:]:
                role = m.get("role", "user")
                content = m.get("content", "")
                if content:
                    messages.append({"role": role, "content": content})
        messages.append({"role": "user", "content": user_message})
        candidates = [TEACHER_MODEL] + list(TEACHER_FALLBACK_MODELS)
        for model_id in candidates:
            self.last_model_tried = model_id
            out = self._call_model(model_id, messages, temperature, max_tokens)
            if out:
                return out
            print(f"[Teacher] Failed model {model_id}: {self.last_error}")
        return None
| teacher = TeacherModel() |