Spaces:
Sleeping
Sleeping
| import os | |
| import requests | |
| import json | |
| from typing import Dict, List, Optional | |
class OpenRouterClient:
    """Thin HTTP client for the OpenRouter REST API (chat completions + model list).

    The API key is taken from the constructor argument, falling back to the
    OPENROUTER_API_KEY environment variable.
    """

    def __init__(self, api_key: str = "", timeout: float = 60.0):
        """
        Parameters:
        - api_key: OpenRouter API key; falls back to $OPENROUTER_API_KEY if empty
        - timeout: per-request timeout in seconds (requests has NO default
          timeout, so without this a dead server would hang the caller forever)
        """
        self.base_url = "https://openrouter.ai/api/v1"
        self.api_key = api_key or os.getenv("OPENROUTER_API_KEY", "")
        self.timeout = timeout
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            # OpenRouter uses these two headers for app attribution/ranking.
            "HTTP-Referer": "https://huggingface.co/spaces",
            "X-Title": "OpenRouter AI Hub",
        }

    def chat_completion(
        self,
        model: str,
        messages: List[Dict[str, str]],
        temperature: Optional[float] = 0.7,
        max_tokens: Optional[int] = 1024,
        **kwargs,
    ) -> Dict:
        """
        Get a chat completion from OpenRouter.

        Parameters:
        - model: Model identifier (e.g., "openai/gpt-3.5-turbo")
        - messages: List of message dictionaries with role and content
        - temperature: Creativity parameter (0-2)
        - max_tokens: Maximum length of response
        - kwargs: forwarded verbatim into the request payload

        Returns the decoded JSON response body.
        Raises Exception (chained from the underlying requests error) on any
        network failure or non-2xx status.
        """
        payload = {
            "model": model,
            "messages": messages,
            "temperature": temperature,
            "max_tokens": max_tokens,
            **kwargs,
        }
        try:
            # json= lets requests handle serialization and the content type;
            # timeout= bounds both the connect and the read phase.
            response = requests.post(
                f"{self.base_url}/chat/completions",
                headers=self.headers,
                json=payload,
                timeout=self.timeout,
            )
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            # Chain the cause so the original traceback is preserved.
            raise Exception(f"OpenRouter API request failed: {str(e)}") from e

    def list_models(self) -> List[Dict]:
        """
        List all available models from OpenRouter.

        Returns the "data" list from the /models endpoint (empty list if the
        key is missing from the response).
        Raises Exception (chained) on any network failure or non-2xx status.
        """
        try:
            response = requests.get(
                f"{self.base_url}/models",
                headers=self.headers,
                timeout=self.timeout,
            )
            response.raise_for_status()
            return response.json().get("data", [])
        except requests.exceptions.RequestException as e:
            raise Exception(f"OpenRouter API request failed: {str(e)}") from e
def get_openrouter_client():
    """Factory: build an OpenRouterClient configured from the environment."""
    client = OpenRouterClient()
    return client