# OpenRouter API client module (HuggingFace Spaces app)
import os
import requests
import json
from typing import Dict, List, Optional
class OpenRouterClient:
    """Thin HTTP client for the OpenRouter REST API.

    Reads the API key from the ``OPENROUTER_API_KEY`` environment variable
    when one is not passed explicitly, and sends the attribution headers
    OpenRouter expects from hosted apps.
    """

    def __init__(self, api_key: str = "", timeout: float = 30.0):
        """Initialize the client.

        Parameters:
        - api_key: OpenRouter API key; falls back to the
          ``OPENROUTER_API_KEY`` environment variable (empty string if unset).
        - timeout: per-request timeout in seconds. Without one, `requests`
          waits forever on a stalled connection.
        """
        self.base_url = "https://openrouter.ai/api/v1"
        self.api_key = api_key or os.getenv("OPENROUTER_API_KEY", "")
        self.timeout = timeout
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            # Attribution headers recommended by OpenRouter for app rankings.
            "HTTP-Referer": "https://huggingface.co/spaces",
            "X-Title": "OpenRouter AI Hub",
        }

    def chat_completion(
        self,
        model: str,
        messages: List[Dict[str, str]],
        temperature: Optional[float] = 0.7,
        max_tokens: Optional[int] = 1024,
        **kwargs,
    ) -> Dict:
        """
        Get chat completion from OpenRouter.

        Parameters:
        - model: Model identifier (e.g., "openai/gpt-3.5-turbo")
        - messages: List of message dictionaries with role and content
        - temperature: Creativity parameter (0-2)
        - max_tokens: Maximum length of response
        - kwargs: Extra fields merged verbatim into the request payload

        Returns the decoded JSON response body.
        Raises Exception if the request fails or returns a non-2xx status.
        """
        payload = {
            "model": model,
            "messages": messages,
            "temperature": temperature,
            "max_tokens": max_tokens,
            **kwargs,
        }
        try:
            response = requests.post(
                f"{self.base_url}/chat/completions",
                headers=self.headers,
                json=payload,  # serializes and sets Content-Type in one step
                timeout=self.timeout,  # prevent an indefinite hang
            )
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            # Chain the original error so the full traceback is preserved.
            raise Exception(f"OpenRouter API request failed: {str(e)}") from e

    def list_models(self) -> List[Dict]:
        """
        List all available models from OpenRouter.

        Returns the ``data`` array of the /models response ([] if absent).
        Raises Exception if the request fails or returns a non-2xx status.
        """
        try:
            response = requests.get(
                f"{self.base_url}/models",
                headers=self.headers,
                timeout=self.timeout,  # prevent an indefinite hang
            )
            response.raise_for_status()
            return response.json().get("data", [])
        except requests.exceptions.RequestException as e:
            raise Exception(f"OpenRouter API request failed: {str(e)}") from e
def get_openrouter_client():
    """Factory: build an OpenRouterClient configured from the environment."""
    client = OpenRouterClient()
    return client