# Hugging Face Spaces status banner ("Spaces: Sleeping") — page-scrape residue, not part of the application code.
| import os | |
| from fastapi import FastAPI, HTTPException | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from pydantic import BaseModel | |
| from typing import List | |
| import httpx | |
| import logging | |
# Configure root logging once at import time; the module logger below is
# reused by every request handler in this file.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Get the OpenAI API key from Hugging Face Secrets (injected as an env var).
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    # Fail fast at startup rather than on the first proxied request.
    raise RuntimeError("OPENAI_API_KEY environment variable not set")
app = FastAPI(title="Proxy for Unity")

# Enable CORS for Unity WebGL builds (browser-hosted clients need the
# Access-Control-* response headers to call this proxy cross-origin).
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# maximally permissive — lock origins down to the game's domain in production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify your domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class Message(BaseModel):
    """A single chat message, forwarded verbatim to the OpenAI chat API."""

    # e.g. "system" / "user" / "assistant" — passed through unvalidated
    role: str
    content: str
class ChatRequest(BaseModel):
    """Request body accepted by the proxy's chat endpoint.

    Mirrors the subset of the OpenAI chat-completions payload that the
    Unity client is allowed to control.
    """

    model: str  # OpenAI model name, forwarded verbatim
    temperature: float
    messages: List[Message]
@app.get("/")
async def root():
    """Status endpoint: confirm the proxy is running and list its routes.

    Returns a small JSON document suitable for a quick smoke test from a
    browser or curl.

    NOTE(review): the original source had no route decorator, so this
    handler was never reachable despite advertising itself as an endpoint;
    registered it at GET /.
    """
    return {
        "status": "running",
        "message": "OpenAI Proxy Active",
        "endpoints": {
            "chat": "POST /chat"
        },
    }
@app.post("/chat")
async def proxy_chat(request: ChatRequest):
    """Forward a chat-completion request to OpenAI and return its JSON reply.

    The API key is attached server-side from the environment, so the Unity
    client never sees it. Non-200 OpenAI responses are propagated with their
    original status code; transport failures surface as HTTP 500.

    NOTE(review): the original source had no route decorator, so this
    handler was never reachable; registered it at POST /chat.
    """
    try:
        # Lazy %-style args: the string is only built if INFO is enabled.
        logger.info("Received request: %s, %d messages",
                    request.model, len(request.messages))
        # Forward the request to OpenAI. The client is scoped to this call
        # via the context manager so connections are always released; 60 s
        # allows for long completions.
        async with httpx.AsyncClient(timeout=60.0) as client:
            openai_response = await client.post(
                "https://api.openai.com/v1/chat/completions",
                json={
                    "model": request.model,
                    "temperature": request.temperature,
                    "messages": [
                        {"role": msg.role, "content": msg.content}
                        for msg in request.messages
                    ],
                },
                headers={
                    "Authorization": f"Bearer {OPENAI_API_KEY}",
                    "Content-Type": "application/json",
                },
            )
            if openai_response.status_code != 200:
                logger.error("OpenAI API error: %s", openai_response.text)
                raise HTTPException(
                    status_code=openai_response.status_code,
                    detail=openai_response.text,
                )
            result = openai_response.json()
        logger.info("Successfully proxied request")
        return result
    except HTTPException:
        # BUG FIX: the generic `except Exception` below used to catch the
        # HTTPException raised for non-200 OpenAI responses and replace its
        # status code with a 500. Re-raise it untouched instead.
        raise
    except httpx.HTTPError as e:
        logger.error("HTTP error: %s", e)
        raise HTTPException(status_code=500, detail=f"Proxy error: {str(e)}")
    except Exception as e:
        logger.error("Unexpected error: %s", e)
        raise HTTPException(status_code=500, detail=f"Server error: {str(e)}")
@app.get("/health")
async def health():
    """Health-check endpoint for container orchestration / uptime probes.

    NOTE(review): the original source had no route decorator, so this
    handler was never reachable; registered it at GET /health.
    """
    # api_configured is always True in practice: module import raises
    # RuntimeError when OPENAI_API_KEY is missing.
    return {"status": "healthy", "api_configured": bool(OPENAI_API_KEY)}