import os
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List
import httpx
import logging
# Configure module-wide logging so the request handlers below can emit
# INFO-level diagnostics.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Get OpenAI API key from Hugging Face Secrets (exposed as an env var).
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    # Fail fast at startup rather than on the first proxied request.
    raise RuntimeError("OPENAI_API_KEY environment variable not set")

app = FastAPI(title="Proxy for Unity")

# Enable CORS for Unity WebGL builds
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify your domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class Message(BaseModel):
    """One chat message, matching the OpenAI chat-completions message shape."""

    # Role string (e.g. "system"/"user"/"assistant") — forwarded verbatim,
    # not validated here.
    role: str
    # Message text, forwarded verbatim.
    content: str
class ChatRequest(BaseModel):
    """Request body accepted by POST /chat and forwarded to OpenAI unchanged."""

    # OpenAI model name, passed through as-is.
    model: str
    # Sampling temperature, passed through as-is (required; no default).
    temperature: float
    # Full conversation history to send upstream.
    messages: List[Message]
@app.get("/")
async def root():
    """Report that the proxy is running and list its available endpoints."""
    available = {"chat": "POST /chat"}
    return {
        "status": "running",
        "message": "OpenAI Proxy Active",
        "endpoints": available,
    }
@app.post("/chat")
async def proxy_chat(request: ChatRequest):
    """Forward a chat-completions request to OpenAI and return its JSON reply.

    Args:
        request: Validated model/temperature/messages payload from the client.

    Raises:
        HTTPException: with OpenAI's own status code and response body when
            the upstream call returns a non-200, or 500 for transport or
            unexpected errors.
    """
    try:
        logger.info(
            "Received request: %s, %d messages",
            request.model,
            len(request.messages),
        )

        # Forward request to OpenAI
        async with httpx.AsyncClient(timeout=60.0) as client:
            openai_response = await client.post(
                "https://api.openai.com/v1/chat/completions",
                json={
                    "model": request.model,
                    "temperature": request.temperature,
                    "messages": [
                        {"role": msg.role, "content": msg.content}
                        for msg in request.messages
                    ],
                },
                headers={
                    "Authorization": f"Bearer {OPENAI_API_KEY}",
                    "Content-Type": "application/json",
                },
            )

            if openai_response.status_code != 200:
                logger.error("OpenAI API error: %s", openai_response.text)
                # Surface OpenAI's real status code and body to the caller.
                raise HTTPException(
                    status_code=openai_response.status_code,
                    detail=openai_response.text,
                )

            result = openai_response.json()
            logger.info("Successfully proxied request")
            return result

    except HTTPException:
        # Bug fix: HTTPException subclasses Exception, so without this clause
        # the upstream-error HTTPException raised above was swallowed by the
        # generic handler below and re-reported as a 500 "Server error",
        # losing OpenAI's actual status code. Re-raise it untouched.
        raise
    except httpx.HTTPError as e:
        logger.error("HTTP error: %s", e)
        raise HTTPException(status_code=500, detail=f"Proxy error: {str(e)}")
    except Exception as e:
        logger.error("Unexpected error: %s", e)
        raise HTTPException(status_code=500, detail=f"Server error: {str(e)}")
@app.get("/health")
async def health():
    """Liveness probe; also reports whether the API key is configured."""
    return {
        "status": "healthy",
        "api_configured": bool(OPENAI_API_KEY),
    }