jejunepixels committed on
Commit
fdcb6c5
·
verified ·
1 Parent(s): ebf82cb

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +94 -0
app.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from fastapi import FastAPI, HTTPException
3
+ from fastapi.middleware.cors import CORSMiddleware
4
+ from pydantic import BaseModel
5
+ from typing import List
6
+ import httpx
7
+ import logging
# --- Application setup (runs at import time) ---

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Get OpenAI API key from Hugging Face Secrets.
# Fail fast at startup so a missing secret is reported immediately,
# not on the first proxied request.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
if not OPENAI_API_KEY:
    raise RuntimeError("OPENAI_API_KEY environment variable not set")

app = FastAPI(title="OpenAI Proxy for Unity")

# Enable CORS for Unity WebGL builds.
# NOTE(review): browsers refuse credentialed requests when the allowed origin
# is the wildcard "*"; if `allow_credentials=True` is actually needed, list
# explicit origins instead — confirm against the Unity client's requirements.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, specify your domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
27
+
28
class Message(BaseModel):
    """One chat turn: the speaker's role and the text they produced."""

    # e.g. "system" / "user" / "assistant" — passed through to OpenAI as-is
    role: str
    content: str
31
+
32
class ChatRequest(BaseModel):
    """Body accepted by POST /chat — mirrors OpenAI's chat-completions payload."""

    model: str
    temperature: float
    messages: List[Message]
36
+
37
@app.get("/")
async def root():
    """Landing route: confirm the proxy is up and advertise its endpoints."""
    available = {"chat": "POST /chat"}
    return {
        "status": "running",
        "message": "OpenAI Proxy Active",
        "endpoints": available,
    }
46
+
47
@app.post("/chat")
async def proxy_chat(request: ChatRequest):
    """
    Proxy endpoint that forwards requests to OpenAI API.
    The API key is stored securely on the server.

    Args:
        request: Validated chat payload (model, temperature, messages).

    Returns:
        The JSON body returned by OpenAI's chat-completions endpoint,
        passed through unchanged.

    Raises:
        HTTPException: with OpenAI's own status code and body when the
            upstream call returns non-200; 500 on transport or unexpected
            errors.
    """
    try:
        logger.info(
            "Received request: %s, %d messages", request.model, len(request.messages)
        )

        # Forward request to OpenAI; 60 s timeout covers slow completions.
        async with httpx.AsyncClient(timeout=60.0) as client:
            openai_response = await client.post(
                "https://api.openai.com/v1/chat/completions",
                json={
                    "model": request.model,
                    "temperature": request.temperature,
                    "messages": [
                        {"role": msg.role, "content": msg.content}
                        for msg in request.messages
                    ],
                },
                headers={
                    "Authorization": f"Bearer {OPENAI_API_KEY}",
                    "Content-Type": "application/json",
                },
            )

        if openai_response.status_code != 200:
            # Surface the upstream failure with its original status code.
            logger.error("OpenAI API error: %s", openai_response.text)
            raise HTTPException(
                status_code=openai_response.status_code,
                detail=openai_response.text,
            )

        result = openai_response.json()
        logger.info("Successfully proxied request")
        return result

    except HTTPException:
        # BUG FIX: the generic `except Exception` below previously caught the
        # HTTPException raised for non-200 upstream responses and re-wrapped
        # it as a 500, losing OpenAI's status code. Re-raise it unchanged.
        raise
    except httpx.HTTPError as e:
        logger.error("HTTP error: %s", e)
        raise HTTPException(status_code=500, detail=f"Proxy error: {str(e)}")
    except Exception as e:
        logger.error("Unexpected error: %s", e)
        raise HTTPException(status_code=500, detail=f"Server error: {str(e)}")
91
+
92
@app.get("/health")
async def health():
    """Health-check route for orchestrators and uptime monitors."""
    key_present = bool(OPENAI_API_KEY)
    return {"status": "healthy", "api_configured": key_present}