# openai-proxy / main.py
# Author: igor04091968 — commit a464a86
# feat: Disable database logging for read-only filesystem
#!/usr/bin/env python3
import httpx
from fastapi import FastAPI, Request, HTTPException
from utils import PathMatchingTree, OverrideStreamResponse
# Longest-prefix routing table: maps a request-path prefix to the upstream
# host that should serve it. "/" is the catch-all fallback.
_upstreams = {
    "/": "https://api.openai.com",
    "/backend-api/conversation": "https://chat.openai.com",
}
proxied_hosts = PathMatchingTree(_upstreams)
# FastAPI app
app = FastAPI()  # ASGI application instance; the catch-all route below registers on it
async def proxy_openai_api(request: Request):
    """Forward *request* to the matching upstream host and stream the reply back.

    The upstream is chosen by longest-prefix match of the request path against
    ``proxied_hosts``. Connection-specific / proxy-revealing headers are
    stripped in both directions. Returns an ``OverrideStreamResponse`` whose
    status and headers are filled in lazily, once the upstream has answered.
    """
    # Drop headers that must not be forwarded verbatim to the upstream.
    headers = {k: v for k, v in request.headers.items() if
               k not in {'host', 'content-length', 'x-forwarded-for', 'x-real-ip', 'connection'}}
    url = f'{proxied_hosts.get_matching(request.url.path)}{request.url.path}'
    # Only methods expected to carry a body are parsed as JSON.
    # NOTE(review): this raises if a POST/PUT body is absent or not JSON — the
    # original behaved the same way; confirm whether that is intended.
    request_body = await request.json() if request.method in {'POST', 'PUT'} else None

    async def stream_api_response():
        # Create the client inside the generator under `async with` so the
        # connection pool is always released, even if the consumer stops
        # iterating early. (Bug fix: the original built an AsyncClient in the
        # enclosing scope and never closed it — a resource leak per request.)
        async with httpx.AsyncClient() as client:
            try:
                st = client.stream(request.method, url, headers=headers,
                                   params=request.query_params, json=request_body)
                async with st as res:
                    # Mirror upstream status/headers onto the deferred response,
                    # minus hop-by-hop fields that no longer apply after re-framing.
                    response.status_code = res.status_code
                    response.init_headers({k: v for k, v in res.headers.items() if
                                           k not in {'content-length', 'content-encoding', 'alt-svc'}})
                    async for chunk in res.aiter_bytes():
                        yield chunk
            except httpx.RequestError as exc:
                raise HTTPException(status_code=500, detail=f'An error occurred while requesting: {exc}')

    response = OverrideStreamResponse(stream_api_response())
    return response
# Fix: `app.route` is a deprecated Starlette shim (removed in recent Starlette
# releases); `app.api_route` is the supported FastAPI decorator for binding a
# handler to multiple HTTP methods.
@app.api_route('/{path:path}', methods=['GET', 'POST', 'PUT', 'DELETE'])
async def request_handler(request: Request):
    """Catch-all endpoint: delegate every incoming request to the proxy."""
    return await proxy_openai_api(request)
if __name__ == '__main__':
    # Development entry point: serve the ASGI app locally with auto-reload.
    import uvicorn

    uvicorn.run(
        "main:app",
        host="127.0.0.1",
        port=8000,
        log_level="info",
        reload=True,
    )