Spaces:
Paused
Paused
| import json | |
| from fastapi import APIRouter, Depends, HTTPException, Request, Response | |
| from fastapi.responses import StreamingResponse, JSONResponse | |
| from api.auth import verify_app_secret | |
| from api.config import ALLOWED_MODELS | |
| from api.models import ChatRequest | |
| from api.utils import process_non_streaming_response, process_streaming_response | |
| from api.logger import setup_logger | |
# Module-level logger named after this module so log output is filterable.
logger = setup_logger(__name__)
# Router collecting this module's chat/model/health endpoints; mounted by the app elsewhere.
router = APIRouter()
async def chat_completions_options():
    """Answer a CORS preflight request with permissive chat-endpoint headers."""
    cors_headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type, Authorization",
    }
    return Response(status_code=200, headers=cors_headers)
async def list_models():
    """Return the configured model catalogue in OpenAI-style list format."""
    catalogue = {"object": "list", "data": ALLOWED_MODELS}
    return catalogue
async def stream_wrapper(request: ChatRequest):
    """Wrap the upstream streaming response, converting failures into SSE error events.

    Yields chunks from ``process_streaming_response`` unchanged. If the upstream
    stream raises, emits a JSON error event followed by the ``[DONE]`` sentinel
    so SSE clients terminate cleanly instead of hanging on a broken stream.
    """
    try:
        async for chunk in process_streaming_response(request):
            yield chunk
    except Exception:
        # logger.exception records the full traceback, not just str(e).
        logger.exception("Error in stream_wrapper")
        error_response = {
            "error": {
                "message": "抱歉,处理请求时出现错误,请重试",
                "type": "stream_error",
                "code": 500,
            }
        }
        yield f"data: {json.dumps(error_response)}\n\n"
        yield "data: [DONE]\n\n"
async def chat_completions(
    request: ChatRequest, app_secret: str = Depends(verify_app_secret)
):
    """Handle an OpenAI-style chat completion request.

    Validates the requested model against ALLOWED_MODELS, then dispatches to
    either a streaming (SSE) or a non-streaming handler.

    Raises:
        HTTPException: 400 when the requested model is not in ALLOWED_MODELS
            (raised outside the try block so it propagates to the client).
    """
    # SECURITY: do not log `app_secret` or the full request payload — the
    # original logged both, leaking the credential and user content into logs.
    logger.info("Received chat completion request for model: %s", request.model)
    allowed_ids = [model["id"] for model in ALLOWED_MODELS]
    if request.model not in allowed_ids:
        raise HTTPException(
            status_code=400,
            detail=(
                f"Model {request.model} is not allowed. "
                f"Allowed models are: {', '.join(allowed_ids)}"
            ),
        )
    try:
        if request.stream:
            logger.info("Streaming response")
            return StreamingResponse(
                stream_wrapper(request),
                media_type="text/event-stream",
                headers={
                    "Cache-Control": "no-cache",
                    "Connection": "keep-alive",
                    "Transfer-Encoding": "chunked",
                },
            )
        logger.info("Non-streaming response")
        return await process_non_streaming_response(request)
    except Exception:
        # Boundary handler: record the full traceback server-side, return a
        # generic 500 body so internal details are not leaked to the client.
        logger.exception("Error in chat_completions")
        return JSONResponse(
            status_code=500,
            content={"error": "处理请求时出现错误,请重试"},
        )
def health_check(request: Request):
    """Liveness probe: always reports an \"ok\" status as a JSON response."""
    payload = json.dumps({"status": "ok"})
    return Response(content=payload, media_type="application/json")