huijio committed on
Commit
9e646b9
·
verified ·
1 Parent(s): a743fdf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -33
app.py CHANGED
@@ -1,37 +1,31 @@
1
- from fastapi import FastAPI, HTTPException
 
2
  from pydantic import BaseModel
3
  import requests
4
  import uuid
5
  from datetime import datetime
6
- from fastapi.middleware.cors import CORSMiddleware
7
 
8
- app = FastAPI(title="MultiChatAI to OpenAI Compatible API")
9
-
10
- # Allow CORS if needed
11
- app.add_middleware(
12
- CORSMiddleware,
13
- allow_origins=["*"],
14
- allow_credentials=True,
15
- allow_methods=["*"],
16
- allow_headers=["*"],
17
- )
18
 
19
  # OpenAI-compatible request model
 
 
 
 
20
  class ChatCompletionRequest(BaseModel):
21
  model: str = "deepseek-ai/DeepSeek-V3"
22
- messages: list[dict]
23
  temperature: float = 0.5
24
  max_tokens: int = None
25
- stream: bool = False
26
 
27
  @app.post("/v1/chat/completions")
28
  async def chat_completion(request: ChatCompletionRequest):
29
  try:
30
- # Transform OpenAI format to MultiChatAI format
31
  multi_chat_body = {
32
  "chatSettings": {
33
  "model": request.model,
34
- "prompt": "You are a friendly, helpful AI assistant.",
35
  "temperature": request.temperature,
36
  "contextLength": 32000,
37
  "includeProfileContext": True,
@@ -39,17 +33,15 @@ async def chat_completion(request: ChatCompletionRequest):
39
  "embeddingsProvider": "openai"
40
  },
41
  "messages": [
42
- {"role": "system", "content": f"Today is {datetime.now().strftime('%m/%d/%Y')}.\n\nUser Instructions:\nYou are a friendly, helpful AI assistant."},
43
- *request.messages
44
  ],
45
  "customModelId": ""
46
  }
47
 
48
  headers = {
49
  "accept": "*/*",
50
- "accept-language": "en-US,en;q=0.9",
51
- "content-type": "text/plain;charset=UTF-8",
52
- "priority": "u=1, i"
53
  }
54
 
55
  # Make request to MultiChatAI
@@ -60,13 +52,14 @@ async def chat_completion(request: ChatCompletionRequest):
60
  )
61
 
62
  if response.status_code != 200:
63
- raise HTTPException(status_code=response.status_code, detail="MultiChatAI API error")
 
 
 
64
 
65
- # Transform MultiChatAI response to OpenAI format
66
- multi_chat_response = response.text
67
-
68
- openai_response = {
69
- "id": f"chatcmpl-{str(uuid.uuid4())}",
70
  "object": "chat.completion",
71
  "created": int(datetime.now().timestamp()),
72
  "model": request.model,
@@ -74,18 +67,29 @@ async def chat_completion(request: ChatCompletionRequest):
74
  "index": 0,
75
  "message": {
76
  "role": "assistant",
77
- "content": multi_chat_response
78
  },
79
  "finish_reason": "stop"
80
  }],
81
  "usage": {
82
- "prompt_tokens": 0, # You might need to calculate these
83
  "completion_tokens": 0,
84
  "total_tokens": 0
85
  }
86
- }
87
-
88
- return openai_response
89
 
90
  except Exception as e:
91
- raise HTTPException(status_code=500, detail=str(e))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import uuid
from datetime import datetime
from typing import Optional

import requests
from fastapi import FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel
 
7
 
8
+ app = FastAPI()
 
 
 
 
 
 
 
 
 
9
 
10
  # OpenAI-compatible request model
11
+ class ChatMessage(BaseModel):
12
+ role: str
13
+ content: str
14
+
15
  class ChatCompletionRequest(BaseModel):
16
  model: str = "deepseek-ai/DeepSeek-V3"
17
+ messages: list[ChatMessage]
18
  temperature: float = 0.5
19
  max_tokens: int = None
 
20
 
21
  @app.post("/v1/chat/completions")
22
  async def chat_completion(request: ChatCompletionRequest):
23
  try:
24
+ # Prepare the MultiChatAI request
25
  multi_chat_body = {
26
  "chatSettings": {
27
  "model": request.model,
28
+ "prompt": "You are a helpful AI assistant.",
29
  "temperature": request.temperature,
30
  "contextLength": 32000,
31
  "includeProfileContext": True,
 
33
  "embeddingsProvider": "openai"
34
  },
35
  "messages": [
36
+ {"role": "system", "content": f"Today is {datetime.now().strftime('%m/%d/%Y')}.\n\nYou are a helpful AI assistant."},
37
+ *[{"role": msg.role, "content": msg.content} for msg in request.messages]
38
  ],
39
  "customModelId": ""
40
  }
41
 
42
  headers = {
43
  "accept": "*/*",
44
+ "content-type": "application/json",
 
 
45
  }
46
 
47
  # Make request to MultiChatAI
 
52
  )
53
 
54
  if response.status_code != 200:
55
+ raise HTTPException(
56
+ status_code=response.status_code,
57
+ detail=f"MultiChatAI API error: {response.text}"
58
+ )
59
 
60
+ # Transform response to OpenAI format
61
+ return JSONResponse({
62
+ "id": f"chatcmpl-{uuid.uuid4()}",
 
 
63
  "object": "chat.completion",
64
  "created": int(datetime.now().timestamp()),
65
  "model": request.model,
 
67
  "index": 0,
68
  "message": {
69
  "role": "assistant",
70
+ "content": response.text.strip()
71
  },
72
  "finish_reason": "stop"
73
  }],
74
  "usage": {
75
+ "prompt_tokens": 0,
76
  "completion_tokens": 0,
77
  "total_tokens": 0
78
  }
79
+ })
 
 
80
 
81
  except Exception as e:
82
+ raise HTTPException(status_code=500, detail=str(e))
83
+
84
+ @app.get("/")
85
+ async def health_check():
86
+ return {"status": "healthy"}
87
+
88
+ # Add CORS middleware if needed
89
+ @app.middleware("http")
90
+ async def add_cors_header(request: Request, call_next):
91
+ response = await call_next(request)
92
+ response.headers["Access-Control-Allow-Origin"] = "*"
93
+ response.headers["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
94
+ response.headers["Access-Control-Allow-Headers"] = "Content-Type"
95
+ return response