Hiren122 committed on
Commit
e7aca61
·
verified ·
1 Parent(s): 31932c6

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +82 -0
app.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Standard library
import json
import os
import uuid

# Third-party
import httpx
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse, StreamingResponse

app = FastAPI()

# Upstream Stack AI assistant endpoint this proxy forwards chat requests to.
TARGET = "https://api.stack-ai.com/projects/699674cff2af4ce0b6865ca3/chat/assistant"

# SECURITY: prefer supplying the key via the STACK_API_KEY environment
# variable. The hard-coded fallback keeps existing deployments working,
# but a key committed to source control must be treated as compromised —
# rotate it and remove the literal.
API_KEY = os.environ.get("STACK_API_KEY", "910a861f-6fd7-4c8e-b91b-6281ff0083d0")
13
@app.post("/v1/chat/completions")
async def chat_completions(req: Request):
    """OpenAI-compatible ``/v1/chat/completions`` endpoint proxying to Stack AI.

    Reads an OpenAI-style chat payload, forwards the most recent user
    message to the Stack AI assistant, and returns either a raw SSE
    pass-through (``stream: true``) or a single OpenAI-shaped JSON
    chat-completion object.
    """
    body = await req.json()

    messages = body.get("messages", [])
    stream = body.get("stream", False)

    # Stack AI accepts a single message string, so forward only the most
    # recent user turn. .get() guards against role-less message dicts.
    user_msg = next(
        (m.get("content", "") for m in reversed(messages) if m.get("role") == "user"),
        "",
    )

    stack_payload = {
        "new_message_id": str(uuid.uuid4()),
        "parent_message_id": str(uuid.uuid4()),
        "user_id": "openai_proxy",
        "message": user_msg,
        "attachments": [],
        # Fresh conversation per request: the proxy is stateless, so no
        # upstream conversation history is ever reused.
        "conversation_id": str(uuid.uuid4()),
        "stream": stream,
        "is_client_side_generated_conversation_id": False,
        "additional_inputs": {}
    }

    headers = {
        "x-api-key": API_KEY,
        "content-type": "application/json"
    }

    if stream:
        # BUG FIX: the AsyncClient must be opened *inside* the generator.
        # Previously it was opened with `async with` in the handler body,
        # which closed the client as soon as the handler returned — before
        # StreamingResponse had consumed event_stream() — so every streamed
        # request failed on a closed client.
        async def event_stream():
            async with httpx.AsyncClient(timeout=120) as client:
                async with client.stream(
                    "POST", TARGET, json=stack_payload, headers=headers
                ) as r:
                    async for chunk in r.aiter_text():
                        yield chunk

        return StreamingResponse(event_stream(), media_type="text/event-stream")

    async with httpx.AsyncClient(timeout=120) as client:
        r = await client.post(TARGET, json=stack_payload, headers=headers)

    # Convert the upstream reply to OpenAI chat-completion format.
    # NOTE(review): the upstream schema is assumed to mirror OpenAI's
    # ("choices"[0]["message"]["content"]) — confirm against Stack AI docs.
    try:
        data = r.json()
    except ValueError:
        # Upstream returned non-JSON (e.g. an HTML error page); previously
        # this was an unhandled 500. Surface the raw body text instead.
        response_text = r.text
    else:
        try:
            response_text = data["choices"][0]["message"]["content"]
        except (KeyError, IndexError, TypeError):
            # Schema mismatch: fall back to stringifying the whole payload
            # so callers still get something diagnosable (original behavior).
            response_text = str(data)

    return JSONResponse({
        "id": "chatcmpl-proxy",
        "object": "chat.completion",
        "created": 0,
        "model": "stack-ai",
        "choices": [
            {
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": response_text
                },
                "finish_reason": "stop"
            }
        ]
    })