Adedoyinjames committed on
Commit
28ad213
·
verified ·
1 Parent(s): b80421f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +73 -5
app.py CHANGED
@@ -1,5 +1,8 @@
1
  import os
 
2
  import gradio as gr
 
 
3
  from openai import OpenAI
4
 
5
  # ---------------------------
@@ -27,7 +30,23 @@ SYSTEM_PROMPT = (
27
  MODEL_ID = "Qwen/Qwen3-Next-80B-A3B-Instruct:novita"
28
 
29
  # ---------------------------
30
- # 3. Chat function
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  # ---------------------------
32
 
33
  def chat_fn(message, history):
@@ -52,7 +71,7 @@ def chat_fn(message, history):
52
  messages=messages,
53
  )
54
 
55
- reply = completion.choices[0].message["content"]
56
 
57
  except Exception as e:
58
  reply = f"Error: {str(e)}"
@@ -60,7 +79,37 @@ def chat_fn(message, history):
60
  return reply
61
 
62
  # ---------------------------
63
- # 4. Gradio UI
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
  # ---------------------------
65
 
66
  with gr.Blocks(title="YAH Assistant") as demo:
@@ -68,6 +117,17 @@ with gr.Blocks(title="YAH Assistant") as demo:
68
  """
69
  ## YAH Assistant
70
  Large-model chat interface powered by Hugging Face Router.
 
 
 
 
 
 
 
 
 
 
 
71
  """
72
  )
73
 
@@ -81,5 +141,13 @@ with gr.Blocks(title="YAH Assistant") as demo:
81
 
82
  msg.submit(respond, [msg, chatbot], [msg, chatbot])
83
 
84
- # Launch
85
- demo.launch()
 
 
 
 
 
 
 
 
 
1
  import os
2
+ import json
3
  import gradio as gr
4
+ from fastapi import FastAPI, HTTPException
5
+ from pydantic import BaseModel
6
  from openai import OpenAI
7
 
8
  # ---------------------------
 
30
  MODEL_ID = "Qwen/Qwen3-Next-80B-A3B-Instruct:novita"
31
 
32
  # ---------------------------
33
+ # 3. Pydantic models for API
34
+ # ---------------------------
35
+
36
class ChatMessage(BaseModel):
    """One chat turn: who spoke (``role``) and what was said (``content``)."""

    # NOTE(review): not referenced by the visible endpoints — presumably kept
    # for a future structured-history API; confirm before removing.
    role: str
    content: str
39
+
40
class ChatRequest(BaseModel):
    """Request body accepted by ``POST /chat/``.

    ``history`` carries prior (user, assistant) exchanges; pydantic copies
    mutable defaults per instance, so the empty-list default is safe here.
    """

    message: str
    history: list[tuple[str, str]] = []
43
+
44
class ChatResponse(BaseModel):
    """Response payload returned by ``POST /chat/``.

    ``response`` holds the assistant reply; ``error`` is set (and
    ``response`` left empty) when the request failed.
    """

    response: str
    # Fix: the field defaults to None, so the annotation must admit None.
    # The original `error: str = None` paired a non-optional type with a
    # None default, which is an invalid annotation/default combination.
    error: str | None = None
47
+
48
+ # ---------------------------
49
+ # 4. Core chat function
50
  # ---------------------------
51
 
52
  def chat_fn(message, history):
 
71
  messages=messages,
72
  )
73
 
74
+ reply = completion.choices[0].message.content
75
 
76
  except Exception as e:
77
  reply = f"Error: {str(e)}"
 
79
  return reply
80
 
81
  # ---------------------------
82
+ # 5. API Endpoint Function
83
+ # ---------------------------
84
+
85
def chat_api_endpoint(chat_request: ChatRequest) -> ChatResponse:
    """Run one chat interaction and wrap the reply in a ``ChatResponse``.

    Any exception raised while producing the reply is captured and reported
    via the ``error`` field rather than propagated to the caller.
    """
    try:
        answer = chat_fn(chat_request.message, chat_request.history)
    except Exception as exc:
        return ChatResponse(response="", error=str(exc))
    return ChatResponse(response=answer)
94
+
95
# ---------------------------
# 6. FastAPI App Setup
# ---------------------------

app = FastAPI(title="YAH Assistant API")

# Chat endpoint. Declared as a plain `def` (not `async def`): chat_fn issues
# a blocking network call through the OpenAI client, and FastAPI executes
# sync path operations in a worker thread pool, so the event loop is not
# stalled while waiting on the model. The original `async def` version ran
# the blocking call directly on the event loop.
@app.post("/chat/")
def chat(request: ChatRequest):
    """Proxy a ChatRequest to the model and return a ChatResponse."""
    return chat_api_endpoint(request)

# Health check endpoint — trivial, so `async def` is fine here.
@app.get("/health")
async def health_check():
    """Report liveness and the configured model identifier."""
    return {"status": "healthy", "model": MODEL_ID}
110
+
111
+ # ---------------------------
112
+ # 7. Gradio UI (keeps the original interface)
113
  # ---------------------------
114
 
115
  with gr.Blocks(title="YAH Assistant") as demo:
 
117
  """
118
  ## YAH Assistant
119
  Large-model chat interface powered by Hugging Face Router.
120
+
121
+ ### API Usage
122
+ You can also interact with this assistant via API:
123
+ - **Endpoint:** `POST /chat/`
124
+ - **Body:**
125
+ ```json
126
+ {
127
+ "message": "Your message here",
128
+ "history": [["Hello", "Hi there!"], ["How are you?", "I'm good!"]]
129
+ }
130
+ ```
131
  """
132
  )
133
 
 
141
 
142
  msg.submit(respond, [msg, chatbot], [msg, chatbot])
143
 
144
# ---------------------------
# 8. Mount Gradio app to FastAPI
# ---------------------------

# Serve the Gradio UI from the root path; API routes registered earlier
# (/chat/, /health) take precedence over the root mount.
app = gr.mount_gradio_app(app, demo, path="/")

if __name__ == "__main__":
    # Local entry point: run the combined ASGI app directly.
    # NOTE(review): 7860 matches Gradio's conventional port — confirm the
    # deployment environment expects this port.
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=7860)