Update app.py
Browse files
app.py
CHANGED
|
@@ -2,28 +2,25 @@ import pathlib
|
|
| 2 |
import os
|
| 3 |
import gradio as gr
|
| 4 |
from mcp import StdioServerParameters
|
| 5 |
-
from smolagents import MCPClient
|
| 6 |
-
from smolagents.agents import OpenAIAgent # <-- use OpenAI-aware agent
|
| 7 |
|
|
|
|
| 8 |
SERVER_PATH = pathlib.Path(__file__).with_name("mcp_server.py")
|
| 9 |
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
def respond(message: str, history: list):
|
| 12 |
-
"""
|
| 13 |
-
# Spin up the MCP tool server
|
| 14 |
params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])
|
| 15 |
|
| 16 |
-
# Connect and get tools
|
| 17 |
with MCPClient(params) as tools:
|
| 18 |
-
|
| 19 |
-
agent =
|
| 20 |
-
tools=tools,
|
| 21 |
-
model=os.getenv("OPENAI_MODEL", "gpt-4o-preview"),
|
| 22 |
-
temperature=0.3,
|
| 23 |
-
)
|
| 24 |
answer = agent.run(message)
|
| 25 |
|
| 26 |
-
# Update chat
|
| 27 |
history.append({"role": "user", "content": message})
|
| 28 |
history.append({"role": "assistant", "content": answer})
|
| 29 |
return history, history
|
|
@@ -34,7 +31,8 @@ with gr.Blocks(title="Enterprise SQL Agent") as demo:
|
|
| 34 |
chatbot = gr.Chatbot(type="messages", label="Enterprise SQL Agent")
|
| 35 |
|
| 36 |
textbox = gr.Textbox(
|
| 37 |
-
placeholder="Ask: Who are my inactive Northeast customers?",
|
|
|
|
| 38 |
)
|
| 39 |
textbox.submit(respond, [textbox, chat_state], [chatbot, chat_state])
|
| 40 |
|
|
@@ -45,7 +43,7 @@ with gr.Blocks(title="Enterprise SQL Agent") as demo:
|
|
| 45 |
- List customers sorted by last order date.
|
| 46 |
- Find clients from the West with recent orders.
|
| 47 |
|
| 48 |
-
|
| 49 |
"""
|
| 50 |
)
|
| 51 |
|
|
|
|
| 2 |
import os
|
| 3 |
import gradio as gr
|
| 4 |
from mcp import StdioServerParameters
|
| 5 |
+
from smolagents import MCPClient, CodeAgent, InferenceClientModel
|
|
|
|
| 6 |
|
| 7 |
+
# Path to the MCP tool server script that lives next to this file.
SERVER_PATH = pathlib.Path(__file__).parent / "mcp_server.py"

# Hugging Face Inference model id; overridable via the HF_MODEL_ID env var.
# Defaults to a small, free-tier-friendly Qwen coder model.
HF_MODEL_ID = os.getenv("HF_MODEL_ID", "Qwen/Qwen2.5-Coder-8B-Instruct")
|
| 12 |
+
|
| 13 |
|
| 14 |
def respond(message: str, history: list):
    """Answer *message* with a CodeAgent backed by the MCP tool server.

    Spins up the stdio MCP server (``mcp_server.py``) for the duration of the
    request, builds a CodeAgent over its tools using the configured Hugging
    Face Inference model, and runs the prompt through it. The user turn and
    the agent's reply are appended to *history* as OpenAI-style message dicts.

    Returns the updated history twice, matching the (chatbot, chat_state)
    outputs this handler is wired to in the Gradio UI.
    """
    server_params = StdioServerParameters(command="python", args=[str(SERVER_PATH)])

    # Connect to the tool server; the context manager tears the subprocess
    # down once the answer has been produced.
    with MCPClient(server_params) as mcp_tools:
        llm = InferenceClientModel(model_id=HF_MODEL_ID)
        sql_agent = CodeAgent(tools=mcp_tools, model=llm)
        reply = sql_agent.run(message)

    # Record both turns in OpenAI-style messages format.
    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": reply})
    return history, history
|
|
|
|
| 31 |
chatbot = gr.Chatbot(type="messages", label="Enterprise SQL Agent")
|
| 32 |
|
| 33 |
textbox = gr.Textbox(
|
| 34 |
+
placeholder="Ask: Who are my inactive Northeast customers?",
|
| 35 |
+
show_label=False,
|
| 36 |
)
|
| 37 |
textbox.submit(respond, [textbox, chat_state], [chatbot, chat_state])
|
| 38 |
|
|
|
|
| 43 |
- List customers sorted by last order date.
|
| 44 |
- Find clients from the West with recent orders.
|
| 45 |
|
| 46 |
+
_Powered by smolagents + MCP + Hugging Face Inference API_
|
| 47 |
"""
|
| 48 |
)
|
| 49 |
|