# src/streamlit_app.py — uploaded by alexlopezcifuentes via huggingface_hub (commit 9978fd9, verified)
#!/usr/bin/env python3
"""
Self-contained chat app that talks to a local open-source LLM (via Ollama)
and exposes MCP (Model-Context-Protocol) tools to the model.
Requires:
pip install ollama mcp httpx
# Example MCP server: https://github.com/modelcontextprotocol/servers
# Start one, e.g.:
# npx -y @modelcontextprotocol/server-filesystem /tmp
"""
import asyncio
import json
import sys
from contextlib import AsyncExitStack
from typing import Any, Dict, List, Optional

import httpx
import ollama
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
# System prompt that teaches the model an ad-hoc tool-call protocol:
# to invoke an MCP tool it must reply with a bare JSON object
# {"tool": ..., "arguments": {...}} and nothing else; chat_turn() parses
# replies against exactly this shape.
SYSTEM_PROMPT: str = (
    "You are a helpful assistant. "
    "You have tools available via MCP. "
    "When you need to use a tool, respond with EXACTLY the following JSON and nothing else:\n"
    '{"tool": "<tool_name>", "arguments": {...}}\n'
    "When you receive the tool result, incorporate it into a friendly reply."
)
class ChatApp:
    """Chat loop around a local Ollama model with MCP tool access.

    The MCP server runs as a stdio subprocess (by default the reference
    filesystem server rooted at /tmp).  The model signals a tool call by
    replying with a bare JSON object (see SYSTEM_PROMPT); the result is
    fed back and the model is re-queried for a final answer.
    """

    def __init__(
        self,
        model: str = "llama3.1",
        mcp_command: Optional[str] = None,
        mcp_args: Optional[List[str]] = None,
    ):
        """
        Args:
            model: Ollama model name to chat with.
            mcp_command: Executable that launches the MCP server (default "npx").
            mcp_args: Arguments for the server process (default: the
                filesystem server rooted at /tmp).
        """
        self.model = model
        self.client = ollama.AsyncClient()
        # Conversation history, seeded with the tool-use system prompt.
        self.history: List[Dict[str, str]] = [
            {"role": "system", "content": SYSTEM_PROMPT}
        ]
        self.mcp_command = mcp_command or "npx"
        self.mcp_args = mcp_args or [
            "-y",
            "@modelcontextprotocol/server-filesystem",
            "/tmp",
        ]

    async def start_mcp(self) -> None:
        """Spawn the MCP server subprocess and open a client session.

        Fix vs. the original: ``stdio_client()`` and ``ClientSession()``
        are async context managers — entering them manually and later
        calling ``__aexit__`` on the *yielded* objects was wrong (the
        stdio client yields a ``(read, write)`` stream pair, not the
        context manager, and ``ClientSession`` takes the two streams as
        separate arguments).  An AsyncExitStack keeps the managers alive
        and closes them in the right order in ``__aexit__``.
        """
        server_params = StdioServerParameters(
            command=self.mcp_command,
            args=self.mcp_args,
        )
        self._exit_stack = AsyncExitStack()
        read_stream, write_stream = await self._exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.session = await self._exit_stack.enter_async_context(
            ClientSession(read_stream, write_stream)
        )
        await self.session.initialize()

    async def list_tools(self) -> List[Any]:
        """Return the server's tool descriptors.

        ``session.list_tools()`` returns a result object; callers want the
        contained list (items expose ``.name`` / ``.description``
        attributes, not dict keys).
        """
        result = await self.session.list_tools()
        return result.tools

    async def call_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
        """Invoke one MCP tool and return its raw result object."""
        return await self.session.call_tool(tool_name, arguments)

    @staticmethod
    def _parse_tool_call(text: str) -> Optional[Dict[str, Any]]:
        """Return the payload dict if *text* is a tool-call JSON object
        per SYSTEM_PROMPT, else None.  Non-JSON replies are normal chat."""
        try:
            payload = json.loads(text.strip())
        except json.JSONDecodeError:
            return None
        if isinstance(payload, dict) and "tool" in payload:
            return payload
        return None

    async def _complete(self) -> str:
        """Run one non-streaming completion over the current history."""
        response = await self.client.chat(
            model=self.model,
            messages=self.history,
            stream=False,
        )
        return response["message"]["content"]

    async def chat_turn(self, user_message: str) -> str:
        """Process one user turn, executing at most one tool call.

        If the model's first reply is a tool-call JSON object, the tool is
        invoked, its result appended to the history, and the model queried
        again for the final answer.  Tool failures are reported to the
        model instead of being silently swallowed (the original used a
        bare ``except Exception: pass`` around the whole path).
        """
        self.history.append({"role": "user", "content": user_message})
        assistant_text = await self._complete()
        call = self._parse_tool_call(assistant_text)
        if call is not None:
            tool_name = call["tool"]
            # Missing "arguments" now means "no arguments", not a silent abort.
            tool_args = call.get("arguments", {})
            print(f"[TOOL] {tool_name} {tool_args}")
            try:
                tool_result = await self.call_tool(tool_name, tool_args)
            except Exception as e:  # surface tool errors to the model, don't crash
                tool_result = f"Tool call failed: {e}"
            self.history.append(
                {
                    "role": "assistant",
                    "content": f"Tool result:\n{tool_result}",
                }
            )
            # Re-query so the model can turn the raw tool output into a reply.
            assistant_text = await self._complete()
        self.history.append({"role": "assistant", "content": assistant_text})
        return assistant_text

    async def repl(self) -> None:
        """Interactive stdin loop; /quit, /exit, Ctrl-C or EOF leaves."""
        await self.start_mcp()
        tools = await self.list_tools()
        print("Available MCP tools:")
        for tool in tools:
            # Tool descriptors are attribute objects (see list_tools),
            # so t["name"] would raise TypeError here.
            print(" -", tool.name, getattr(tool, "description", "") or "")
        print("Type /quit to exit.\n")
        while True:
            try:
                user_in = input("> ").strip()
            except (KeyboardInterrupt, EOFError):
                break
            if not user_in:
                continue
            if user_in.lower() in {"/quit", "/exit"}:
                break
            try:
                reply = await self.chat_turn(user_in)
            except Exception as e:
                print("Error:", e, file=sys.stderr)
                continue
            print("Assistant:", reply)

    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc):
        # Close the MCP session and subprocess in reverse order of entry.
        if hasattr(self, "_exit_stack"):
            await self._exit_stack.aclose()
async def main() -> None:
    """Construct a default ChatApp and run its REPL until the user quits."""
    app = ChatApp()
    async with app:
        await app.repl()


if __name__ == "__main__":
    asyncio.run(main())