alexlopezcifuentes committed on
Commit
9978fd9
·
verified ·
1 Parent(s): 7aa8120

Upload src/streamlit_app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +137 -36
src/streamlit_app.py CHANGED
@@ -1,40 +1,141 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
- import streamlit as st
5
-
 
 
 
 
6
  """
7
- # Welcome to Streamlit!
8
 
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
 
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
 
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
1
+ #!/usr/bin/env python3
2
+ """
3
+ Self-contained chat app that talks to a local open-source LLM (via Ollama)
4
+ and exposes MCP (Model-Context-Protocol) tools to the model.
5
+ Requires:
6
+ pip install ollama mcp httpx
7
+ # Example MCP server: https://github.com/modelcontextprotocol/servers
8
+ # Start one, e.g.:
9
+ # npx -y @modelcontextprotocol/server-filesystem /tmp
10
  """
 
11
 
12
import asyncio
import json
import sys
from contextlib import AsyncExitStack
from typing import Any, Dict, List, Optional, Tuple

import httpx  # NOTE(review): imported but currently unused in this module — confirm before removing
import ollama
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
21
+
22
+
23
# System prompt prepended to every conversation.  It instructs the model to
# request tool execution by emitting a bare JSON object of the form
# {"tool": ..., "arguments": {...}} and nothing else.
SYSTEM_PROMPT = (
    "You are a helpful assistant. You have tools available via MCP. "
    "When you need to use a tool, respond with EXACTLY the following JSON and nothing else:\n"
    "{\"tool\": \"<tool_name>\", \"arguments\": {...}}\n"
    "When you receive the tool result, incorporate it into a friendly reply."
)
30
+
31
+
32
class ChatApp:
    """Chat REPL backed by a local Ollama model with MCP tools exposed to it.

    Lifecycle: use as an async context manager (``async with ChatApp() as app``),
    then call :meth:`repl`.  The MCP server subprocess and client session are
    opened in :meth:`start_mcp` and torn down in :meth:`__aexit__`.
    """

    def __init__(
        self,
        model: str = "llama3.1",
        mcp_command: Optional[str] = None,
        mcp_args: Optional[List[str]] = None,
    ):
        """Configure the Ollama model and the MCP server launch command.

        Defaults to the filesystem MCP server rooted at /tmp, launched via npx.
        """
        self.model = model
        self.client = ollama.AsyncClient()
        self.history: List[Dict[str, str]] = [
            {"role": "system", "content": SYSTEM_PROMPT}
        ]

        self.mcp_command = mcp_command or "npx"
        self.mcp_args = mcp_args or [
            "-y",
            "@modelcontextprotocol/server-filesystem",
            "/tmp",
        ]

        # Owns the MCP transport and session lifetimes; closed in __aexit__.
        self._stack = AsyncExitStack()
        self.session = None  # set in start_mcp()

    async def start_mcp(self) -> None:
        """Spawn the MCP server subprocess and open an initialized client session.

        BUG FIX: the original called ``stdio_client(params).__aenter__()`` and
        then ``ClientSession(self.mcp_client)``.  ``stdio_client`` yields a
        ``(read_stream, write_stream)`` pair that must be unpacked into
        ``ClientSession``, and ``__aexit__`` must be invoked on the *entered*
        context managers, not on their yielded values.  An AsyncExitStack keeps
        both managers alive here and unwinds them in LIFO order on close.
        """
        server_params = StdioServerParameters(
            command=self.mcp_command,
            args=self.mcp_args,
        )
        read_stream, write_stream = await self._stack.enter_async_context(
            stdio_client(server_params)
        )
        self.session = await self._stack.enter_async_context(
            ClientSession(read_stream, write_stream)
        )
        await self.session.initialize()

    async def list_tools(self) -> List[Any]:
        """Return the server's tool objects (each with .name / .description).

        ``ClientSession.list_tools()`` returns a ``ListToolsResult``; the
        actual tools live on its ``.tools`` attribute.
        """
        result = await self.session.list_tools()
        return result.tools

    async def call_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
        """Invoke an MCP tool by name and return its raw result."""
        result = await self.session.call_tool(tool_name, arguments)
        return result

    @staticmethod
    def _parse_tool_request(text: str) -> Optional[Tuple[str, Dict[str, Any]]]:
        """Return ``(tool_name, arguments)`` if *text* is a tool-call JSON blob.

        Returns None for anything that is not a JSON object containing a
        "tool" key.  Missing "arguments" defaults to an empty dict instead of
        raising KeyError (the original swallowed that error silently).
        """
        try:
            payload = json.loads(text.strip())
        except json.JSONDecodeError:
            return None
        if isinstance(payload, dict) and "tool" in payload:
            return payload["tool"], payload.get("arguments", {})
        return None

    async def chat_turn(self, user_message: str) -> str:
        """Run one user turn; transparently execute at most one tool call.

        Appends the user message and the final assistant reply to history.
        If the model answers with a tool-request JSON, the tool is executed,
        its result is fed back, and the model is re-queried for a friendly
        final answer.  Tool failures are reported to the model instead of
        being silently discarded (the original ``except Exception: pass``
        hid every error on this path).
        """
        self.history.append({"role": "user", "content": user_message})
        response = await self.client.chat(
            model=self.model,
            messages=self.history,
            stream=False,
        )
        assistant_text = response["message"]["content"]

        tool_request = self._parse_tool_request(assistant_text)
        if tool_request is not None:
            tool_name, tool_args = tool_request
            print(f"[TOOL] {tool_name} {tool_args}")
            try:
                tool_result = await self.call_tool(tool_name, tool_args)
            except Exception as exc:
                # Surface the failure to the model so it can explain it.
                tool_result = f"Tool call failed: {exc}"
            self.history.append(
                {
                    "role": "assistant",
                    "content": f"Tool result:\n{tool_result}",
                }
            )
            # Re-query for the final, human-friendly answer.
            response2 = await self.client.chat(
                model=self.model,
                messages=self.history,
                stream=False,
            )
            assistant_text = response2["message"]["content"]

        self.history.append({"role": "assistant", "content": assistant_text})
        return assistant_text

    async def repl(self) -> None:
        """Interactive loop; /quit, /exit, Ctrl-C, or EOF ends it."""
        await self.start_mcp()
        tools = await self.list_tools()
        print("Available MCP tools:")
        for tool in tools:
            # Tool objects expose attributes, not dict keys.
            print(" -", tool.name, tool.description or "")
        print("Type /quit to exit.\n")

        while True:
            try:
                # input() blocks; run it in a worker thread so the event loop
                # (and the MCP session's background tasks) stay responsive.
                user_in = (await asyncio.to_thread(input, "> ")).strip()
                if not user_in:
                    continue
                if user_in.lower() in {"/quit", "/exit"}:
                    break
                reply = await self.chat_turn(user_in)
                print("Assistant:", reply)
            except (KeyboardInterrupt, EOFError):
                break
            except Exception as e:
                print("Error:", e, file=sys.stderr)

    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc) -> None:
        # Unwinds the session, then the stdio transport, in LIFO order.
        await self._stack.aclose()
133
+
134
+
135
async def main() -> None:
    """Entry point: construct the app and run its REPL inside its lifecycle."""
    app = ChatApp()
    async with app:
        await app.repl()
138
 
 
 
139
 
140
if __name__ == "__main__":
    # Run the async entry point only when executed as a script.
    asyncio.run(main())