mgokg commited on
Commit
4106eec
·
verified ·
1 Parent(s): 9e7f00d

Create mcp.txt

Browse files
Files changed (1) hide show
  1. mcp.txt +232 -0
mcp.txt ADDED
@@ -0,0 +1,232 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import os
3
+ import asyncio
4
+ from typing import List
5
+ from google import genai
6
+ from google.genai import types
7
+ from mcp import ClientSession, StdioServerParameters
8
+ from mcp.client.stdio import stdio_client
9
+
10
# MCP server configuration: launch `npx mcp-remote` as a stdio subprocess that
# proxies the remote Gradio MCP endpoint (DB timetable API hosted on HF Spaces).
server_params = StdioServerParameters(
    command="npx",
    args=[
        "mcp-remote",
        "https://mgokg-db-timetable-api.hf.space/gradio_api/mcp/"
    ]
)
18
+
19
async def generate1(input_text):
    """Answer one user query with Gemini, using MCP tools and Google Search.

    Opens an MCP stdio session, exposes the server's tools (plus a
    `google_search` function declaration) to Gemini, then runs an agentic
    loop (max 5 turns) resolving any tool calls the model emits.

    Args:
        input_text: The user's natural-language request.

    Returns:
        Tuple of (answer_markdown, status_text); status_text is "" on
        success, and on failure the first element carries the error message.
    """
    try:
        client = genai.Client(api_key=os.environ.get("GEMINI_API_KEY"))
    except Exception as e:
        return f"Fehler bei der Initialisierung: {e}", ""

    model = "gemini-2.5-flash"

    try:
        # Build the MCP session; it must stay open for the whole agentic
        # loop below, since tool calls go through `session`.
        async with stdio_client(server_params) as (read, write):
            async with ClientSession(read, write) as session:
                await session.initialize()

                # Fetch the tools advertised by the MCP server
                mcp_tools_data = await session.list_tools()

                # Convert the tools into Gemini-compatible declarations
                function_declarations = []

                # 1. Add the MCP tools
                for tool in mcp_tools_data.tools:
                    # Fall back to an empty object schema when the tool
                    # declares no input schema.
                    schema = tool.inputSchema if tool.inputSchema else {
                        "type": "object",
                        "properties": {},
                        "required": []
                    }

                    function_declarations.append(
                        types.FunctionDeclaration(
                            name=tool.name,
                            description=tool.description or "MCP Tool",
                            parameters=schema
                        )
                    )

                # 2. Add Google Search as a plain function declaration.
                #    The actual search is delegated to a second Gemini
                #    request (with the built-in google_search tool) when
                #    the model calls this function — see the loop below.
                google_search_declaration = types.FunctionDeclaration(
                    name="google_search",
                    description="Search the internet for current information, news, facts, and general knowledge. Use this for questions about current events, weather, general knowledge, or when you need up-to-date information from the web.",
                    parameters={
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "The search query to look up on Google"
                            }
                        },
                        "required": ["query"]
                    }
                )
                function_declarations.append(google_search_declaration)

                # Combine everything into a single Tool object
                tools = [types.Tool(function_declarations=function_declarations)]

                # System prompt steering which tool family the model picks
                system_instruction = """You are a helpful assistant with access to two types of tools:

1. TRAIN CONNECTION TOOLS (MCP): Use these ONLY when the user asks about train connections, schedules, routes, or German railway (DB) information. Available tools: """ + ", ".join([t.name for t in mcp_tools_data.tools]) + """

2. GOOGLE SEARCH: Use this for ALL other queries - general knowledge, current events, weather, news, calculations, or any topic not related to train connections.

Choose the appropriate tool based on the user's intent. For train connections, use the MCP tools. For everything else, use Google Search."""

                contents = [types.Content(
                    role="user",
                    parts=[types.Part.from_text(text=input_text)]
                )]

                # First API call
                response = await client.aio.models.generate_content(
                    model=model,
                    contents=contents,
                    config=types.GenerateContentConfig(
                        tools=tools,
                        temperature=0.4,
                        system_instruction=system_instruction
                    )
                )

                # Agentic loop: keep resolving tool calls until the model
                # answers in plain text or the turn budget is exhausted.
                turn_count = 0
                max_turns = 5

                while response.candidates and turn_count < max_turns:
                    candidate = response.candidates[0]

                    if not hasattr(candidate, 'content') or not candidate.content:
                        break

                    # Collect all function-call parts of this candidate
                    function_calls = [
                        part for part in candidate.content.parts
                        if hasattr(part, 'function_call') and part.function_call
                    ]

                    # No tool calls -> the model produced a final answer
                    if not function_calls:
                        break

                    turn_count += 1
                    contents.append(candidate.content)
                    tool_responses = []

                    for part in function_calls:
                        fc = part.function_call
                        tool_name = fc.name
                        tool_args = dict(fc.args) if fc.args else {}

                        try:
                            # Distinguish Google Search from MCP tools
                            if tool_name == "google_search":
                                # Delegate to Gemini's built-in google_search
                                # tool via a separate, tool-enabled request.
                                search_response = await client.aio.models.generate_content(
                                    model=model,
                                    contents=[types.Content(
                                        role="user",
                                        parts=[types.Part.from_text(text=f"Search Google for: {tool_args.get('query', '')}")]
                                    )],
                                    config=types.GenerateContentConfig(
                                        tools=[types.Tool(google_search=types.GoogleSearch())],
                                        temperature=0.4
                                    )
                                )
                                result_text = search_response.text

                            else:
                                # Execute the MCP tool over the open session
                                tool_result = await session.call_tool(tool_name, tool_args)

                                if tool_result.isError:
                                    result_text = f"Error: {tool_result.content[0].text if tool_result.content else 'Unknown error'}"
                                else:
                                    result_text = tool_result.content[0].text if tool_result.content else "No result"

                            tool_responses.append(
                                types.FunctionResponse(
                                    name=tool_name,
                                    response={"result": result_text}
                                )
                            )

                        except Exception as e:
                            # Report tool failures back to the model instead
                            # of aborting the whole conversation.
                            tool_responses.append(
                                types.FunctionResponse(
                                    name=tool_name,
                                    response={"error": str(e)}
                                )
                            )

                    # Feed all tool results back into the conversation
                    contents.append(types.Content(role="user", parts=[
                        types.Part.from_function_response(
                            name=tr.name,
                            response=tr.response
                        ) for tr in tool_responses
                    ]))

                    # Next API call with the tool results appended
                    response = await client.aio.models.generate_content(
                        model=model,
                        contents=contents,
                        config=types.GenerateContentConfig(
                            tools=tools,
                            temperature=0.4,
                            system_instruction=system_instruction
                        )
                    )

                # NOTE(review): response.text may be None if the loop exits
                # on max_turns with a pending function call — confirm.
                return response.text, ""

    except Exception as e:
        import traceback
        return f"Fehler während der Verarbeitung: {str(e)}\n\n{traceback.format_exc()}", ""
193
+
194
# Gradio UI wrapper
def generate(input_text):
    """Synchronous Gradio entry point: drive the async pipeline to completion.

    Returns the (answer, status) tuple from generate1, or an error tuple
    with a traceback if anything goes wrong at this layer.
    """
    import traceback
    try:
        result = asyncio.run(generate1(input_text))
    except Exception as exc:
        return f"UI Fehler: {str(exc)}\n\n{traceback.format_exc()}", ""
    return result
201
+
202
if __name__ == '__main__':
    # Build the Gradio UI: one input column, one output column.
    with gr.Blocks() as demo:
        gr.Markdown("# Gemini 2.5 Flash + Google Search + DB Timetable (MCP)")

        with gr.Row():
            with gr.Column():
                input_textbox = gr.Textbox(
                    lines=3,
                    label="Anfrage",
                    placeholder="z.B. Wie komme ich von Berlin nach Hamburg? oder Was ist die Hauptstadt von Frankreich?"
                )
                submit_button = gr.Button("Senden", variant="primary")

            with gr.Column():
                output_textbox = gr.Markdown(label="Antwort")
                status_textbox = gr.Textbox(label="Status", interactive=False)

        # Wire the button to the synchronous wrapper
        submit_button.click(
            fn=generate,
            inputs=input_textbox,
            outputs=[output_textbox, status_textbox]
        )

        # Also trigger on the Enter key in the textbox
        input_textbox.submit(
            fn=generate,
            inputs=input_textbox,
            outputs=[output_textbox, status_textbox]
        )

    demo.launch(show_error=True, share=False)