mgokg committed on
Commit
b19448f
·
verified ·
1 Parent(s): 3286ab8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -89
app.py CHANGED
@@ -1,104 +1,69 @@
1
- import base64
2
- import gradio as gr
3
  import os
4
- import json
 
5
  from google import genai
6
  from google.genai import types
7
- from gradio_client import Client
8
-
9
-
10
- route="""
11
- how to handle special case "zugverbindung".
12
- Wichtig: Dies Regeln gelten nur wenn eine zugverbindung angefragt wird, else answer prompt
13
- Regeln:
14
- Wenn eine Zugverbindung von {Startort} nach {Zielort} angefragt wird, return json object with Startort and Zielort.
15
- always follow json scheme below.
16
-
17
- Wichtig: Gib absolut keinen Text vor oder nach dem JSON aus (keine Erklärungen, kein "Hier ist das Ergebnis").
18
-
19
- {
20
- "start_loc": "fill in Startort here",
21
- "dest_loc": "fill in Zielort here"
22
- }
23
-
24
- """
25
-
26
-
27
 
28
- def clean_json_string(json_str):
29
- """
30
- Removes any comments or prefixes before the actual JSON content.
31
- """
32
- # Find the first occurrence of '{'
33
- json_start = json_str.find('{')
34
- if json_start == -1:
35
- # If no '{' is found, try with '[' for arrays
36
- json_start = json_str.find('[')
37
- if json_start == -1:
38
- return json_str # Return original if no JSON markers found
39
-
40
- # Extract everything from the first JSON marker
41
- cleaned_str = json_str[json_start:]
42
- return cleaned_str
43
- # Verify it's valid JSON
44
- try:
45
- json.loads(cleaned_str)
46
- return cleaned_str
47
- except json.JSONDecodeError:
48
- return json_str # Return original if cleaning results in invalid JSON
49
-
50
- def generate(input_text):
51
  try:
 
52
  client = genai.Client(
53
  api_key=os.environ.get("GEMINI_API_KEY"),
54
  )
55
- except Exception as e:
56
- return f"Error initializing client: {e}. Make sure GEMINI_API_KEY is set."
57
-
58
- model = "gemini-flash-latest"
59
- contents = [
60
- types.Content(
61
- role="user",
62
- parts=[
63
- types.Part.from_text(text=f"{input_text}"),
64
- ],
65
- ),
66
- ]
67
- tools = [
68
- types.Tool(google_search=types.GoogleSearch()),
69
- ]
70
- generate_content_config = types.GenerateContentConfig(
71
- temperature=0.4,
72
- thinking_config = types.ThinkingConfig(
73
- thinking_budget=0,
74
- ),
75
- tools=tools,
76
- response_mime_type="text/plain",
77
- )
78
 
 
 
 
 
 
 
 
 
 
 
 
79
 
80
- response_text = ""
81
- try:
82
- for chunk in client.models.generate_content_stream(
83
- model=model,
84
- contents=contents,
85
- config=generate_content_config,
86
- ):
87
- response_text += chunk.text
88
  except Exception as e:
89
- return f"Error during generation: {e}"
90
- data = clean_json_string(response_text)
91
- data = data[:-1]
92
- return response_text, ""
93
-
94
 
95
- if __name__ == '__main__':
 
 
96
 
 
97
  with gr.Blocks() as demo:
98
- title=gr.Markdown("# Gemini 2.0 Flash + Websearch")
99
  output_textbox = gr.Markdown()
100
- input_textbox = gr.Textbox(lines=3, label="", placeholder="Enter message here...")
101
- submit_button = gr.Button("send")
102
- submit_button.click(fn=generate,inputs=input_textbox,outputs=[output_textbox, input_textbox])
103
- demo.launch(show_error=True)
104
- """"""
 
 
 
 
 
 
 
 
 
1
  import os
2
+ import asyncio
3
+ import gradio as gr
4
  from google import genai
5
  from google.genai import types
6
+ # Import MCP client for Python (standard in 2026)
7
+ from mcp import Client as MCPClient
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
async def generate(input_text):
    """Answer *input_text* with Gemini, using Google Search plus the
    DB-timetable MCP server as tools.

    Returns a ``(response_text, "")`` tuple so the Gradio callback can
    both display the answer and clear the input box.  On any failure an
    error string is returned in place of the answer (never raised), so
    the UI keeps working.
    """
    try:
        # Gemini client; the API key must come from the environment.
        client = genai.Client(
            api_key=os.environ.get("GEMINI_API_KEY"),
        )

        # Streamable-HTTP MCP endpoint exposed by the Hugging Face Space.
        mcp_url = "https://mgokg-db-timetable-api.hf.space/gradio_api/mcp/"

        # NOTE(review): assumes the installed `mcp` SDK exposes a `Client`
        # accepting a `url=` kwarg and usable as an async context manager —
        # confirm against the pinned `mcp` package version.
        async with MCPClient(url=mcp_url) as session:
            model = "gemini-2.5-flash"

            # Tool set: Google Search plus the whole MCP session passed
            # through as a toolset.
            cfg = types.GenerateContentConfig(
                temperature=0.4,
                tools=[
                    types.Tool(google_search=types.GoogleSearch()),
                    session,
                ],
                response_mime_type="text/plain",
            )

            # Accumulate the streamed answer chunk by chunk; chunks with
            # no text payload (e.g. tool-call frames) are skipped.
            pieces = []
            async for chunk in client.aio.models.generate_content_stream(
                model=model,
                contents=input_text,
                config=cfg,
            ):
                if chunk.text:
                    pieces.append(chunk.text)

            return "".join(pieces), ""

    except Exception as e:
        # Surface the failure in the UI instead of raising.
        return f"Error during processing: {str(e)}", ""
 
 
 
 
50
 
51
def gradio_wrapper(input_text):
    """Synchronous bridge so Gradio can invoke the async ``generate``.

    Runs the coroutine to completion on a fresh event loop and passes
    its ``(response, "")`` result straight through to the UI outputs.
    """
    coro = generate(input_text)
    return asyncio.run(coro)
54
 
55
if __name__ == '__main__':
    # Minimal demo UI: one request box, one markdown answer pane.
    with gr.Blocks() as demo:
        gr.Markdown("# Gemini Flash + DB Timetable (MCP)")
        output_textbox = gr.Markdown()
        input_textbox = gr.Textbox(
            lines=3,
            label="Request",
            placeholder="When is the next train from Berlin to Hamburg?",
        )
        submit_button = gr.Button("Send Request")

        # The second output target clears the input box after each send.
        submit_button.click(
            fn=gradio_wrapper,
            inputs=input_textbox,
            outputs=[output_textbox, input_textbox],
        )

        # Launched inside the Blocks context, matching the original layout.
        demo.launch(show_error=True)