Zoro-147 committed on
Commit
453e0ef
·
verified ·
1 Parent(s): 74a38e6

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +49 -63
main.py CHANGED
@@ -1,80 +1,66 @@
1
- from fastapi import FastAPI
2
  import gradio as gr
3
  import google.generativeai as genai
4
- from weather import get_current_weather
5
  import os
6
- import uvicorn
7
  import json
8
 
9
- app = FastAPI()
10
  genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
11
  model = genai.GenerativeModel('gemini-pro')
12
 
13
- # Tools configuration
14
- tools = [
15
- {
16
- "name": "get_current_weather",
17
- "description": "Get current weather information",
18
- "parameters": {
19
- "type": "object",
20
- "properties": {
21
- "location": {
22
- "type": "string",
23
- "description": "City name, e.g. Paris, France"
24
- }
25
- },
26
- "required": ["location"]
27
- }
28
- }
29
- ]
30
-
31
- def execute_tool_call(tool_call):
32
- func_name = tool_call["name"]
33
- args = json.loads(tool_call["arguments"])
34
 
35
- if func_name == "get_current_weather":
36
- return get_current_weather(args["location"])
37
- else:
38
- return f"Error: Unknown function {func_name}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  def process_message(message):
41
- # Generate tool use suggestions
42
- response = model.generate_content(
43
- f"Analyze if user needs weather info: {message}. Respond ONLY with JSON:"
44
- '{{"needs_weather": true|false, "location": "city"}}'
45
- )
 
46
 
 
47
  try:
48
- analysis = json.loads(response.text)
49
- if analysis.get("needs_weather", False):
50
- location = analysis.get("location", "London")
51
- return get_current_weather(location)
52
- except:
53
- pass
54
-
55
- # Regular response
56
- chat = model.start_chat(history=[])
57
- response = chat.send_message(message)
58
- return response.text
59
-
60
- # Gradio Interface with new message format
61
- with gr.Blocks() as demo:
62
- chatbot = gr.Chatbot(height=500, type="messages")
63
- msg = gr.Textbox(label="Message")
64
- clear = gr.Button("Clear")
65
-
66
- def respond(message, chat_history):
67
- bot_message = process_message(message)
68
- # Append user and bot messages
69
- chat_history.append({"role": "user", "content": message})
70
- chat_history.append({"role": "assistant", "content": bot_message})
71
- return "", chat_history
72
 
73
- msg.submit(respond, [msg, chatbot], [msg, chatbot])
74
- clear.click(lambda: None, None, chatbot, queue=False)
 
 
 
75
 
76
- app = gr.mount_gradio_app(app, demo, path="/")
 
 
 
 
 
77
 
 
78
  if __name__ == "__main__":
79
- port = int(os.getenv("PORT", 8000))
80
- uvicorn.run(app, host="0.0.0.0", port=port)
 
 
import gradio as gr
import google.generativeai as genai
import requests
import os
import json

# Configure Gemini
# NOTE(review): genai.configure runs at import time; if GEMINI_API_KEY is
# unset this passes None and Gemini calls will fail later, not here.
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
# Shared Gemini model handle used by process_message below.
model = genai.GenerativeModel('gemini-pro')
10
 
11
def get_current_weather(location: str) -> str:
    """Fetch current conditions for *location* from weatherapi.com.

    Args:
        location: Free-form place query (city name, etc.) passed as ``q``.

    Returns:
        A human-readable multi-line summary string, or an error-message
        string on any failure (missing key, network error, or an
        unexpected response shape). Never raises.
    """
    api_key = os.getenv("WEATHER_API_KEY")
    if not api_key:
        return "Weather API key not set"

    try:
        # Use HTTPS: the API key travels in the query string and must not
        # be sent in cleartext (the original used http://).
        response = requests.get(
            "https://api.weatherapi.com/v1/current.json",
            params={"key": api_key, "q": location, "aqi": "no"},
            timeout=10,
        )
        response.raise_for_status()
        data = response.json()
        current = data["current"]
        return (
            f"Weather in {data['location']['name']}:\n"
            f"• Temperature: {current['temp_c']}°C\n"
            f"• Condition: {current['condition']['text']}\n"
            f"• Humidity: {current['humidity']}%\n"
            f"• Wind: {current['wind_kph']} km/h"
        )
    except Exception as e:
        # Broad catch is deliberate: the chat UI should always receive a
        # message, never a traceback (covers HTTP, JSON and KeyError cases).
        return f"Error fetching weather: {str(e)}"
35
 
36
  def process_message(message):
37
+ """Process user message with Gemini"""
38
+ # First check if it's a weather request
39
+ if "weather" in message.lower():
40
+ # Simple extraction - get the last word as location
41
+ location = message.split()[-1] if len(message.split()) > 1 else "London"
42
+ return get_current_weather(location)
43
 
44
+ # Otherwise use Gemini
45
  try:
46
+ response = model.generate_content(message)
47
+ return response.text
48
+ except Exception as e:
49
+ return f"Gemini error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
# Gradio Interface
def chat_interface(message, history):
    """Adapter for gr.ChatInterface: delegate to process_message.

    The *history* argument is required by the Gradio callback signature
    but is not consulted — each turn is handled statelessly.
    """
    return process_message(message)
56
 
57
# Build the chat UI; chat_interface is invoked once per user message.
demo = gr.ChatInterface(
    fn=chat_interface,
    title="MCP Server",
    description="Ask about anything or request weather updates",
    examples=["What's the weather in Paris?", "Explain quantum computing"]
)

# For Hugging Face Spaces
if __name__ == "__main__":
    # 0.0.0.0:7860 is the standard binding Spaces expects.
    demo.launch(server_name="0.0.0.0", server_port=7860)