Spaces:
Sleeping
Sleeping
Update main.py
Browse files
main.py
CHANGED
|
@@ -1,77 +1,80 @@
|
|
| 1 |
-
from fastapi import FastAPI
|
| 2 |
-
import gradio as gr
|
| 3 |
-
import google.generativeai as genai
|
| 4 |
-
from weather import get_current_weather
|
| 5 |
-
import os
|
| 6 |
-
import uvicorn
|
| 7 |
-
import json
|
| 8 |
-
|
| 9 |
-
app = FastAPI()
|
| 10 |
-
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
|
| 11 |
-
model = genai.GenerativeModel('gemini-pro')
|
| 12 |
-
|
| 13 |
-
# Tools configuration
|
| 14 |
-
tools = [
|
| 15 |
-
{
|
| 16 |
-
"name": "get_current_weather",
|
| 17 |
-
"description": "Get current weather information",
|
| 18 |
-
"parameters": {
|
| 19 |
-
"type": "object",
|
| 20 |
-
"properties": {
|
| 21 |
-
"location": {
|
| 22 |
-
"type": "string",
|
| 23 |
-
"description": "City name, e.g. Paris, France"
|
| 24 |
-
}
|
| 25 |
-
},
|
| 26 |
-
"required": ["location"]
|
| 27 |
-
}
|
| 28 |
-
}
|
| 29 |
-
]
|
| 30 |
-
|
| 31 |
-
def execute_tool_call(tool_call):
|
| 32 |
-
func_name = tool_call["name"]
|
| 33 |
-
args = json.loads(tool_call["arguments"])
|
| 34 |
-
|
| 35 |
-
if func_name == "get_current_weather":
|
| 36 |
-
return get_current_weather(args["location"])
|
| 37 |
-
else:
|
| 38 |
-
return f"Error: Unknown function {func_name}"
|
| 39 |
-
|
| 40 |
-
def process_message(message):
|
| 41 |
-
# Generate tool use suggestions
|
| 42 |
-
response = model.generate_content(
|
| 43 |
-
f"Analyze if user needs weather info: {message}. Respond ONLY with JSON:"
|
| 44 |
-
'{{"needs_weather": true|false, "location": "city"}}'
|
| 45 |
-
)
|
| 46 |
-
|
| 47 |
-
try:
|
| 48 |
-
analysis = json.loads(response.text)
|
| 49 |
-
if analysis.get("needs_weather", False):
|
| 50 |
-
location = analysis.get("location", "London")
|
| 51 |
-
return get_current_weather(location)
|
| 52 |
-
except:
|
| 53 |
-
pass
|
| 54 |
-
|
| 55 |
-
# Regular response
|
| 56 |
-
chat = model.start_chat(history=[])
|
| 57 |
-
response = chat.send_message(message)
|
| 58 |
-
return response.text
|
| 59 |
-
|
| 60 |
-
# Gradio Interface
|
| 61 |
-
with gr.Blocks() as demo:
|
| 62 |
-
chatbot = gr.Chatbot(height=500)
|
| 63 |
-
msg = gr.Textbox(label="Message")
|
| 64 |
-
clear = gr.Button("Clear")
|
| 65 |
-
|
| 66 |
-
def respond(message, chat_history):
|
| 67 |
-
bot_message = process_message(message)
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from fastapi import FastAPI
|
| 2 |
+
import gradio as gr
|
| 3 |
+
import google.generativeai as genai
|
| 4 |
+
from weather import get_current_weather
|
| 5 |
+
import os
|
| 6 |
+
import uvicorn
|
| 7 |
+
import json
|
| 8 |
+
|
| 9 |
+
# FastAPI application; the Gradio UI is mounted onto it further below.
app = FastAPI()

# Configure the Gemini client from the environment.
# NOTE(review): if GEMINI_API_KEY is unset this passes api_key=None and the
# failure only surfaces on the first model call — confirm that is acceptable.
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

# Shared generative model instance used by process_message below.
model = genai.GenerativeModel('gemini-pro')
|
| 12 |
+
|
| 13 |
+
# Tools configuration: function-calling schema for the weather tool.
# NOTE(review): this schema is declared but never passed to the model anywhere
# in this file (generate_content below receives no tools=... argument), so it
# is currently dead configuration — confirm whether it should be wired into
# the model call or removed.
tools = [
    {
        "name": "get_current_weather",
        "description": "Get current weather information",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "City name, e.g. Paris, France"
                }
            },
            "required": ["location"]
        }
    }
]
|
| 30 |
+
|
| 31 |
+
def execute_tool_call(tool_call):
    """Execute the tool named in *tool_call* and return its result.

    *tool_call* is a dict with a ``"name"`` key and an ``"arguments"`` key
    holding a JSON-encoded object. Unknown tool names produce an error
    string rather than raising.
    """
    name = tool_call["name"]
    # Arguments arrive JSON-encoded; decode before dispatching (a malformed
    # payload raises json.JSONDecodeError regardless of the tool name).
    params = json.loads(tool_call["arguments"])
    if name == "get_current_weather":
        return get_current_weather(params["location"])
    return f"Error: Unknown function {name}"
|
| 39 |
+
|
| 40 |
+
def process_message(message):
    """Answer *message*, fetching live weather data when the user asks for it.

    A first Gemini call classifies whether the message needs weather info
    and, if so, for which location; on success the weather tool's output is
    returned directly. Otherwise — or when the classification cannot be
    parsed — the message is answered by a regular chat completion.
    """
    # Ask the model for a machine-readable classification of the request.
    # The JSON example is a plain (non-f) string literal, so it must use
    # single braces: the previous '{{...}}' form sent literal doubled
    # braces to the model, corrupting the requested output format.
    response = model.generate_content(
        f"Analyze if user needs weather info: {message}. Respond ONLY with JSON:"
        ' {"needs_weather": true|false, "location": "city"}'
    )

    try:
        # Models frequently wrap JSON in ```...``` fences; strip them
        # before parsing instead of failing on the raw text.
        raw = response.text.strip()
        if raw.startswith("```"):
            raw = raw.strip("`").strip()
            if raw.startswith("json"):
                raw = raw[len("json"):]
        analysis = json.loads(raw.strip())
        if analysis.get("needs_weather", False):
            # Default to London when the model omits the location.
            location = analysis.get("location", "London")
            return get_current_weather(location)
    except (ValueError, AttributeError):
        # ValueError covers json.JSONDecodeError (its subclass) and the
        # error .text raises on a blocked/empty candidate; AttributeError
        # covers a missing .text. Fall through to a normal chat answer
        # instead of crashing — narrower than the previous bare except.
        pass

    # Regular conversational response (fresh chat; no history is kept).
    chat = model.start_chat(history=[])
    response = chat.send_message(message)
    return response.text
|
| 59 |
+
|
| 60 |
+
# Gradio Interface with new message format
with gr.Blocks() as demo:
    # type="messages" makes the chatbot consume {"role": ..., "content": ...}
    # dicts, matching what respond() appends below.
    chatbot = gr.Chatbot(height=500, type="messages")
    msg = gr.Textbox(label="Message")
    clear = gr.Button("Clear")

    def respond(message, chat_history):
        # Produce the bot reply first; process_message may call the weather
        # tool or fall back to a plain chat completion.
        bot_message = process_message(message)
        # Append user and bot messages
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": bot_message})
        # First output clears the textbox, second updates the chatbot.
        return "", chat_history

    # Pressing Enter in the textbox submits; outputs map to (msg, chatbot).
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Returning None resets the chatbot component to empty.
    clear.click(lambda: None, None, chatbot, queue=False)
|
| 75 |
+
|
| 76 |
+
# Serve the Gradio UI at the root path of the FastAPI app.
app = gr.mount_gradio_app(app, demo, path="/")

if __name__ == "__main__":
    # PORT is typically injected by the hosting platform; default to 8000
    # for local runs. int() accepts both the str from the env and the
    # int default.
    port = int(os.getenv("PORT", 8000))
    uvicorn.run(app, host="0.0.0.0", port=port)
|