# Source: algorhythym's Hugging Face Space — app.py (commit a1cbaa7, verified)
import asyncio
import os
import subprocess
import sys
import time

import streamlit as st
from dotenv import load_dotenv

# Import the mcp-use package
from mcp_use import MCPAgent, MCPClient
from langchain_groq import ChatGroq
# Load environment variables (e.g. GROQ_API_KEY) from a local .env file
load_dotenv()

# Page config. NOTE: the original page_icon was ":🌦️" — the stray leading
# colon is not a valid emoji shortcode, so it is dropped here.
st.set_page_config(page_title="Autonomous Weather Agent", page_icon="🌦️")
st.markdown(
"<h1 style='text-align: center; white-space: nowrap; font-size: 2.5em;'>🌦️ Autonomous Weather Agent (MCP)</h1>",
unsafe_allow_html=True
)
# Intro banner. The original text was mojibake (UTF-8 read as cp1252,
# e.g. "β€”" for "—", "β†’" for "→"); restored to the intended characters.
st.markdown("""
<div style='text-align: center;'>
I automatically choose the right tool for your question — forecasts or alerts.<br><br>
See how I choose different tools:<br>
🌤️ What's the weather in Austin? → Uses Forecast Tool<br>
⚠️ Any alerts in California? → Uses Alert Tool<br>
🔄 Compare NY and Boston weather → Uses Multiple Tools
</div>
""", unsafe_allow_html=True)
# Seed st.session_state with defaults the first time the script runs;
# Streamlit re-executes this file on every interaction, so each key is
# only written when it is not already present.
_session_defaults = {
    "messages": [],              # full chat transcript rendered in the UI
    "agent": None,               # MCPAgent, created once the server is up
    "server_started": False,     # whether the MCP subprocess was launched
    "conversation_memory": [],   # SIMPLE Q&A MEMORY
}
for _key, _default in _session_defaults.items():
    if _key not in st.session_state:
        st.session_state[_key] = _default
# --- 1. Start MCP Server as SUBPROCESS ---
if not st.session_state.server_started:
    try:
        with st.spinner("Starting MCP server..."):
            # Launch the MCP server as a child of this Streamlit process.
            # sys.executable guarantees the same interpreter/venv is used.
            mcp_process = subprocess.Popen(
                [sys.executable, "-m", "mcpserver.server"],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE,
            )
            # Give the server a moment to initialize before the agent connects.
            time.sleep(3)
            # If the process already died, report the failure instead of
            # claiming success (the original marked success unconditionally).
            if mcp_process.poll() is not None:
                raise RuntimeError(
                    f"MCP server exited early with code {mcp_process.returncode}"
                )
            st.session_state.server_started = True
            st.session_state.mcp_process = mcp_process
            st.sidebar.success("✅ MCP Server started!")
    except Exception as e:
        st.sidebar.error(f"❌ Server failed to start: {e}")
# --- 2. Initialize Agent ---
if st.session_state.server_started and st.session_state.agent is None:
    try:
        with st.spinner("Initializing agent..."):
            # Fail fast with a clear message instead of passing api_key=None
            # down into ChatGroq, which would surface a cryptic auth error.
            groq_key = os.getenv("GROQ_API_KEY")
            if not groq_key:
                raise RuntimeError("GROQ_API_KEY environment variable is not set")
            # weather.json lives in the repository root next to this script.
            client = MCPClient.from_config_file("weather.json")
            llm = ChatGroq(model="llama-3.1-8b-instant", api_key=groq_key)
            agent = MCPAgent(llm=llm, client=client, max_steps=15, memory_enabled=True)
            st.session_state.agent = agent
            st.sidebar.success("✅ Agent initialized!")
    except Exception as e:
        st.error(f"❌ Agent init failed: {e}")
# Re-render the full chat history on every rerun
# (Streamlit re-executes the whole script per interaction).
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Show connection status (original "βœ…" was mojibake for "✅").
if st.session_state.server_started:
    st.info("✅ MCP Server Connected - Agent is ready for commands.")
def _clean_agent_response(raw_response: str) -> str:
    """Strip ReAct-style internal traces from an agent reply.

    Keeps only the text after the last "Final Answer:" marker; failing that,
    keeps the text before the first "Thought:" trace; otherwise returns the
    reply unchanged.
    """
    if "Final Answer:" in raw_response:
        return raw_response.split("Final Answer:")[-1].strip()
    if "Thought:" in raw_response:
        return raw_response.split("Thought:")[0].strip()
    return raw_response


def _build_prompt(question: str, memory: list) -> str:
    """Prefix the new question with the last few Q&A exchanges, if any."""
    if not memory:
        return question
    memory_context = "Recent conversation:\n" + "\n".join(
        f"User: {item['user']}\nAgent: {item['agent']}"
        for item in memory[-3:]  # last 3 exchanges keep the prompt short
    )
    return f"{memory_context}\n\nNew question: {question}"


# Chat input
if prompt := st.chat_input("Ask about weather or alerts (e.g., 'Weather in Austin?' or 'Alerts in CA?')..."):
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    # Get assistant response
    with st.chat_message("assistant"):
        if st.session_state.agent:
            try:
                with st.spinner("Thinking..."):
                    enhanced_prompt = _build_prompt(prompt, st.session_state.conversation_memory)
                    raw_response = asyncio.run(st.session_state.agent.run(enhanced_prompt))
                    clean_response = _clean_agent_response(raw_response)
                    st.markdown(clean_response)
                    st.session_state.messages.append({"role": "assistant", "content": clean_response})
                    # UPDATE MEMORY (store only clean Q&A)
                    st.session_state.conversation_memory.append({
                        "user": prompt,
                        "agent": clean_response
                    })
                    # Keep only last 5 exchanges (prevent memory overload)
                    if len(st.session_state.conversation_memory) > 5:
                        st.session_state.conversation_memory.pop(0)
            except Exception as e:
                st.error(f"Error: {e}")
                # Show server logs, but ONLY if the server has exited:
                # .read() on a live pipe blocks until EOF and would hang
                # the whole app (the original read unconditionally).
                if "mcp_process" in st.session_state:
                    proc = st.session_state.mcp_process
                    try:
                        if proc.poll() is not None and proc.stderr:
                            stderr_output = proc.stderr.read()
                            if stderr_output:
                                st.code(f"Server logs: {stderr_output}")
                    except OSError:
                        # Best-effort log display; never mask the real error.
                        pass
        else:
            st.error("Agent not initialized. Please check server logs.")