# mcp-alert-generator/src/compiler.py
"""
Response Compiler - Stage 3
Compiles MCP results with focus on ALERTING INFORMATION ONLY
"""
from typing import Dict, Any
from openai import OpenAI
import httpx
import os
class ResponseCompiler:
    """
    Compiles MCP server results into alert-focused responses.

    KEY PRINCIPLE: All MCPs are queried, but the compiler extracts ONLY
    the alerting, concerning, or actionable information. Normal/good status
    is minimized or omitted entirely.
    """

    # Raw data shown per server in the LLM-free fallback summary is capped
    # at this many characters to keep the output readable.
    _FALLBACK_DATA_LIMIT = 300

    def __init__(self):
        """Initialize the compiler with an OpenAI client.

        Raises:
            ValueError: If the OPENAI_API_KEY environment variable is not set.
        """
        api_key = os.getenv("OPENAI_API_KEY")
        if not api_key:
            raise ValueError("OPENAI_API_KEY environment variable not set")
        # Create custom httpx client without proxies parameter (compatibility fix)
        http_client = httpx.Client(
            timeout=httpx.Timeout(60.0, connect=10.0),
            limits=httpx.Limits(max_keepalive_connections=10, max_connections=20),
        )
        self.client = OpenAI(api_key=api_key, http_client=http_client)

    @staticmethod
    def _format_coordinates(location: Dict[str, float]) -> str:
        """Return coordinates as "lat°N/S, lon°E/W" with hemisphere by sign.

        Fixes the previous hard-coded °N/°E suffixes, which mislabeled
        southern/western locations (e.g. -33.8688 rendered as "-33.8688°N").

        Args:
            location: Dict with 'latitude' and 'longitude' keys.

        Returns:
            Human-readable coordinate string, e.g. "33.8688°S, 151.2093°E".
        """
        lat = location["latitude"]
        lon = location["longitude"]
        ns = "N" if lat >= 0 else "S"
        ew = "E" if lon >= 0 else "W"
        return f"{abs(lat):.4f}°{ns}, {abs(lon):.4f}°{ew}"

    @staticmethod
    def _successful_results(mcp_results: Dict[str, Any]):
        """Yield (server_name, data) pairs for MCP calls that succeeded with data.

        Uses .get() so malformed result entries (missing 'status' or 'data'
        keys) are silently skipped instead of raising KeyError.
        """
        for server_name, result in mcp_results.items():
            if result.get("status") == "success" and result.get("data"):
                yield server_name, result["data"]

    def compile_alert_summary(
        self,
        mcp_results: Dict[str, Any],
        location: Dict[str, float],
        location_name: str = ""
    ) -> str:
        """
        Compile MCP results into an alert summary focusing ONLY on concerning
        information.

        This is where the intelligence lives - not in routing. All MCP servers
        are queried, but we extract only what farmers need to act on.

        Args:
            mcp_results: Results from all MCP servers.
            location: Dict with 'latitude' and 'longitude' keys.
            location_name: Optional human-readable location name.

        Returns:
            Alert summary highlighting only actionable concerns, or a raw
            fallback summary if the LLM call fails.
        """
        # Build comprehensive context from all successful MCP results.
        context_parts = [
            f"=== {name.upper()} DATA ===\n{data}"
            for name, data in self._successful_results(mcp_results)
        ]
        if not context_parts:
            return "Unable to generate alert summary - no data available from MCP servers."
        full_context = "\n\n".join(context_parts)

        coordinates = self._format_coordinates(location)
        # Avoid a stray leading space when no human-readable name was given.
        location_str = f"{location_name} ({coordinates})" if location_name else coordinates

        # THE KEY PROMPT: Extract only alerting information
        prompt = f"""You are an agricultural alert analyst. Your task is to analyze comprehensive agricultural data and extract ONLY the alerting, concerning, or time-sensitive information.

LOCATION: {location_str}

COMPREHENSIVE DATA FROM ALL MONITORING SYSTEMS:
{full_context}

YOUR TASK:
Generate a concise ALERT SUMMARY that includes ONLY:

1. **CRITICAL ALERTS** - Immediate threats requiring urgent action:
   - Extreme weather conditions (heat waves, storms, frost)
   - Active pest/disease outbreaks
   - Severe water scarcity or excess
   - Soil contamination or extreme deficiencies

2. **IMPORTANT WARNINGS** - Developing issues requiring attention:
   - Concerning trends (declining water table, degrading soil)
   - Moderate pest pressure building up
   - Suboptimal weather patterns affecting crops
   - Nutrient imbalances needing correction

3. **ACTIONABLE RECOMMENDATIONS** - What farmers should do:
   - Specific actions with timing
   - Preventive measures
   - Mitigation strategies

CRITICAL RULES:
- OMIT all normal/good status information unless it provides important context
- If weather is normal → DON'T mention it or say "Weather: Normal" briefly
- If soil is healthy → SKIP or say "Soil: No concerns" briefly
- If no pest activity → SKIP or say "Pests: No threats detected" briefly
- FOCUS on deviations from normal, risks, and time-sensitive items
- Use specific numbers/dates only when they convey urgency
- Maximum 400 words total
- If everything is fine, say so clearly upfront then provide brief context

Structure:
1. Status Line: "CRITICAL ALERTS DETECTED" or "NO CRITICAL ALERTS - FAVORABLE CONDITIONS"
2. Critical Alerts section (if any)
3. Important Warnings section (if any)
4. Recommended Actions (always include if alerts/warnings exist)
5. Add raw API output in JSON format at end for reference.

Be direct. Skip pleasantries. Farmers need to know what matters."""

        try:
            response = self.client.chat.completions.create(
                model="gpt-4o",
                messages=[
                    {
                        "role": "system",
                        "content": "You are an expert agricultural alert analyst. Extract ONLY concerning, alerting, or actionable information. Omit normal status unless contextually necessary."
                    },
                    {"role": "user", "content": prompt}
                ],
                temperature=0.2,  # low temperature: factual extraction, not creativity
                max_tokens=1000
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            # Degrade gracefully: a raw data dump beats no alert at all.
            print(f"⚠️ Compilation error: {e}")
            return self._create_fallback_summary(mcp_results, location_str)

    def compile_response(
        self,
        query: str,
        mcp_results: Dict[str, Any],
        location: Dict[str, float]
    ) -> str:
        """
        Compile MCP results into a response for a specific query.

        Args:
            query: User's original query.
            mcp_results: Results from MCP servers.
            location: Dict with 'latitude' and 'longitude' keys.

        Returns:
            Compiled response text focusing on query-relevant information,
            or an error message string if the LLM call fails.
        """
        # Format successful MCP results as LLM context.
        context_parts = [
            f"{name.upper()}: {data}"
            for name, data in self._successful_results(mcp_results)
        ]
        context = "\n\n".join(context_parts)

        prompt = f"""Answer this farmer's question using the provided data, focusing on actionable insights.

QUESTION: {query}

LOCATION: {self._format_coordinates(location)}

AVAILABLE DATA:
{context}

Provide a focused answer that:
1. Directly addresses the question
2. Highlights any concerning information relevant to the query
3. Gives specific recommendations
4. Keeps explanations brief and practical
5. Omits irrelevant normal/good status information
6. Add Raw API Output from all MCP Servers at the end for reference.

Be conversational but professional. Skip unnecessary background unless it aids understanding."""

        try:
            response = self.client.chat.completions.create(
                model="gpt-4o",
                messages=[
                    {"role": "system", "content": "You are a knowledgeable agricultural advisor providing practical guidance to farmers."},
                    {"role": "user", "content": prompt}
                ],
                temperature=0.5,  # slightly creative: conversational advisory tone
                max_tokens=800
            )
            return response.choices[0].message.content.strip()
        except Exception as e:
            print(f"⚠️ Compilation error: {e}")
            return f"Error compiling response: {str(e)}"

    def _create_fallback_summary(self, mcp_results: Dict[str, Any], location_str: str) -> str:
        """Create a basic fallback summary if LLM compilation fails.

        Args:
            mcp_results: Results from MCP servers.
            location_str: Pre-formatted location string for the heading.

        Returns:
            Plain-text dump of each successful server's data, truncated to
            _FALLBACK_DATA_LIMIT characters per server. The "..." ellipsis is
            only appended when data was actually truncated (the previous
            version added it unconditionally).
        """
        summary_parts = [f"Alert Summary for {location_str}\n\n"]
        for server_name, data in self._successful_results(mcp_results):
            summary_parts.append(f"{server_name.upper()}:")
            text = str(data)
            if len(text) > self._FALLBACK_DATA_LIMIT:
                text = text[:self._FALLBACK_DATA_LIMIT] + "..."
            summary_parts.append(text + "\n")
        return "\n".join(summary_parts)