# Source metadata (extraction residue, preserved as a comment):
# file size 3,975 bytes, commit 16c8745, 116 lines.
import gradio as gr
import json
from llm.ollama_llm import query_ollama
from llm.rag_pipeline import retrieve_context
from logger import get_logger
logger = get_logger(__name__)
# Function to load latest sensor data
def get_latest_sensor_data(path="data/farm_data_log.json", num_entries=3):
    """Return the most recent entries from the JSON sensor log.

    Args:
        path: Path to the JSON log file; expected to contain a list of entries.
        num_entries: Number of trailing entries to return.

    Returns:
        Up to ``num_entries`` most-recent entries, or ``[]`` when the file is
        missing, contains invalid JSON, is empty, or ``num_entries`` <= 0.
    """
    # Guard the slice: data[-0:] returns the WHOLE list, not zero entries.
    if num_entries <= 0:
        return []
    try:
        with open(path, "r", encoding="utf-8") as f:
            data = json.load(f)
        return data[-num_entries:] if data else []
    except FileNotFoundError:
        logger.error(f"Sensor data file {path} not found.")
        return []
    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in {path}: {e}")
        return []
# Global query history
# In-memory (question, answer) pairs for this process only; not persisted
# across restarts. Appended by process_query, read by format_history,
# emptied by clear_history.
query_history = []
def process_query(user_query):
    """Handle a user query and return (response, formatted history).

    Gathers the latest sensor readings, retrieves RAG context, queries the
    LLM, and appends the (question, answer) pair to the global history.

    Args:
        user_query: Free-text question entered in the UI.

    Returns:
        Tuple of (response markdown, history markdown) for the two panels.
    """
    if not user_query.strip():
        # BUG FIX: format_history() already returns a single string;
        # "\n".join(...) on it interleaved newlines between CHARACTERS.
        return "Please enter a question.", format_history()
    logger.info("User query: %s", user_query)
    try:
        # Index the recent readings by timestamp for the LLM prompt.
        sensor_data_entries = get_latest_sensor_data()
        combined_sensor_data = {
            entry["timestamp"]: {
                "soil": entry["soil"],
                "water": entry["water"],
                "environment": entry["environment"],
            }
            for entry in sensor_data_entries
        }
        # Retrieve context and query LLM
        rag_context = retrieve_context(user_query)
        response = query_ollama(user_query, combined_sensor_data, rag_context)
        logger.info("--- FARM ASSISTANT RESPONSE ---")
        # Add to query history
        query_history.append((user_query, response))
        return response, format_history()
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Query processing failed: %s", e)
        # Same string-join bug fixed here as in the empty-input branch.
        return "Error: Could not process query. Please try again.", format_history()
def format_history():
    """Render the five most recent (question, answer) pairs as markdown."""
    recent = query_history[-5:]
    sections = [
        f"### Query {idx}\n**Q:** {question}\n**A:** {answer}\n"
        for idx, (question, answer) in enumerate(recent, start=1)
    ]
    return "\n\n".join(sections)
def clear_history():
    """Drop all stored queries and blank both output panels."""
    del query_history[:]  # in-place wipe, same as .clear()
    return "", ""
# Show latest sensor data as markdown
def display_sensor_data():
    """Render the most recent sensor reading as a markdown string.

    Returns:
        Markdown for the latest log entry, or a short notice when no data is
        available or the entry is missing expected fields.
    """
    sensor_data_entries = get_latest_sensor_data()
    if not sensor_data_entries:
        return "No sensor data available."
    latest_entry = sensor_data_entries[-1]
    try:
        text = f"""
**Latest Reading: {latest_entry['timestamp']}**
### Soil
- Moisture: {latest_entry['soil']['moisture']}
- pH: {latest_entry['soil']['pH']}
- Temperature: {latest_entry['soil']['temperature']}
### Water
- pH: {latest_entry['water']['pH']}
- Turbidity: {latest_entry['water']['turbidity']}
- Temperature: {latest_entry['water']['temperature']}
### Environment
- Humidity: {latest_entry['environment']['humidity']}
- Temperature: {latest_entry['environment']['temperature']}
- Rainfall: {latest_entry['environment']['rainfall']}
"""
    except KeyError as e:
        # A malformed entry previously raised at UI build time (this function
        # is called while constructing the Blocks layout), crashing startup.
        logger.error(f"Sensor entry missing expected field: {e}")
        return "Sensor data is incomplete."
    return text
# Gradio UI
with gr.Blocks(theme=gr.themes.Soft(primary_hue="green")) as demo:
    gr.Markdown("# 🌾 AgriEdge: Smart Farm Assistant")
    gr.Markdown("Ask about your farm's conditions and get tailored advice based on sensor data.")
    with gr.Tab("Ask Assistant"):
        query = gr.Textbox(
            label="Enter your farm-related question",
            placeholder="e.g., What should I do about soil moisture?"
        )
        submit_btn = gr.Button("Submit Query")
        clear_btn = gr.Button("Clear History")
        response_box = gr.Markdown()
        history_box = gr.Markdown()
        submit_btn.click(process_query, inputs=query, outputs=[response_box, history_box])
        # Also submit when the user presses Enter in the textbox.
        query.submit(process_query, inputs=query, outputs=[response_box, history_box])
        clear_btn.click(clear_history, inputs=None, outputs=[response_box, history_box])
    with gr.Tab("Recent Sensor Data"):
        # Pass the callable (not its result) so Gradio re-runs it on every
        # page load; calling it here would freeze the value at startup.
        sensor_md = gr.Markdown(display_sensor_data)

# Guard the launch so importing this module (e.g. in tests) does not
# start the web server.
if __name__ == "__main__":
    demo.launch()
# (end of extracted source)