Spaces:
Build error
Build error
Upload 9 files
Browse files- README.md +52 -13
- app.py +100 -176
- demo.py +120 -0
- disease_info.py +130 -0
- error_fixes.py +139 -0
- model.py +128 -0
- requirements.txt +5 -5
- sample1.py +176 -0
- translator.py +36 -0
README.md
CHANGED
|
@@ -1,13 +1,52 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
-
|
| 12 |
-
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Plant Disease Detection System
|
| 2 |
+
|
| 3 |
+
This system uses a VGG16 model to detect plant diseases from leaf images and provides detailed information about the disease, including treatment recommendations and prevention measures using the Mistral AI API.
|
| 4 |
+
|
| 5 |
+
## Features
|
| 6 |
+
|
| 7 |
+
- Plant disease detection using VGG16 model
|
| 8 |
+
- Detailed disease information including:
|
| 9 |
+
- Disease description
|
| 10 |
+
- Recommended pesticides
|
| 11 |
+
- Pesticide application timing
|
| 12 |
+
- Prevention measures
|
| 13 |
+
- User-friendly Gradio interface
|
| 14 |
+
|
| 15 |
+
## Setup
|
| 16 |
+
|
| 17 |
+
1. Clone the repository
|
| 18 |
+
2. Install dependencies:
|
| 19 |
+
```bash
|
| 20 |
+
pip install -r requirements.txt
|
| 21 |
+
```
|
| 22 |
+
3. Set up your Mistral API key:
|
| 23 |
+
- Create a `.env` file in the project root
|
| 24 |
+
- Add your Mistral API key:
|
| 25 |
+
```
|
| 26 |
+
MISTRAL_API_KEY=your_api_key_here
|
| 27 |
+
```
|
| 28 |
+
|
| 29 |
+
## Usage
|
| 30 |
+
|
| 31 |
+
1. Run the application:
|
| 32 |
+
```bash
|
| 33 |
+
python model.py
|
| 34 |
+
```
|
| 35 |
+
2. Open the Gradio interface in your web browser
|
| 36 |
+
3. Upload an image of a plant leaf
|
| 37 |
+
4. View the disease detection results and detailed information
|
| 38 |
+
|
| 39 |
+
## Model Information
|
| 40 |
+
|
| 41 |
+
The system can detect 38 different conditions across multiple plant species, including:
|
| 42 |
+
- Apple diseases
|
| 43 |
+
- Tomato diseases
|
| 44 |
+
- Potato diseases
|
| 45 |
+
- Grape diseases
|
| 46 |
+
- And more...
|
| 47 |
+
|
| 48 |
+
## Requirements
|
| 49 |
+
|
| 50 |
+
- Python 3.8+
|
| 51 |
+
- CUDA-capable GPU (optional, for faster inference)
|
| 52 |
+
- Mistral API key
|
app.py
CHANGED
|
@@ -1,176 +1,100 @@
|
|
| 1 |
-
import
|
| 2 |
-
import torch
|
| 3 |
-
import
|
| 4 |
-
|
| 5 |
-
from PIL import Image
|
| 6 |
-
import
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
#
|
| 33 |
-
|
| 34 |
-
model
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
{disease_info['timing']}
|
| 102 |
-
|
| 103 |
-
Prevention Measures:
|
| 104 |
-
{disease_info['prevention']}
|
| 105 |
-
"""
|
| 106 |
-
return result
|
| 107 |
-
|
| 108 |
-
except Exception as e:
|
| 109 |
-
return f"Error in prediction: {str(e)}"
|
| 110 |
-
|
| 111 |
-
# ========== WEB APPLICATION ==========
|
| 112 |
-
def find_available_port(start_port):
|
| 113 |
-
"""Find next available port from start_port"""
|
| 114 |
-
port = start_port
|
| 115 |
-
while True:
|
| 116 |
-
try:
|
| 117 |
-
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
| 118 |
-
s.bind(('0.0.0.0', port))
|
| 119 |
-
return port
|
| 120 |
-
except OSError:
|
| 121 |
-
port += 1
|
| 122 |
-
|
| 123 |
-
app = Flask(__name__)
|
| 124 |
-
|
| 125 |
-
# Gradio Interface
|
| 126 |
-
iface = gr.Interface(
|
| 127 |
-
fn=predict,
|
| 128 |
-
inputs=gr.Image(type="pil"),
|
| 129 |
-
outputs=gr.Textbox(label="Analysis Results", lines=20),
|
| 130 |
-
title="GREEN PULSE - Plant Health Analysis",
|
| 131 |
-
description="Upload an image of a plant leaf to detect health issues.",
|
| 132 |
-
examples=[
|
| 133 |
-
["examples/healthy_apple.jpg"],
|
| 134 |
-
["examples/diseased_tomato.jpg"]
|
| 135 |
-
]
|
| 136 |
-
)
|
| 137 |
-
|
| 138 |
-
def run_gradio():
|
| 139 |
-
"""Launch Gradio in separate thread"""
|
| 140 |
-
global gradio_port
|
| 141 |
-
gradio_port = find_available_port(7860)
|
| 142 |
-
print(f"\nGradio interface running on port: {gradio_port}")
|
| 143 |
-
iface.launch(
|
| 144 |
-
server_name="0.0.0.0",
|
| 145 |
-
server_port=gradio_port,
|
| 146 |
-
share=False,
|
| 147 |
-
prevent_thread_lock=True
|
| 148 |
-
)
|
| 149 |
-
|
| 150 |
-
# Start Gradio thread
|
| 151 |
-
gradio_port = 7860 # Default
|
| 152 |
-
gradio_thread = threading.Thread(target=run_gradio, daemon=True)
|
| 153 |
-
gradio_thread.start()
|
| 154 |
-
|
| 155 |
-
# Flask Routes
|
| 156 |
-
@app.route('/')
|
| 157 |
-
def home():
|
| 158 |
-
"""Main landing page"""
|
| 159 |
-
return render_template("index.html")
|
| 160 |
-
|
| 161 |
-
@app.route('/analyze')
|
| 162 |
-
def analyze():
|
| 163 |
-
"""Page with embedded Gradio interface"""
|
| 164 |
-
return render_template("analyze.html", gradio_port=gradio_port)
|
| 165 |
-
|
| 166 |
-
@app.route('/results')
|
| 167 |
-
def results():
|
| 168 |
-
"""Results display page"""
|
| 169 |
-
return render_template("results.html")
|
| 170 |
-
|
| 171 |
-
if __name__ == '__main__':
|
| 172 |
-
"""Main application entry point"""
|
| 173 |
-
flask_port = find_available_port(5000)
|
| 174 |
-
print(f"Flask server running on port: {flask_port}")
|
| 175 |
-
print(f"Access the app at: http://localhost:{flask_port}")
|
| 176 |
-
app.run(debug=True, port=flask_port, use_reloader=False)
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import torch
|
| 3 |
+
import torch.nn as nn
|
| 4 |
+
from torchvision import models, transforms
|
| 5 |
+
from PIL import Image
|
| 6 |
+
import numpy as np
|
| 7 |
+
|
| 8 |
+
# Define the class names (from the notebook)
# 38 labels in '<plant>___<condition>' format. Order matters: model outputs
# are indexed into this list, so it must match the training label order.
class_names = [
    'Apple___Apple_scab', 'Apple___Black_rot', 'Apple___Cedar_apple_rust', 'Apple___healthy',
    'Blueberry___healthy', 'Cherry_(including_sour)___Powdery_mildew', 'Cherry_(including_sour)___healthy',
    'Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot', 'Corn_(maize)___Common_rust_',
    'Corn_(maize)___Northern_Leaf_Blight', 'Corn_(maize)___healthy', 'Grape___Black_rot',
    'Grape___Esca_(Black_Measles)', 'Grape___Leaf_blight_(Isariopsis_Leaf_Spot)', 'Grape___healthy',
    'Orange___Haunglongbing_(Citrus_greening)', 'Peach___Bacterial_spot', 'Peach___healthy',
    'Pepper,_bell___Bacterial_spot', 'Pepper,_bell___healthy', 'Potato___Early_blight',
    'Potato___Late_blight', 'Potato___healthy', 'Raspberry___healthy', 'Soybean___healthy',
    'Squash___Powdery_mildew', 'Strawberry___Leaf_scorch', 'Strawberry___healthy',
    'Tomato___Bacterial_spot', 'Tomato___Early_blight', 'Tomato___Late_blight',
    'Tomato___Leaf_Mold', 'Tomato___Septoria_leaf_spot', 'Tomato___Spider_mites Two-spotted_spider_mite',
    'Tomato___Target_Spot', 'Tomato___Tomato_Yellow_Leaf_Curl_Virus', 'Tomato___Tomato_mosaic_virus',
    'Tomato___healthy'
]
|
| 24 |
+
|
| 25 |
+
# Load your trained model
def load_model(model_path):
    """Rebuild the VGG16 classifier used in training and load its weights.

    Args:
        model_path: Path to the saved state-dict file.

    Returns:
        The model with a 38-way final linear layer, in eval mode, on CPU.
    """
    # Initialize the model (same architecture as used in training):
    # stock VGG16 with the last classifier layer swapped for our class count.
    model = models.vgg16(pretrained=False)
    num_features = model.classifier[6].in_features
    model.classifier[6] = nn.Linear(num_features, len(class_names))

    # Load the trained weights; map_location keeps this working without a GPU.
    model.load_state_dict(torch.load(model_path, map_location=torch.device('cpu')))
    model.eval()
    return model
|
| 36 |
+
|
| 37 |
+
# Define image transformations (same as during training).
# NOTE(review): this resizes to 128x128 while error_fixes.py uses 224x224 —
# confirm which input size the checkpoint was actually trained with.
transform = transforms.Compose([
    transforms.Resize((128, 128)),
    transforms.ToTensor(),
    # Standard ImageNet normalization constants
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])
|
| 43 |
+
|
| 44 |
+
# Prediction function
def predict(image):
    """Classify a leaf image; return plant, disease, confidence and all scores.

    Args:
        image: numpy uint8-convertible RGB array supplied by gr.Image.

    Returns:
        A 4-tuple (plant, disease, confidence string, {label: probability}),
        one value per declared Gradio output component.

    FIX: the original returned a single dict, but the interface declares four
    output components (Plant / Disease Status / Confidence / All Predictions);
    Gradio cannot split one dict across four outputs, so return a 4-tuple.
    """
    # Convert Gradio's numpy array to PIL Image
    image = Image.fromarray(image.astype('uint8'), 'RGB')

    # Apply transformations and add a batch dimension for the model
    image = transform(image).unsqueeze(0)

    # Make prediction (no grad: inference only)
    with torch.no_grad():
        output = model(image)
        probabilities = torch.nn.functional.softmax(output[0], dim=0)
        confidences = {class_names[i]: float(probabilities[i]) for i in range(len(class_names))}

    # Extract plant name and disease status from '<plant>___<disease>' label.
    top_pred = max(confidences, key=confidences.get)
    plant, _, disease = top_pred.partition('___')
    if not disease:
        # Defensive: a label without the '___' separator would have crashed
        # the original's split(...)[1]; treat it as healthy instead.
        disease = 'healthy'

    return plant, disease, f"{confidences[top_pred]:.2%}", confidences
|
| 69 |
+
|
| 70 |
+
# Load your trained model at import time so the interface is ready to serve.
model_path = "model/vgg_model_ft.pth"  # Update this path if needed
model = load_model(model_path)

# Create Gradio interface
title = "Plant Disease Classifier"
description = """
Upload an image of a plant leaf to classify its health status. The model can detect diseases across 14 plant types and 38 disease categories.
"""

examples = [
    ["example_images/healthy_apple.jpg"],  # You should provide some example images
    ["example_images/diseased_tomato.jpg"]
]

# NOTE(review): predict() returns a single dict while FOUR output components
# are declared below — verify Gradio unpacks this as intended, or make
# predict() return a 4-tuple matching these outputs.
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(label="Upload Plant Leaf Image"),
    outputs=[
        gr.Label(label="Plant"),
        gr.Label(label="Disease Status"),
        gr.Label(label="Confidence"),
        gr.Label(label="All Predictions")
    ],
    title=title,
    description=description,
    examples=examples,
    allow_flagging="never"  # hide the flagging button
)

iface.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
demo.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
from PIL import Image
|
| 3 |
+
import random
|
| 4 |
+
import time
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
# --- Sample Output (Simulated ML Prediction Result) ---
# Canned result keyed by predicted class label; predict_disease() looks up
# its (hard-coded) prediction here. Only one label is stocked for the demo.
sample_output = {
    "Apple___healthy": {
        "disease_name": "Healthy",
        "crop": "Apple",
        "description": "This Apple leaf shows no signs of disease. The plant appears healthy.",
        "cause": "No disease detected.",
        "prevention": "Continue with good agricultural practices like clean pruning, proper spacing, and pest monitoring.",
        "pesticide": {
            "name": "No pesticide needed",
            "type": "None",
            "timing": "N/A",
            "image_url": "https://yourcdn.com/images/no_pesticide.jpg"
        },
        # Paths shown in the sample gallery; must exist on disk to render.
        "sample_images": [
            "dataset/Apple___healthy/image1.jpg",
            "dataset/Apple___healthy/image2.jpg",
            "dataset/Apple___healthy/image3.jpg"
        ],
        "summary_prompt": "The Apple leaf appears healthy. No signs of disease. Maintain good care and monitor regularly."
    }
}

# Rotating care tips; one is picked at random per prediction.
TIPS = [
    "🩴 Always water plants early in the morning to reduce evaporation.",
    "🌞 Keep leaves dry to prevent fungal diseases.",
    "🩹 Clean tools after pruning to stop disease spread.",
    "🎾 Rotate crops every season to maintain soil health.",
    "🪪 Check for pest damage under the leaves too!"
]
|
| 37 |
+
|
| 38 |
+
def predict_disease(username, location_method, manual_location, gps_coords, image):
    """Simulate a disease prediction and return one value per UI output.

    Args:
        username: User name (currently unused by the simulation).
        location_method: "Manual Entry" or GPS — selects which location
            textbox value to use.
        manual_location: Free-text location (e.g. a state name).
        gps_coords: GPS coordinate string.
        image: Uploaded leaf image path (unused — prediction is canned).

    Returns:
        A 10-tuple matching the 10 output components wired to the button:
        (result message, confidence, description, cause, prevention,
        pesticide info, sample images, tip, location, alerts).
    """
    user_location = manual_location if location_method == "Manual Entry" else gps_coords

    # Simulate Prediction — fixed label/confidence after an artificial delay.
    time.sleep(2)
    predicted_label = "Apple___healthy"
    confidence = 0.94
    result = sample_output.get(predicted_label)

    if not result:
        # Still 10 values so every output component gets one.
        return "Could not detect disease.", None, None, None, None, None, None, None, None, None

    # Alerts based on location (canned per-region watchlist).
    alerts = {
        "Punjab": ["Wheat Rust", "Cotton Leaf Curl"],
        "West Bengal": ["Rice Blast", "Bacterial Leaf Blight"],
        "Maharashtra": ["Powdery Mildew", "Leaf Spot"]
    }
    disease_alerts = alerts.get(user_location, ["No major alerts"])

    return (
        f"✅ Prediction Complete: {result['disease_name']} ({result['crop']})",
        f"{int(confidence * 100)}%",
        result['description'],
        result['cause'],
        result['prevention'],
        result['pesticide'],
        result['sample_images'],
        random.choice(TIPS),
        user_location,
        ", ".join(disease_alerts)
    )
|
| 70 |
+
|
| 71 |
+
def dr_green_chat(user_query):
    """Return a canned answer for the 'Dr. Green' chat box.

    Matches keywords in the lower-cased question, checked in priority order;
    unrecognized questions get a generic greeting.
    """
    text = user_query.lower()
    if "apple" in text and "healthy" in text:
        reply = "An apple leaf with no spots or discoloration is likely healthy. Continue regular monitoring and good practices."
    elif "pesticide" in text:
        reply = "Choose pesticides based on the specific disease. Always follow recommended guidelines and timings."
    elif "how to use" in text or "guide" in text:
        reply = "Upload a clear leaf image and click 'Predict Disease'. Ask anything in the chat!"
    else:
        reply = "I'm Dr. Green 🌿, your plant health assistant! Ask me about diseases, care, or anything green."
    return reply
|
| 81 |
+
|
| 82 |
+
# Build the demo UI: inputs, a predict button, ten result fields, and a chat.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="green")) as demo:
    gr.Markdown("# 🌱 GREENPULSE - AI-Powered Leaf Disease Detection")

    with gr.Row():
        username = gr.Textbox(label="Username", placeholder="e.g., farmer123")
        location_method = gr.Radio(["Manual Entry", "Detect via GPS"], label="Location Method", value="Manual Entry")

    with gr.Row():
        manual_location = gr.Textbox(label="Manual Location", placeholder="e.g., Punjab")
        gps_coords = gr.Textbox(label="GPS Coordinates", placeholder="e.g., 30.7333,76.7794")

    image = gr.Image(type="filepath", label="Upload Leaf Image")
    predict_btn = gr.Button("🔍 Predict Disease")

    # Ten output components — order must match predict_disease's 10-tuple.
    result_msg = gr.Textbox(label="Result")
    confidence = gr.Textbox(label="Health Confidence")
    description = gr.Textbox(label="Description")
    cause = gr.Textbox(label="Cause")
    prevention = gr.Textbox(label="Prevention")
    pesticide_info = gr.Textbox(label="Pesticide Details")
    sample_gallery = gr.Gallery(label="Sample Images", columns=3, rows=1)
    tip = gr.Textbox(label="💡 Daily Tip")
    detected_location = gr.Textbox(label="Detected Location")
    alerts_output = gr.Textbox(label="Disease Alerts")

    predict_btn.click(
        predict_disease,
        inputs=[username, location_method, manual_location, gps_coords, image],
        outputs=[result_msg, confidence, description, cause, prevention, pesticide_info, sample_gallery, tip, detected_location, alerts_output]
    )

    gr.Markdown("---")
    gr.Markdown("## 🧑‍🌾 Ask Dr. Green")
    user_question = gr.Textbox(label="Ask your question")
    dr_response = gr.Textbox(label="Dr. Green Says")
    # .change fires on every keystroke, so the bot replies as the user types.
    user_question.change(dr_green_chat, inputs=user_question, outputs=dr_response)

if __name__ == "__main__":
    demo.launch()
|
disease_info.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
import requests
import json
from dotenv import load_dotenv

# Load environment variables from a local .env file (per the README setup).
load_dotenv()

# API configuration.
# SECURITY FIX: the Mistral API key was previously hard-coded in source even
# though load_dotenv() was called. Read it from the environment instead so
# the secret never lives in the repository.
API_KEY = os.getenv("MISTRAL_API_KEY", "")
API_URL = "https://api.mistral.ai/v1/chat/completions"
HEADERS = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json"
}
|
| 16 |
+
|
| 17 |
+
def get_disease_info(plant_name: str, disease_name: str) -> dict:
    """
    Get detailed information about a plant disease using Mistral API.

    Args:
        plant_name: Plant species (e.g. "Apple").
        disease_name: Disease label (e.g. "Apple scab").

    Returns:
        A dict with keys "description", "pesticides", "timing", "prevention".
        Never raises: any failure is converted into placeholder values so the
        caller can always render all four fields.
    """
    try:
        # Construct the prompt asking the model for a structured JSON reply.
        prompt = f"""
        You are an expert in plant pathology. Please provide detailed information about {disease_name} in {plant_name} plants.
        Provide the following information in a structured format:
        1. Disease description
        2. Recommended pesticides (if any)
        3. Pesticide application timing
        4. Prevention measures

        Format the response as a JSON object with these keys:
        - description
        - pesticides
        - timing
        - prevention

        Example response format:
        {{
            "description": "Detailed description of the disease",
            "pesticides": "List of recommended pesticides",
            "timing": "When to apply pesticides",
            "prevention": "Prevention measures"
        }}
        """

        print(f"Querying Mistral API for {plant_name} - {disease_name}")

        # Prepare the request payload
        payload = {
            "model": "mistral-tiny",
            "messages": [{"role": "user", "content": prompt}],
            "temperature": 0.7
        }

        print("Making API request...")
        # FIX: a timeout so a hung API call cannot block the caller forever.
        response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=30)
        print(f"API Response Status: {response.status_code}")

        if response.status_code != 200:
            print(f"API Error Response: {response.text}")
            raise Exception(f"API request failed with status {response.status_code}")

        response_data = response.json()
        print("Received API response")

        # Extract the assistant message text from the chat-completions reply.
        content = response_data["choices"][0]["message"]["content"]
        print("Extracted content from response")

        # Try to parse the content as JSON
        try:
            # The model may wrap JSON in prose; carve out the first {...} span.
            json_start = content.find('{')
            json_end = content.rfind('}') + 1
            if json_start != -1 and json_end != 0:
                json_str = content[json_start:json_end]
                disease_info = json.loads(json_str)
            else:
                disease_info = json.loads(content)

            print("Successfully parsed JSON response")

            # Ensure all required fields are present and properly formatted
            required_fields = ["description", "pesticides", "timing", "prevention"]
            for field in required_fields:
                if field not in disease_info:
                    disease_info[field] = f"No {field} information available"
                else:
                    # Clean up the field value
                    value = disease_info[field]
                    if isinstance(value, str):
                        # Unwrap a nested JSON object the model sometimes emits.
                        if value.strip().startswith('{') and value.strip().endswith('}'):
                            # FIX: was a bare `except:` — catch only the
                            # parse/type errors this unwrapping can raise.
                            try:
                                value = json.loads(value)
                                if isinstance(value, dict) and field in value:
                                    disease_info[field] = value[field]
                            except (json.JSONDecodeError, TypeError):
                                pass
                        disease_info[field] = value.strip()

            return disease_info
        except json.JSONDecodeError as e:
            print(f"JSON Parse Error: {str(e)}")
            print(f"Raw content: {content}")
            # If not valid JSON, use the raw content
            return {
                "description": content,
                "pesticides": "Please check the description for pesticide information",
                "timing": "Please check the description for timing information",
                "prevention": "Please check the description for prevention measures"
            }

    except requests.exceptions.RequestException as e:
        print(f"Request Error: {str(e)}")
        return {
            "description": f"Error retrieving disease information: {str(e)}",
            "pesticides": "Error retrieving pesticide information",
            "timing": "Error retrieving timing information",
            "prevention": "Error retrieving prevention information"
        }
    except Exception as e:
        print(f"Unexpected Error: {str(e)}")
        return {
            "description": f"Unexpected error: {str(e)}",
            "pesticides": "Error retrieving pesticide information",
            "timing": "Error retrieving timing information",
            "prevention": "Error retrieving prevention information"
        }
|
error_fixes.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torchvision.models as models
|
| 4 |
+
import torchvision.transforms as transforms
|
| 5 |
+
from PIL import Image
|
| 6 |
+
import gradio as gr
|
| 7 |
+
import numpy as np
|
| 8 |
+
|
| 9 |
+
# Define your model class (same as during training)
class Plant_Disease_VGG16(nn.Module):
    """VGG16 fine-tuned for 38 plant-disease classes.

    Keeps pretrained features, freezes all but the last 5 feature-extractor
    parameter tensors, and replaces the final classifier layer.
    """

    def __init__(self):
        super().__init__()
        self.network = models.vgg16(pretrained=True)
        # Freeze everything except the last 5 feature parameters so only the
        # tail of the backbone (plus the classifier) trains.
        for param in list(self.network.features.parameters())[:-5]:
            param.requires_grad = False
        num_ftrs = self.network.classifier[-1].in_features
        self.network.classifier[-1] = nn.Linear(num_ftrs, 38)  # 38 classes

    def forward(self, xb):
        # Delegate straight to the wrapped torchvision network.
        return self.network(xb)
|
| 21 |
+
|
| 22 |
+
# Load the model once at import time; prefer GPU when available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = Plant_Disease_VGG16()
model.load_state_dict(torch.load("model/vgg_model_ft.pth", map_location=device))
model.to(device)
model.eval()

# Class labels with plant and disease information.
# '<plant>___<disease>' format; order must match training label indices.
class_labels = [
    'Apple___Apple_scab', 'Apple___Black_rot', 'Apple___Cedar_apple_rust', 'Apple___healthy',
    'Blueberry___healthy', 'Cherry_(including_sour)___Powdery_mildew', 'Cherry_(including_sour)___healthy',
    'Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot', 'Corn_(maize)___Common_rust_',
    'Corn_(maize)___Northern_Leaf_Blight', 'Corn_(maize)___healthy', 'Grape___Black_rot',
    'Grape___Esca_(Black_Measles)', 'Grape___Leaf_blight_(Isariopsis_Leaf_Spot)', 'Grape___healthy',
    'Orange___Haunglongbing_(Citrus_greening)', 'Peach___Bacterial_spot', 'Peach___healthy',
    'Pepper,_bell___Bacterial_spot', 'Pepper,_bell___healthy', 'Potato___Early_blight',
    'Potato___Late_blight', 'Potato___healthy', 'Raspberry___healthy', 'Soybean___healthy',
    'Squash___Powdery_mildew', 'Strawberry___Leaf_scorch', 'Strawberry___healthy',
    'Tomato___Bacterial_spot', 'Tomato___Early_blight', 'Tomato___Late_blight',
    'Tomato___Leaf_Mold', 'Tomato___Septoria_leaf_spot', 'Tomato___Spider_mites Two-spotted_spider_mite',
    'Tomato___Target_Spot', 'Tomato___Tomato_Yellow_Leaf_Curl_Virus', 'Tomato___Tomato_mosaic_virus',
    'Tomato___healthy'
]
|
| 45 |
+
|
| 46 |
+
# Enhanced preprocessing
def preprocess_image(image):
    """Add noise reduction, sharpening, and background removal.

    Masks out non-green background (assuming the leaf is the dominant green
    object), then applies the training-time resize/normalize transform.

    Args:
        image: PIL RGB image.

    Returns:
        A normalized 3x224x224 tensor ready for the model.
    """
    # FIX: in the original file `import cv2` lived inside the __main__ guard,
    # so this function raised NameError whenever the module was imported
    # instead of run as a script. Import it where it is used.
    import cv2

    # Convert to numpy array for processing
    img = np.array(image)

    # Simple background removal (assuming leaf is dominant green object)
    hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
    mask = cv2.inRange(hsv, (36, 25, 25), (86, 255, 255))  # Green color range
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (11, 11))
    mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
    img = cv2.bitwise_and(img, img, mask=mask)

    # Convert back to PIL
    image = Image.fromarray(img)

    transform = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])
    return transform(image)
|
| 68 |
+
|
| 69 |
+
def parse_class_label(class_label):
    """Turn a '<plant>___<disease>' label into a readable (plant, disease) pair.

    Underscores become spaces and commas are dropped from the plant name;
    labels with no '___' separator are treated as healthy.
    """
    pieces = class_label.split('___')
    plant = pieces[0].replace('_', ' ').replace(',', '')
    if len(pieces) > 1:
        disease = pieces[1].replace('_', ' ')
    else:
        disease = "healthy"
    return plant, disease
|
| 75 |
+
|
| 76 |
+
def is_healthy_override(image, predicted_class, confidence):
    """Heuristic check for false disease predictions.

    If the model confidently predicts a disease but the leaf is almost
    entirely green (no visible lesions), override toward healthy.

    Args:
        image: PIL image (only inspected when the override branch runs).
        predicted_class: the top class label string.
        confidence: softmax probability of the top class.

    Returns:
        True when the prediction should be overridden to healthy.
    """
    # If model predicts disease but image looks "too clean", override to healthy
    if "healthy" not in predicted_class and confidence > 0.9:
        # FIX: cv2 was only imported inside the __main__ guard in this file,
        # so this branch raised NameError when the module was imported.
        import cv2

        # Simple check: count green pixels vs total
        img = np.array(image)
        hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
        green_pixels = cv2.inRange(hsv, (36, 25, 25), (86, 255, 255))
        green_ratio = np.sum(green_pixels > 0) / (img.shape[0] * img.shape[1])

        if green_ratio > 0.7:  # Mostly green leaf with no visible spots
            return True
    return False
|
| 89 |
+
|
| 90 |
+
# Prediction function with fixes
def predict(image):
    """Classify a leaf image and format the result with error-correction rules.

    Args:
        image: PIL image from the Gradio input.

    Returns:
        A human-readable result string; errors are returned as text rather
        than raised so the UI always shows something.
    """
    try:
        # Preprocess (background removal + normalize, adds batch dim, moves
        # to the module-level device)
        input_tensor = preprocess_image(image).unsqueeze(0).to(device)

        # Predict
        with torch.no_grad():
            preds = model(input_tensor)
            probabilities = torch.nn.functional.softmax(preds[0], dim=0)

        # Get top prediction
        top_prob, top_idx = torch.max(probabilities, 0)
        top_class = class_labels[top_idx.item()]
        plant, disease = parse_class_label(top_class)
        confidence = top_prob.item()

        # Apply fixes: confident disease call on a clean-looking leaf is
        # overridden to healthy (see is_healthy_override).
        if is_healthy_override(image, top_class, confidence):
            return f"Plant: {plant}\nDisease: healthy (Override: Original prediction '{disease}' had {confidence:.2%} confidence but leaf appears healthy)"

        # Confidence thresholding: below 70% we ask for a better image.
        if confidence < 0.7:
            return f"Uncertain prediction for {plant} (Confidence: {confidence:.2%})\nPlease upload a clearer image."

        return f"Plant: {plant}\nDisease: {disease} (Confidence: {confidence:.2%})"

    except Exception as e:
        # Broad catch is deliberate: surface any failure to the UI as text.
        return f"Error: {str(e)}"
|
| 119 |
+
|
| 120 |
+
# Gradio UI with additional instructions
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil", label="Upload Leaf Image"),
    outputs=gr.Textbox(label="Prediction Results"),
    title="Plant Disease Detection (With Error Correction)",
    description="""Upload a clear image of a plant leaf. Tips:
    - Crop to show only the leaf
    - Use even lighting
    - Avoid shadows/reflections""",
    examples=[
        ["examples/healthy_apple.jpg"],
        ["examples/diseased_tomato.jpg"]
    ],
    allow_flagging="manual"
)

if __name__ == "__main__":
    # NOTE(review): cv2 is imported only here, but preprocess_image and
    # is_healthy_override use it — importing this module (rather than running
    # it) leaves cv2 undefined for those functions. Consider a top-level import.
    import cv2  # For image processing
    iface.launch()
|
model.py
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torchvision.models as models
|
| 4 |
+
import torchvision.transforms as transforms
|
| 5 |
+
from PIL import Image
|
| 6 |
+
import gradio as gr
|
| 7 |
+
from disease_info import get_disease_info
|
| 8 |
+
|
| 9 |
+
# Define your model class (same as during training)
class Plant_Disease_VGG16(nn.Module):
    """VGG16 backbone fine-tuned for the 38-class PlantVillage dataset.

    All but the last five feature-extractor layers are frozen, so only
    those layers and the replaced classifier head remain trainable.
    """

    def __init__(self):
        super().__init__()
        # Fix: `pretrained=True` is deprecated in torchvision >= 0.13.
        # Use the explicit weights enum (same ImageNet weights, same
        # behavior), matching the identical class in sample1.py.
        self.network = models.vgg16(weights=models.VGG16_Weights.IMAGENET1K_V1)
        # Freeze all feature layers except the last five.
        for param in list(self.network.features.parameters())[:-5]:
            param.requires_grad = False
        # Replace the final fully-connected layer with a 38-way classifier.
        num_ftrs = self.network.classifier[-1].in_features
        self.network.classifier[-1] = nn.Linear(num_ftrs, 38)  # 38 classes

    def forward(self, xb):
        """Forward pass; xb is a batch of (3, 224, 224) image tensors."""
        return self.network(xb)
|
| 21 |
+
|
| 22 |
+
# Load the model
# Use the GPU when available; the checkpoint is remapped onto this device.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = Plant_Disease_VGG16()
# NOTE(review): torch.load unpickles arbitrary objects — only load trusted
# checkpoints (consider weights_only=True on newer torch versions).
model.load_state_dict(torch.load("model/vgg_model_ft.pth", map_location=device))
model.to(device)
model.eval()

# Class labels with plant and disease information
# Order must match the training label order; format is "<Plant>___<Disease>".
class_labels = [
    'Apple___Apple_scab', 'Apple___Black_rot', 'Apple___Cedar_apple_rust', 'Apple___healthy',
    'Blueberry___healthy', 'Cherry_(including_sour)___Powdery_mildew', 'Cherry_(including_sour)___healthy',
    'Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot', 'Corn_(maize)___Common_rust_',
    'Corn_(maize)___Northern_Leaf_Blight', 'Corn_(maize)___healthy', 'Grape___Black_rot',
    'Grape___Esca_(Black_Measles)', 'Grape___Leaf_blight_(Isariopsis_Leaf_Spot)', 'Grape___healthy',
    'Orange___Haunglongbing_(Citrus_greening)', 'Peach___Bacterial_spot', 'Peach___healthy',
    'Pepper,_bell___Bacterial_spot', 'Pepper,_bell___healthy', 'Potato___Early_blight',
    'Potato___Late_blight', 'Potato___healthy', 'Raspberry___healthy', 'Soybean___healthy',
    'Squash___Powdery_mildew', 'Strawberry___Leaf_scorch', 'Strawberry___healthy',
    'Tomato___Bacterial_spot', 'Tomato___Early_blight', 'Tomato___Late_blight',
    'Tomato___Leaf_Mold', 'Tomato___Septoria_leaf_spot', 'Tomato___Spider_mites Two-spotted_spider_mite',
    'Tomato___Target_Spot', 'Tomato___Tomato_Yellow_Leaf_Curl_Virus', 'Tomato___Tomato_mosaic_virus',
    'Tomato___healthy'
]

# Preprocessing
# NOTE(review): no ImageNet mean/std normalization here — presumably the
# model was fine-tuned with this exact transform; confirm against training.
transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])
|
| 51 |
+
|
| 52 |
+
def parse_class_label(class_label):
    """Split a "<Plant>___<Disease>" class label into its two halves.

    Returns a (plant, disease) pair with underscores turned into spaces
    and commas removed from the plant name; a label with no disease
    segment is reported as "healthy".
    """
    plant_part, _, disease_part = class_label.partition('___')
    plant = plant_part.replace('_', ' ').replace(',', '')
    disease = disease_part.replace('_', ' ') if disease_part else "healthy"
    return plant, disease
|
| 58 |
+
|
| 59 |
+
# Prediction function
def predict(image):
    """Classify a leaf image and return a formatted report string.

    Args:
        image: PIL image from the Gradio widget, or None when nothing
            was uploaded.

    Returns:
        A multi-line string with plant, disease, and treatment details.
        Failures are returned as "Error: ..." strings so the Gradio
        Textbox can still display them.
    """
    try:
        print("Starting prediction...")
        if image is None:
            return "Error: No image provided"

        print("Preprocessing image...")
        # Fix: force 3 channels. PNG uploads are often RGBA (and some
        # images grayscale), which would give VGG16 the wrong channel
        # count and crash the forward pass.
        image = transform(image.convert("RGB")).unsqueeze(0).to(device)

        print("Running model prediction...")
        with torch.no_grad():
            preds = model(image)
            probabilities = torch.nn.functional.softmax(preds[0], dim=0)

        # Get top prediction
        top_prob, top_idx = torch.max(probabilities, 0)
        class_name = class_labels[top_idx.item()]
        plant, disease = parse_class_label(class_name)

        print(f"Detected: {plant} - {disease}")

        # Get disease information from Mistral API
        print("Fetching disease information...")
        try:
            disease_info = get_disease_info(plant, disease)
            print("Received disease information successfully")

            # Format the output
            result = f"""
Plant: {plant}
Disease: {disease}

{disease_info['description']}

Recommended Pesticides:
{disease_info['pesticides']}

Application Timing:
{disease_info['timing']}

Prevention Measures:
{disease_info['prevention']}
"""
            return result

        except Exception as e:
            # Keep the API failure separate from model failures so the
            # user sees which stage broke.
            print(f"Error getting disease info: {str(e)}")
            return f"Error getting disease information: {str(e)}"

    except Exception as e:
        print(f"Error in prediction: {str(e)}")
        return f"Error in prediction: {str(e)}"
|
| 112 |
+
|
| 113 |
+
# Gradio UI
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(label="Prediction Results", lines=20),
    title="Plant Disease Detection",
    description="Upload an image of a plant leaf to detect diseases and get detailed information about treatment and prevention.",
    # Example images must exist at these paths or Gradio errors at startup.
    examples=[
        ["examples/healthy_apple.jpg"],
        ["examples/diseased_tomato.jpg"]
    ],
    # `flagging_mode` is the Gradio 5 replacement for `allow_flagging`.
    flagging_mode="never"
)

if __name__ == "__main__":
    # share=True creates a temporary public gradio.live URL.
    iface.launch(share=True)
|
requirements.txt
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
-
torch>=2.0.0
|
| 2 |
-
torchvision>=0.15.0
|
| 3 |
-
Pillow>=9.0.0
|
| 4 |
-
gradio>=4.44.1
|
| 5 |
-
requests>=2.31.0
|
| 6 |
python-dotenv>=0.19.0
|
|
|
|
| 1 |
+
torch>=2.0.0
|
| 2 |
+
torchvision>=0.15.0
|
| 3 |
+
Pillow>=9.0.0
|
| 4 |
+
gradio>=4.44.1
|
| 5 |
+
requests>=2.31.0
|
| 6 |
python-dotenv>=0.19.0
|
sample1.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torchvision.models as models
|
| 4 |
+
import torchvision.transforms as transforms
|
| 5 |
+
from PIL import Image
|
| 6 |
+
import gradio as gr
|
| 7 |
+
from disease_info import get_disease_info
|
| 8 |
+
from flask import Flask, render_template
|
| 9 |
+
import threading
|
| 10 |
+
import socket
|
| 11 |
+
from warnings import filterwarnings
|
| 12 |
+
|
| 13 |
+
# Suppress deprecation warnings
# NOTE(review): this silences *all* UserWarnings process-wide, not only
# deprecation notices — consider narrowing the filter.
filterwarnings("ignore", category=UserWarning)
|
| 15 |
+
|
| 16 |
+
# ========== MODEL DEFINITION ==========
class Plant_Disease_VGG16(nn.Module):
    """VGG16 backbone fine-tuned for 38 PlantVillage classes.

    Early convolutional layers are frozen; only the last five feature
    layers and the replaced classifier head remain trainable.
    """

    def __init__(self):
        super().__init__()
        # Explicit weights enum (ImageNet pretraining) — the modern
        # torchvision API replacing `pretrained=True`.
        weights = models.VGG16_Weights.IMAGENET1K_V1
        self.network = models.vgg16(weights=weights)
        # Freeze early layers
        for param in list(self.network.features.parameters())[:-5]:
            param.requires_grad = False
        # Modify final layer
        num_ftrs = self.network.classifier[-1].in_features
        self.network.classifier[-1] = nn.Linear(num_ftrs, 38)  # 38 classes

    def forward(self, xb):
        # xb: batch of (3, 224, 224) image tensors.
        return self.network(xb)
|
| 31 |
+
|
| 32 |
+
# Initialize model
# Use the GPU when available; checkpoint tensors are remapped onto it.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = Plant_Disease_VGG16()
# NOTE(review): torch.load unpickles arbitrary objects — only load trusted
# checkpoints (consider weights_only=True on newer torch versions).
model.load_state_dict(torch.load("model/vgg_model_ft.pth", map_location=device))
model.to(device)
model.eval()

# Class labels
# Order must match the training label order; format is "<Plant>___<Disease>".
class_labels = [
    'Apple___Apple_scab', 'Apple___Black_rot', 'Apple___Cedar_apple_rust', 'Apple___healthy',
    'Blueberry___healthy', 'Cherry_(including_sour)___Powdery_mildew', 'Cherry_(including_sour)___healthy',
    'Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot', 'Corn_(maize)___Common_rust_',
    'Corn_(maize)___Northern_Leaf_Blight', 'Corn_(maize)___healthy', 'Grape___Black_rot',
    'Grape___Esca_(Black_Measles)', 'Grape___Leaf_blight_(Isariopsis_Leaf_Spot)', 'Grape___healthy',
    'Orange___Haunglongbing_(Citrus_greening)', 'Peach___Bacterial_spot', 'Peach___healthy',
    'Pepper,_bell___Bacterial_spot', 'Pepper,_bell___healthy', 'Potato___Early_blight',
    'Potato___Late_blight', 'Potato___healthy', 'Raspberry___healthy', 'Soybean___healthy',
    'Squash___Powdery_mildew', 'Strawberry___Leaf_scorch', 'Strawberry___healthy',
    'Tomato___Bacterial_spot', 'Tomato___Early_blight', 'Tomato___Late_blight',
    'Tomato___Leaf_Mold', 'Tomato___Septoria_leaf_spot', 'Tomato___Spider_mites Two-spotted_spider_mite',
    'Tomato___Target_Spot', 'Tomato___Tomato_Yellow_Leaf_Curl_Virus', 'Tomato___Tomato_mosaic_virus',
    'Tomato___healthy'
]

# Image preprocessing
# NOTE(review): no ImageNet mean/std normalization — presumably matches the
# transform used during fine-tuning; confirm against the training pipeline.
transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])
|
| 61 |
+
|
| 62 |
+
def parse_class_label(class_label):
    """Extract (plant, disease) from a "<Plant>___<Disease>" label.

    Underscores become spaces; commas are stripped from the plant name.
    A label without a disease segment maps to "healthy".
    """
    left, _, right = class_label.partition('___')
    cleaned_plant = left.replace('_', ' ').replace(',', '')
    cleaned_disease = right.replace('_', ' ') if right else "healthy"
    return cleaned_plant, cleaned_disease
|
| 68 |
+
|
| 69 |
+
def predict(image):
    """Classify a leaf image and return a formatted analysis string.

    Args:
        image: PIL image from the Gradio widget, or None when nothing
            was uploaded.

    Returns:
        A multi-line report with plant, disease, and treatment details.
        Failures are returned as "Error..." strings so the UI can still
        display them.
    """
    try:
        if image is None:
            return "Error: No image provided"

        # Preprocess and predict
        # Fix: force RGB — RGBA/grayscale uploads would otherwise produce
        # a tensor with the wrong channel count for VGG16 and crash.
        image = transform(image.convert("RGB")).unsqueeze(0).to(device)
        with torch.no_grad():
            preds = model(image)
            probabilities = torch.nn.functional.softmax(preds[0], dim=0)

        # Get top prediction
        top_prob, top_idx = torch.max(probabilities, 0)
        class_name = class_labels[top_idx.item()]
        plant, disease = parse_class_label(class_name)

        # Get disease info
        disease_info = get_disease_info(plant, disease)

        # Format results
        result = f"""
Plant: {plant}
Disease: {disease}

Description:
{disease_info['description']}

Recommended Treatments:
{disease_info['pesticides']}

Application Timing:
{disease_info['timing']}

Prevention Measures:
{disease_info['prevention']}
"""
        return result

    except Exception as e:
        return f"Error in prediction: {str(e)}"
|
| 110 |
+
|
| 111 |
+
# ========== WEB APPLICATION ==========
|
| 112 |
+
def find_available_port(start_port):
    """Return the first TCP port >= start_port that can be bound.

    Probes ports by attempting a bind on all interfaces; the probe
    socket is closed before the port number is returned.
    """
    candidate = start_port
    while True:
        probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            probe.bind(('0.0.0.0', candidate))
        except OSError:
            # Port in use (or otherwise unbindable) — try the next one.
            candidate += 1
        else:
            return candidate
        finally:
            probe.close()
|
| 122 |
+
|
| 123 |
+
# Flask app serving the landing/analyze/results pages below.
app = Flask(__name__)

# Gradio Interface
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(label="Analysis Results", lines=20),
    title="GREEN PULSE - Plant Health Analysis",
    description="Upload an image of a plant leaf to detect health issues.",
    # Example images must exist at these paths or Gradio errors at startup.
    examples=[
        ["examples/healthy_apple.jpg"],
        ["examples/diseased_tomato.jpg"]
    ]
)
|
| 137 |
+
|
| 138 |
+
def run_gradio():
    """Launch Gradio in separate thread"""
    # Publishes the chosen port via the module-level `gradio_port` so the
    # Flask /analyze page can embed the interface.
    global gradio_port
    gradio_port = find_available_port(7860)
    print(f"\nGradio interface running on port: {gradio_port}")
    iface.launch(
        server_name="0.0.0.0",
        server_port=gradio_port,
        share=False,
        # Return immediately instead of blocking this thread.
        prevent_thread_lock=True
    )

# Start Gradio thread
gradio_port = 7860  # Default until run_gradio() picks the real port
# Daemon thread: exits automatically when the Flask main thread stops.
gradio_thread = threading.Thread(target=run_gradio, daemon=True)
gradio_thread.start()
|
| 154 |
+
|
| 155 |
+
# Flask Routes
@app.route('/')
def home():
    """Main landing page"""
    return render_template("index.html")

@app.route('/analyze')
def analyze():
    """Page with embedded Gradio interface"""
    # Passes the Gradio port so the template can point at the interface.
    return render_template("analyze.html", gradio_port=gradio_port)

@app.route('/results')
def results():
    """Results display page"""
    return render_template("results.html")
|
| 170 |
+
|
| 171 |
+
if __name__ == '__main__':
    """Main application entry point"""
    flask_port = find_available_port(5000)
    print(f"Flask server running on port: {flask_port}")
    print(f"Access the app at: http://localhost:{flask_port}")
    # use_reloader=False: the reloader would re-import this module and
    # spawn a second Gradio thread.
    app.run(debug=True, port=flask_port, use_reloader=False)
|
translator.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import requests
|
| 2 |
+
|
| 3 |
+
class Translator:
    """Thin client for a section-by-section text-translation HTTP API."""

    def __init__(self):
        # SECURITY(review): the API key was hard-coded in source. Prefer
        # the TRANSLATOR_API_KEY environment variable; the old value is
        # kept only as a backward-compatible fallback — rotate the key
        # and remove it.
        import os
        self.api_key = os.environ.get(
            "TRANSLATOR_API_KEY",
            "393faf7faf-bf19-46db-a640-c0f44f844724"
        )
        self.base_url = "https://api.basini.com/v1/translate"
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

    def translate(self, text, target_lang):
        """Translate `text` from English into `target_lang`.

        The text is split on blank lines and each section is translated
        separately. On any failure the original text is returned
        unchanged (best-effort behavior, preserved from the original).
        """
        try:
            # Split the text into sections for better translation
            sections = text.split('\n\n')
            translated_sections = []

            for section in sections:
                if section.strip():
                    payload = {
                        "text": section,
                        "target_lang": target_lang,
                        "source_lang": "en"
                    }
                    # Fix: added a timeout so a stalled API call cannot
                    # hang the caller forever.
                    response = requests.post(
                        self.base_url,
                        headers=self.headers,
                        json=payload,
                        timeout=30,
                    )
                    response.raise_for_status()
                    translated_sections.append(response.json()["translated_text"])
                else:
                    # Preserve blank sections so spacing survives.
                    translated_sections.append("")

            return "\n\n".join(translated_sections)
        except Exception as e:
            print(f"Translation error: {str(e)}")
            return text
|
| 35 |
+
|
| 36 |
+
translator = Translator()
|