File size: 4,602 Bytes
237c700
 
 
 
 
f25f10c
fc33bcb
4030d18
f25f10c
4030d18
fc33bcb
 
cb4656b
 
fc33bcb
 
 
4030d18
 
fc33bcb
4030d18
 
f25f10c
237c700
 
 
 
 
4ca82bd
fc33bcb
4ca82bd
fc33bcb
237c700
4030d18
 
 
237c700
 
 
 
 
 
 
d009c24
 
 
 
 
 
 
 
 
4030d18
237c700
4030d18
237c700
 
 
 
 
 
 
d009c24
237c700
 
fc33bcb
 
 
cb4656b
fc33bcb
da0e064
 
 
6ccbed3
 
 
 
 
 
 
 
 
 
 
da0e064
 
 
 
 
fc33bcb
da0e064
fc33bcb
 
 
cb4656b
fc33bcb
 
 
cb4656b
fc33bcb
 
 
 
 
 
 
 
 
4030d18
237c700
4030d18
fc33bcb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cb4656b
f25f10c
 
fc33bcb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import os
import json
from PIL import Image
import numpy as np
import tensorflow as tf
import gradio as gr
import requests
from huggingface_hub import hf_hub_download

# -----------------------------
# Load API key from Hugging Face Secrets
# -----------------------------
# NOTE(review): variables are named GROK_* but the endpoint is Groq's
# OpenAI-compatible API; names kept as-is since other code reads them.
GROK_KEY = os.getenv("GROK_API_KEY")
GROK_URL = "https://api.groq.com/openai/v1/chat/completions"

# -----------------------------
# Load TFLite model from Hugging Face Hub
# -----------------------------
model_path = hf_hub_download(
    repo_id="sidd-harth011/checkingPDRMod",  # ✅ your repo
    filename="plant_disease_model.tflite"
)

# Single process-wide interpreter: tensors are allocated once at startup
# and reused for every prediction request.
interpreter = tf.lite.Interpreter(model_path=model_path)
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# -----------------------------
# Load class indices (local file in repo)
# -----------------------------
# Context manager closes the handle deterministically; the previous
# `json.load(open(...))` form leaked the file object.
with open("class_indices.json") as f:
    class_indices = json.load(f)

# -----------------------------
# Preprocessing function
# -----------------------------
def load_and_preprocess_image(image, target_size=(224, 224)):
    """Resize, normalize, and batch a PIL image for the TFLite model.

    Args:
        image: PIL Image in any mode (grayscale/RGBA are converted to RGB).
        target_size: (width, height) the model's input tensor expects.

    Returns:
        np.float32 array of shape (1, height, width, 3), scaled to [0, 1].
    """
    # Force 3 channels: a grayscale ("L") or RGBA upload would otherwise
    # yield an array shape the model's (1, 224, 224, 3) input cannot accept.
    img = image.convert("RGB").resize(target_size)
    img_array = np.asarray(img, dtype=np.float32) / 255.0
    # Leading batch dimension required by the interpreter.
    return np.expand_dims(img_array, axis=0)

#-----------------------------
# Function to give disease name only
#-----------------------------

def clean_label(label: str) -> str:
    """Return a human-readable disease name from a dataset class label.

    Dataset labels look like ``Crop___Disease_name``; the crop prefix is
    dropped, underscores become spaces, and the result is title-cased.
    """
    # rpartition yields ('', '', label) when '___' is absent, so the
    # tail is always the portion we want to display.
    _, _, tail = label.rpartition("___")
    return tail.replace("_", " ").title()

# -----------------------------
# Prediction function
# -----------------------------
def predict_image_class(image):
    """Run the TFLite classifier on a PIL image and return a display string.

    Uses the module-level interpreter and class-index mapping; returns
    ``"Prediction: <Disease Name>"``.
    """
    batch = load_and_preprocess_image(image)
    interpreter.set_tensor(input_details[0]['index'], batch)
    interpreter.invoke()
    scores = interpreter.get_tensor(output_details[0]['index'])
    # class_indices is keyed by stringified class index.
    best_idx = np.argmax(scores, axis=1)[0]
    raw_label = class_indices[str(best_idx)]
    return f"Prediction: {clean_label(raw_label)}"

# -----------------------------
# OpenAI Chatbot (single-turn, no history)
# -----------------------------
def grok_chatbot(user_message):
    """Single-turn (no history) chat completion via Groq's OpenAI-compatible API.

    Args:
        user_message: the user's raw question text.

    Returns:
        The assistant's reply, or a friendly fallback string on any
        HTTP error or network failure.
    """
    payload = {
        "model": "openai/gpt-oss-20b",
        "messages": [
        {
         "role": "system",
         "content": "You are a helpful assistant specializing in plant disease diagnosis and treatment."
         },
         {
         "role": "system",
         "content": "When providing treatment advice, always recommend consulting a local agricultural expert or extension service for confirmation and additional guidance."
         },
         {
         "role": "system",
         "content": "Use bullet points for lists and keep responses concise and informative."
         },
        {
            "role": "user",
            "content": f"{user_message}\n\n(Please answer in under 400 words.)"
        }
    ],
        "temperature": 0.7,
        "max_tokens": 1000
    }

    headers = {
        "Authorization": f"Bearer {GROK_KEY}",
        "Content-Type": "application/json"
    }

    try:
        # Timeout so a stalled connection can't hang the Gradio handler
        # indefinitely; RequestException covers connection/timeout errors
        # that would otherwise crash the handler.
        response = requests.post(GROK_URL, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        print("Request failed:", exc)
        return "⚠️ Sorry, I couldn't process that. Try again!"

    if response.status_code == 200:
        bot_message = response.json()["choices"][0]["message"]["content"]
    else:
        print("Error:", response.status_code, response.text)
        bot_message = "⚠️ Sorry, I couldn't process that. Try again!"

    return bot_message

# -----------------------------
# Gradio Interface
# -----------------------------
# -----------------------------
# Gradio Interface
# -----------------------------
# Two-column layout: image classifier on the left, chatbot on the right.
with gr.Blocks(title="🌱 Plant Disease Classifier & AI Chatbot (OpenAI)") as demo:

    gr.Markdown("## 🌱 Plant Disease Classifier with AI Assistant (OpenAI)")

    with gr.Row():
        # Left: Plant classifier
        with gr.Column(scale=1):
            gr.Markdown("### Upload Image")
            image_input = gr.Image(type="pil", label="Upload a Plant Leaf Image")
            predict_button = gr.Button("Classify")
            prediction_output = gr.Textbox(label="Prediction")

            # Button wires the uploaded PIL image through the TFLite classifier.
            predict_button.click(fn=predict_image_class, inputs=image_input, outputs=prediction_output)

        # Right: AI Chatbot
        with gr.Column(scale=1):
            gr.Markdown("### 🤖 AI Chatbot")
            msg = gr.Textbox(label="Type your message")
            response_box = gr.Textbox(label="Bot Response", lines=5)
            send_btn = gr.Button("Send")

            # Single-turn chat: each send is independent, no history kept.
            send_btn.click(grok_chatbot, inputs=msg, outputs=response_box)

# Launch only when run as a script (e.g. `python app.py`), not on import.
if __name__ == "__main__":
    demo.launch()