sidd-harth011 commited on
Commit
fc33bcb
·
1 Parent(s): 4ca82bd
Files changed (3) hide show
  1. .gitignore +3 -1
  2. app.py +60 -17
  3. requirements.txt +3 -1
.gitignore CHANGED
@@ -1 +1,3 @@
1
- plant_disease_model.tflite
 
 
 
1
+ plant_disease_model.tflite
2
+ .env
3
+ app2.py
app.py CHANGED
@@ -4,30 +4,32 @@ from PIL import Image
4
  import numpy as np
5
  import tensorflow as tf
6
  import gradio as gr
 
7
  from huggingface_hub import hf_hub_download
8
 
9
  # -----------------------------
10
- # Download TFLite model from Hugging Face model repo
 
 
 
 
 
 
11
  # -----------------------------
12
  model_path = hf_hub_download(
13
- repo_id="sidd-harth011/checkingPDRMod", # your model repo
14
  filename="plant_disease_model.tflite"
15
  )
16
 
17
- # Load TFLite model
18
  interpreter = tf.lite.Interpreter(model_path=model_path)
19
  interpreter.allocate_tensors()
20
-
21
- # Get input and output details
22
  input_details = interpreter.get_input_details()
23
  output_details = interpreter.get_output_details()
24
 
25
  # -----------------------------
26
- # Load class indices locally from project repo
27
  # -----------------------------
28
- working_dir = os.path.dirname(os.path.abspath(__file__))
29
- class_indices_path = os.path.join(working_dir, "class_indices.json")
30
- class_indices = json.load(open(class_indices_path))
31
 
32
  # -----------------------------
33
  # Preprocessing function
@@ -51,16 +53,57 @@ def predict_image_class(image):
51
  predicted_class_name = class_indices[str(predicted_class_index)]
52
  return f"Prediction: {predicted_class_name}"
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  # -----------------------------
55
  # Gradio Interface
56
  # -----------------------------
57
- interface = gr.Interface(
58
- fn=predict_image_class,
59
- inputs=gr.Image(type="pil", label="Upload an Image"),
60
- outputs=gr.Textbox(label="Prediction"),
61
- title="🌱 Plant Disease Classifier (TFLite)",
62
- description="Upload a plant leaf image to classify its disease using a compressed TFLite model hosted on Hugging Face."
63
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
  if __name__ == "__main__":
66
- interface.launch()
 
4
  import numpy as np
5
  import tensorflow as tf
6
  import gradio as gr
7
+ import requests
8
  from huggingface_hub import hf_hub_download
9
 
10
# -----------------------------
# Load API key from Hugging Face Secrets
# -----------------------------
# The key is injected as a Space secret; it may be None locally.
OPENAI_KEY = os.getenv("OPENAI_API_KEY")
OPENAI_URL = "https://api.openai.com/v1/chat/completions"

# -----------------------------
# Load TFLite model from Hugging Face Hub
# -----------------------------
# Download the compressed model file from the model repo, then build
# a TFLite interpreter around it and pre-allocate its tensors.
model_path = hf_hub_download(
    repo_id="sidd-harth011/checkingPDRMod",  # your repo
    filename="plant_disease_model.tflite",
)

interpreter = tf.lite.Interpreter(model_path=model_path)
interpreter.allocate_tensors()

# Tensor metadata used later when feeding images / reading predictions.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
28
 
29
# -----------------------------
# Load class indices (local file in repo)
# -----------------------------
# Use a context manager so the file handle is closed deterministically
# (the previous json.load(open(...)) leaked the handle), and pin the
# encoding so the mapping loads identically on every platform.
with open("class_indices.json", encoding="utf-8") as _f:
    class_indices = json.load(_f)
33
 
34
  # -----------------------------
35
  # Preprocessing function
 
53
  predicted_class_name = class_indices[str(predicted_class_index)]
54
  return f"Prediction: {predicted_class_name}"
55
 
56
# -----------------------------
# OpenAI Chatbot (single-turn, no history)
# -----------------------------
def openai_chatbot(user_message):
    """Send one user message to the OpenAI chat API and return the reply.

    Parameters
    ----------
    user_message : str
        Text typed by the user; sent as a single-turn conversation
        (no chat history is kept).

    Returns
    -------
    str
        The assistant's reply, or a friendly error string on failure.
    """
    # Fail fast with a readable message when the Space secret is missing,
    # instead of sending an unauthenticated request that will 401.
    if not OPENAI_KEY:
        return "⚠️ OPENAI_API_KEY is not configured. Add it in the Space secrets."

    payload = {
        "model": "gpt-4o-mini",  # lightweight, works in Spaces
        "messages": [{"role": "user", "content": user_message}],
        "temperature": 0.7,
        "max_tokens": 500,
    }

    headers = {
        "Authorization": f"Bearer {OPENAI_KEY}",
        "Content-Type": "application/json",
    }

    try:
        # requests has no default timeout; without one an unreachable API
        # hangs the Gradio worker forever.
        response = requests.post(OPENAI_URL, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        # Network-level failures (DNS, refused connection, timeout) previously
        # raised and crashed the handler; return the friendly message instead.
        print("Request failed:", exc)
        return "⚠️ Sorry, I couldn't process that. Try again!"

    if response.status_code == 200:
        return response.json()["choices"][0]["message"]["content"]

    print("Error:", response.status_code, response.text)
    return "⚠️ Sorry, I couldn't process that. Try again!"
82
# -----------------------------
# Gradio Interface
# -----------------------------
# Two-column layout: plant-disease classifier on the left,
# single-turn OpenAI chatbot on the right.
with gr.Blocks(title="🌱 Plant Disease Classifier & AI Chatbot (OpenAI)") as demo:

    gr.Markdown("## 🌱 Plant Disease Classifier with AI Assistant (OpenAI)")

    with gr.Row():
        # Left column: upload a leaf image and classify it.
        with gr.Column(scale=1):
            gr.Markdown("### Upload Image")
            leaf_image = gr.Image(type="pil", label="Upload a Plant Leaf Image")
            classify_btn = gr.Button("Classify")
            result_box = gr.Textbox(label="Prediction")
            classify_btn.click(fn=predict_image_class, inputs=leaf_image, outputs=result_box)

        # Right column: free-form question box backed by the OpenAI API.
        with gr.Column(scale=1):
            gr.Markdown("### 🤖 AI Chatbot")
            chat_input = gr.Textbox(label="Type your message")
            chat_output = gr.Textbox(label="Bot Response", lines=5)
            send_button = gr.Button("Send")
            send_button.click(openai_chatbot, inputs=chat_input, outputs=chat_output)

if __name__ == "__main__":
    demo.launch()
requirements.txt CHANGED
@@ -1,3 +1,5 @@
1
  tensorflow
2
  numpy
3
- pillow
 
 
 
1
  tensorflow
2
  numpy
3
+ pillow
4
+ requests