premo625 commited on
Commit
44bf352
·
verified ·
1 Parent(s): 526d2c4

Upload 43 files

Browse files
Files changed (43) hide show
  1. Dockerfile +38 -0
  2. api/__init__.py +1 -0
  3. api/__pycache__/__init__.cpython-311.pyc +0 -0
  4. api/__pycache__/main.cpython-311.pyc +0 -0
  5. api/endpoints/__init__.py +1 -0
  6. api/endpoints/__pycache__/__init__.cpython-311.pyc +0 -0
  7. api/endpoints/__pycache__/paddy_disease.cpython-311.pyc +0 -0
  8. api/endpoints/__pycache__/pest.cpython-311.pyc +0 -0
  9. api/endpoints/__pycache__/plant_disease.cpython-311.pyc +0 -0
  10. api/endpoints/__pycache__/unified.cpython-311.pyc +0 -0
  11. api/endpoints/paddy_disease.py +22 -0
  12. api/endpoints/pest.py +22 -0
  13. api/endpoints/plant_disease.py +23 -0
  14. api/endpoints/unified.py +29 -0
  15. api/main.py +10 -0
  16. frontend_app.py +63 -0
  17. models/__init__.py +1 -0
  18. models/__pycache__/__init__.cpython-311.pyc +0 -0
  19. models/__pycache__/base.cpython-311.pyc +0 -0
  20. models/__pycache__/cache.cpython-311.pyc +0 -0
  21. models/__pycache__/loader.cpython-311.pyc +0 -0
  22. models/__pycache__/paddy_disease_model.cpython-311.pyc +0 -0
  23. models/__pycache__/pest_model.cpython-311.pyc +0 -0
  24. models/__pycache__/plant_disease_model.cpython-311.pyc +0 -0
  25. models/__pycache__/router_model.cpython-311.pyc +0 -0
  26. models/base.py +10 -0
  27. models/cache.py +19 -0
  28. models/configs/paddy_disease.yaml +5 -0
  29. models/configs/pest.yaml +5 -0
  30. models/configs/plant_disease.yaml +5 -0
  31. models/configs/router.yaml +5 -0
  32. models/loader.py +25 -0
  33. models/paddy_disease_model.py +66 -0
  34. models/pest_model.py +63 -0
  35. models/plant_disease_model.py +51 -0
  36. models/router_model.py +48 -0
  37. request_queue/__init__.py +1 -0
  38. request_queue/__pycache__/__init__.cpython-311.pyc +0 -0
  39. request_queue/__pycache__/queue_manager.cpython-311.pyc +0 -0
  40. request_queue/queue_manager.py +15 -0
  41. utils/__init__.py +1 -0
  42. utils/helpers.py +1 -0
  43. utils/logger.py +11 -0
Dockerfile ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Minimal Python base image for the AgroVisor FastAPI backend.
FROM python:3.10-slim

WORKDIR /app

# Install system dependencies for numpy, pandas, PIL, etc.
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    build-essential \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    gcc \
    git \
    wget \
    curl \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

# Set environment variables for UTF-8 and production
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1 \
    LC_ALL=C.UTF-8 \
    LANG=C.UTF-8

# Install Python deps before copying sources so the layer is cached
# independently of application-code changes.
COPY requirements.txt ./
RUN pip install --upgrade pip && pip install --no-cache-dir -r requirements.txt

COPY . .

# Expose the port for Hugging Face Spaces
EXPOSE 7860

# Healthcheck (optional, but recommended for cloud)
# NOTE(review): hitting /docs only proves the ASGI app is serving, not that
# the ML models can actually be loaded — consider a dedicated /health route.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
    CMD curl --fail http://localhost:7860/docs || exit 1

# Use python -m uvicorn for reliability
CMD ["python", "-m", "uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "7860"]
api/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # API package init
api/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (179 Bytes). View file
 
api/__pycache__/main.cpython-311.pyc ADDED
Binary file (835 Bytes). View file
 
api/endpoints/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # Endpoints package init
api/endpoints/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (189 Bytes). View file
 
api/endpoints/__pycache__/paddy_disease.cpython-311.pyc ADDED
Binary file (2.01 kB). View file
 
api/endpoints/__pycache__/pest.cpython-311.pyc ADDED
Binary file (1.96 kB). View file
 
api/endpoints/__pycache__/plant_disease.cpython-311.pyc ADDED
Binary file (2.02 kB). View file
 
api/endpoints/__pycache__/unified.cpython-311.pyc ADDED
Binary file (2.27 kB). View file
 
api/endpoints/paddy_disease.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import APIRouter, UploadFile, File, HTTPException
from fastapi.concurrency import run_in_threadpool
from models.loader import ModelLoader
from request_queue.queue_manager import RequestQueue

router = APIRouter(prefix="/predict/paddy-disease", tags=["Paddy Disease"])

# Shared loader: lazily loads and LRU-caches models from YAML configs.
model_loader = ModelLoader(config_dir="models/configs")
# Kept for interface compatibility; no longer used by the handler (see the
# bug-fix note inside predict_paddy_disease).
request_queue = RequestQueue(maxsize=10)

@router.post("")
async def predict_paddy_disease(file: UploadFile = File(...)):
    """Classify an uploaded paddy/rice leaf image.

    Returns the model's prediction dict plus the uploaded filename.
    Raises HTTP 500 with the underlying error message on any failure.
    """
    try:
        image_bytes = await file.read()

        def process_request():
            model = model_loader.load_model("paddy_disease")
            result = model.predict(image_bytes)
            return {"filename": file.filename, **result}

        # BUG FIX: the previous add_request()/get_request()() round-trip was
        # not atomic — under concurrent requests one handler could dequeue
        # the closure enqueued by ANOTHER request and return the wrong
        # user's prediction. Run the work directly instead, in a worker
        # thread so the blocking model inference does not stall the event
        # loop.
        return await run_in_threadpool(process_request)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
api/endpoints/pest.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import APIRouter, UploadFile, File, HTTPException
from fastapi.concurrency import run_in_threadpool
from models.loader import ModelLoader
from request_queue.queue_manager import RequestQueue

router = APIRouter(prefix="/predict/pest", tags=["Pest"])

# Shared loader: lazily loads and LRU-caches models from YAML configs.
model_loader = ModelLoader(config_dir="models/configs")
# Kept for interface compatibility; no longer used by the handler (see the
# bug-fix note inside predict_pest).
request_queue = RequestQueue(maxsize=10)

@router.post("")
async def predict_pest(file: UploadFile = File(...)):
    """Identify an agricultural pest from an uploaded image.

    Returns the model's prediction dict plus the uploaded filename.
    Raises HTTP 500 with the underlying error message on any failure.
    """
    try:
        image_bytes = await file.read()

        def process_request():
            model = model_loader.load_model("pest")
            result = model.predict(image_bytes)
            return {"filename": file.filename, **result}

        # BUG FIX: the previous add_request()/get_request()() round-trip was
        # not atomic — under concurrent requests one handler could dequeue
        # the closure enqueued by ANOTHER request and return the wrong
        # user's prediction. Run the work directly instead, in a worker
        # thread so the blocking model inference does not stall the event
        # loop.
        return await run_in_threadpool(process_request)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
api/endpoints/plant_disease.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import APIRouter, UploadFile, File, HTTPException
from fastapi.concurrency import run_in_threadpool
from models.loader import ModelLoader
from request_queue.queue_manager import RequestQueue

router = APIRouter(prefix="/predict/plant-disease", tags=["Plant Disease"])

# Initialize model loader (adjust config path as needed)
model_loader = ModelLoader(config_dir="models/configs")
# Kept for interface compatibility; no longer used by the handler (see the
# bug-fix note inside predict_plant_disease).
request_queue = RequestQueue(maxsize=10)

@router.post("")
async def predict_plant_disease(file: UploadFile = File(...)):
    """Classify a general plant disease from an uploaded image.

    Returns the model's prediction dict plus the uploaded filename.
    Raises HTTP 500 with the underlying error message on any failure.
    """
    try:
        image_bytes = await file.read()

        def process_request():
            model = model_loader.load_model("plant_disease")
            result = model.predict(image_bytes)
            return {"filename": file.filename, **result}

        # BUG FIX: the previous add_request()/get_request()() round-trip was
        # not atomic — under concurrent requests one handler could dequeue
        # the closure enqueued by ANOTHER request and return the wrong
        # user's prediction. Run the work directly instead, in a worker
        # thread so the blocking model inference does not stall the event
        # loop.
        return await run_in_threadpool(process_request)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
api/endpoints/unified.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import APIRouter, UploadFile, File, HTTPException
from fastapi.concurrency import run_in_threadpool
from models.loader import ModelLoader
from request_queue.queue_manager import RequestQueue

router = APIRouter(prefix="/predict/unified", tags=["Unified"])

model_loader = ModelLoader(config_dir="models/configs")

# Kept for interface compatibility; no longer used by the handler (see the
# bug-fix note inside predict_unified).
request_queue = RequestQueue(maxsize=10)

@router.post("")
async def predict_unified(file: UploadFile = File(...)):
    """Two-stage classification of an uploaded image.

    Stage 1 runs the router model to pick a specialist classifier; stage 2
    runs that specialist. Returns the specialist's prediction merged with
    the router's decision under "router_info". Raises HTTP 500 on failure.
    """
    try:
        image_bytes = await file.read()

        def process_request():
            # Stage 1: router decides which specialist model applies.
            router_model = model_loader.load_model("router")
            router_result = router_model.predict(image_bytes)
            model_key = router_result.get("model_key", "plant_disease")
            # Stage 2: specialist classification with the chosen model.
            model = model_loader.load_model(model_key)
            result = model.predict(image_bytes)
            return {"filename": file.filename, "router_info": router_result, **result}

        # BUG FIX: the previous add_request()/get_request()() round-trip was
        # not atomic — under concurrent requests one handler could dequeue
        # the closure enqueued by ANOTHER request and return the wrong
        # user's prediction. Run the work directly instead, in a worker
        # thread so the blocking model inference does not stall the event
        # loop.
        return await run_in_threadpool(process_request)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
api/main.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
from fastapi import FastAPI
from api.endpoints import plant_disease, paddy_disease, pest, unified

# ASGI entry point served by uvicorn ("api.main:app" in the Dockerfile CMD).
app = FastAPI(title="AgroVisor API", description="Modular, scalable backend for Hugging Face Spaces.")

# Include routers from endpoints (to be implemented in each endpoint module)
app.include_router(plant_disease.router)
app.include_router(paddy_disease.router)
app.include_router(pest.router)
app.include_router(unified.router)
frontend_app.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import streamlit as st
import requests
from PIL import Image
import io
import time
# NOTE(review): `io` and `time` are imported but unused in this script.

# Backend base URL; the FastAPI app listens on 7860 (matches Dockerfile EXPOSE).
API_BASE_URL = "http://localhost:7860"

st.set_page_config(page_title="AgroVisor AI System", page_icon="🌱", layout="wide")

st.title("🌱 AgroVisor AI System")
st.markdown("Efficient, modular backend with queuing and resource management.")

# Sidebar
st.sidebar.title("Navigation")
page = st.sidebar.radio("Choose task", [
    "Unified Classification",
    "Plant Disease Detection",
    "Paddy Disease Classification",
    "Pest Identification"
])

# Helper to handle API requests and queue status
def predict(endpoint, file):
    """POST the uploaded file to /predict/<endpoint>.

    Returns a (json_result, error_message) pair where exactly one element
    is None. A 429 status is reported as a "queue full" message; any other
    non-200 status or a transport failure becomes an error string.
    """
    files = {"file": (file.name, file.getvalue(), file.type)}
    try:
        response = requests.post(f"{API_BASE_URL}/predict/{endpoint}", files=files, timeout=60)
        if response.status_code == 200:
            return response.json(), None
        elif response.status_code == 429:
            return None, "Server busy or queue full. Please wait and try again."
        else:
            return None, f"Error: {response.status_code} - {response.text}"
    except requests.exceptions.RequestException as e:
        return None, f"Request failed: {e}"

# Main UI logic
def show_predict_ui(endpoint, label):
    """Render the upload → preview → predict flow for a single endpoint."""
    st.header(label)
    uploaded_file = st.file_uploader(f"Upload an image for {label.lower()}", type=["jpg", "jpeg", "png"])
    if uploaded_file:
        image = Image.open(uploaded_file)
        st.image(image, caption="Uploaded Image", use_column_width=True)
        if st.button(f"Predict {label}"):
            with st.spinner("Processing and waiting in queue if needed..."):
                result, error = predict(endpoint, uploaded_file)
                if error:
                    st.error(error)
                elif result:
                    st.success("Prediction complete!")
                    st.json(result)

# Dispatch the sidebar selection to its backend endpoint slug.
if page == "Unified Classification":
    show_predict_ui("unified", "Unified Classification")
elif page == "Plant Disease Detection":
    show_predict_ui("plant-disease", "Plant Disease Detection")
elif page == "Paddy Disease Classification":
    show_predict_ui("paddy-disease", "Paddy Disease Classification")
elif page == "Pest Identification":
    show_predict_ui("pest", "Pest Identification")

st.sidebar.markdown("---")
st.sidebar.info("Backend: FastAPI (modular, lazy loading, LRU cache, queuing)")
models/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # Models package init
models/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (182 Bytes). View file
 
models/__pycache__/base.cpython-311.pyc ADDED
Binary file (817 Bytes). View file
 
models/__pycache__/cache.cpython-311.pyc ADDED
Binary file (1.74 kB). View file
 
models/__pycache__/loader.cpython-311.pyc ADDED
Binary file (2.26 kB). View file
 
models/__pycache__/paddy_disease_model.cpython-311.pyc ADDED
Binary file (5.15 kB). View file
 
models/__pycache__/pest_model.cpython-311.pyc ADDED
Binary file (4.83 kB). View file
 
models/__pycache__/plant_disease_model.cpython-311.pyc ADDED
Binary file (3.8 kB). View file
 
models/__pycache__/router_model.cpython-311.pyc ADDED
Binary file (3.05 kB). View file
 
models/base.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
from abc import ABC, abstractmethod

class BaseModel(ABC):
    """Abstract interface every classifier model must implement.

    Concrete subclasses provide ``load`` (bring weights/resources into
    memory) and ``predict`` (run inference on raw input data).
    """

    @abstractmethod
    def load(self):
        """Load the model's weights and any auxiliary resources."""

    @abstractmethod
    def predict(self, input_data):
        """Run inference on ``input_data`` and return the result."""
models/cache.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from collections import OrderedDict

class ModelCache(OrderedDict):
    """Least-recently-used cache for loaded models.

    Reads and writes both refresh an entry's recency; once the cache
    exceeds ``max_size`` entries, the stalest entry is evicted.
    """

    def __init__(self, max_size=2):
        super().__init__()
        self.max_size = max_size

    def __getitem__(self, key):
        # A successful lookup makes the entry most-recently-used.
        item = super().__getitem__(key)
        self.move_to_end(key)
        return item

    def __setitem__(self, key, value):
        # Overwriting an existing key refreshes its recency first.
        if key in self:
            self.move_to_end(key)
        super().__setitem__(key, value)
        # Evict from the stale end until we are back within capacity.
        while len(self) > self.max_size:
            del self[next(iter(self))]
models/configs/paddy_disease.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Dynamic-import spec consumed by models.loader.ModelLoader:
# `module`/`class` are resolved via importlib; `params` go to the constructor.
module: models.paddy_disease_model
class: PaddyDiseaseModel
params:
  model_path: "Models here/paddy_diseases_classifier_cnn.keras"
  annotation_path: annotations/paddy_disease_classifier.csv
models/configs/pest.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Dynamic-import spec consumed by models.loader.ModelLoader:
# `module`/`class` are resolved via importlib; `params` go to the constructor.
module: models.pest_model
class: PestModel
params:
  model_path: "Models here/pest_classifier_effnetB3.keras"
  annotation_path: annotations/pest_classifier.csv
models/configs/plant_disease.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Dynamic-import spec consumed by models.loader.ModelLoader:
# `module`/`class` are resolved via importlib; `params` go to the constructor.
module: models.plant_disease_model
class: PlantDiseaseModel
params:
  model_path: "Models here/plant_disease_classifier_cnn.keras"
  annotation_path: annotations/plant_disease_classifier.csv
models/configs/router.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
# Dynamic-import spec consumed by models.loader.ModelLoader:
# `module`/`class` are resolved via importlib; `params` go to the constructor.
module: models.router_model
class: RouterModel
params:
  model_path: "Models here/router_classifier_best.keras"
  annotation_path: annotations/router_classifier.csv
models/loader.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import threading
from .cache import ModelCache
from .base import BaseModel
import yaml
import importlib

class ModelLoader:
    """Thread-safe model factory with lazy loading and LRU caching.

    Each model is described by ``<config_dir>/<name>.yaml`` containing a
    ``module``/``class`` pair (resolved dynamically) and constructor
    ``params``. At most ``cache_size`` loaded models are kept in memory.
    """

    def __init__(self, config_dir, cache_size=2):
        self.config_dir = config_dir
        self.cache = ModelCache(max_size=cache_size)
        self.lock = threading.Lock()

    def load_model(self, model_name):
        """Return the model named ``model_name``, loading it on first use."""
        with self.lock:
            # Fast path: already loaded and cached (refreshes LRU recency).
            try:
                return self.cache[model_name]
            except KeyError:
                pass
            cfg_path = f"{self.config_dir}/{model_name}.yaml"
            with open(cfg_path, 'r') as handle:
                cfg = yaml.safe_load(handle)
            # Resolve the implementation class dynamically from the config.
            target_cls = getattr(importlib.import_module(cfg['module']), cfg['class'])
            instance = target_cls(**cfg.get('params', {}))
            instance.load()
            self.cache[model_name] = instance
            return instance
models/paddy_disease_model.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import tensorflow as tf
import pandas as pd
import numpy as np
from models.base import BaseModel
from PIL import Image
import io
from keras import backend as K
from keras.saving import register_keras_serializable

class PaddyDiseaseModel(BaseModel):
    """Keras CNN classifier for paddy/rice leaf diseases.

    The saved model was trained with a custom focal loss, so loading it
    requires re-supplying that loss via ``custom_objects``.
    """

    def __init__(self, model_path, annotation_path):
        # model_path: saved .keras model file.
        self.model_path = model_path
        # annotation_path: CSV with a 'class_name' column; row order is
        # assumed to match the model's output indices — TODO confirm.
        self.annotation_path = annotation_path
        self.model = None
        self.class_names = []

    def focal_loss(self, gamma=2.0, alpha=0.25):
        """Build the focal-loss closure used when the model was trained."""
        # Registered so Keras can resolve the custom loss by name during
        # (de)serialization of the saved model.
        @register_keras_serializable()
        def focal_loss_fixed(y_true, y_pred):
            # Clip predictions away from 0/1 so log() stays finite.
            y_pred = tf.clip_by_value(y_pred, K.epsilon(), 1. - K.epsilon())
            cross_entropy = -y_true * tf.math.log(y_pred)
            # Focal weighting: down-weight well-classified examples.
            weight = alpha * tf.pow(1 - y_pred, gamma)
            loss = weight * cross_entropy
            return tf.reduce_mean(tf.reduce_sum(loss, axis=1))
        return focal_loss_fixed

    def load(self):
        """Load the Keras model (with its custom loss) and class names."""
        self.model = tf.keras.models.load_model(
            self.model_path,
            custom_objects={'focal_loss_fixed': self.focal_loss(gamma=2.0, alpha=0.25)}
        )
        df = pd.read_csv(self.annotation_path)
        self.class_names = df['class_name'].tolist()

    def preprocess(self, image_bytes):
        """Decode raw bytes into a normalized (1, 224, 224, 3) float batch."""
        image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
        img = np.array(image)
        img = tf.image.resize(img, (224, 224)).numpy()
        # Scale to [0, 1]; assumes the model was trained on /255-normalized
        # inputs — TODO confirm against the training pipeline.
        img = img.astype('float32') / 255.0
        img = np.expand_dims(img, axis=0)
        return img

    def predict(self, image_bytes):
        """Classify the image; return top-1 plus the top-3 candidates."""
        img = self.preprocess(image_bytes)
        preds = self.model.predict(img)[0]
        idx = int(np.argmax(preds))
        confidence = float(preds[idx])
        class_name = self.class_names[idx]
        # argsort is ascending: take the last 3 and reverse for descending.
        top_3_idx = np.argsort(preds)[-3:][::-1]
        top_3 = [
            {"class_index": int(i), "class_name": self.class_names[i], "confidence": float(preds[i])}
            for i in top_3_idx
        ]
        # Heuristic: class names containing "healthy"/"normal" mean no disease.
        is_healthy = 'healthy' in class_name.lower() or 'normal' in class_name.lower()
        return {
            "endpoint": "paddy-disease",
            "description": "Specialized paddy/rice disease classification",
            "prediction": {
                "class_index": idx,
                "class_name": class_name,
                "confidence": confidence,
                "is_healthy": is_healthy
            },
            "top_3_predictions": top_3,
            "model_used": "paddy_disease"
        }
models/pest_model.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import tensorflow as tf
import pandas as pd
import numpy as np
from models.base import BaseModel
from PIL import Image
import io
from keras import backend as K

class PestModel(BaseModel):
    """Keras (EfficientNet-B3) classifier for agricultural pests.

    The saved model was trained with a custom focal loss, supplied at
    load time via ``custom_objects``.
    """

    def __init__(self, model_path, annotation_path):
        # model_path: saved .keras model file.
        self.model_path = model_path
        # annotation_path: CSV with a 'class_name' column; row order is
        # assumed to match the model's output indices — TODO confirm.
        self.annotation_path = annotation_path
        self.model = None
        self.class_names = []

    def focal_loss(self, gamma=2.0, alpha=0.25):
        """Build the focal-loss closure used when the model was trained."""
        # NOTE(review): unlike PaddyDiseaseModel, this version is not
        # decorated with @register_keras_serializable and reduces with
        # reduce_sum (per-sample) instead of reduce_mean — verify this
        # matches the loss actually used to train/save the pest model.
        def focal_loss_fixed(y_true, y_pred):
            # Clip predictions away from 0/1 so log() stays finite.
            y_pred = tf.clip_by_value(y_pred, K.epsilon(), 1. - K.epsilon())
            cross_entropy = -y_true * tf.math.log(y_pred)
            # Focal weighting: down-weight well-classified examples.
            loss = alpha * tf.pow(1 - y_pred, gamma) * cross_entropy
            return tf.reduce_sum(loss, axis=1)
        return focal_loss_fixed

    def load(self):
        """Load the Keras model (with its custom loss) and class names."""
        self.model = tf.keras.models.load_model(
            self.model_path,
            custom_objects={'focal_loss_fixed': self.focal_loss(gamma=2.0, alpha=0.25)}
        )
        df = pd.read_csv(self.annotation_path)
        self.class_names = df['class_name'].tolist()

    def preprocess(self, image_bytes):
        """Decode raw bytes into a normalized (1, 224, 224, 3) float batch."""
        image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
        img = np.array(image)
        img = tf.image.resize(img, (224, 224)).numpy()
        # Scale to [0, 1]; assumes /255 normalization at training time —
        # TODO confirm (EfficientNet variants often expect other scaling).
        img = img.astype('float32') / 255.0
        img = np.expand_dims(img, axis=0)
        return img

    def predict(self, image_bytes):
        """Classify the image; return top-1 plus the top-3 candidates."""
        img = self.preprocess(image_bytes)
        preds = self.model.predict(img)[0]
        idx = int(np.argmax(preds))
        confidence = float(preds[idx])
        class_name = self.class_names[idx]
        # argsort is ascending: take the last 3 and reverse for descending.
        top_3_idx = np.argsort(preds)[-3:][::-1]
        top_3 = [
            {"class_index": int(i), "class_name": self.class_names[i], "confidence": float(preds[i])}
            for i in top_3_idx
        ]
        # Heuristic: class names containing "healthy"/"normal" mean no pest.
        is_healthy = 'healthy' in class_name.lower() or 'normal' in class_name.lower()
        return {
            "endpoint": "pest",
            "description": "Agricultural pest identification",
            "prediction": {
                "class_index": idx,
                "class_name": class_name,
                "confidence": confidence,
                "is_healthy": is_healthy
            },
            "top_3_predictions": top_3,
            "model_used": "pest"
        }
models/plant_disease_model.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import tensorflow as tf
import pandas as pd
import numpy as np
from models.base import BaseModel
from PIL import Image
import io

class PlantDiseaseModel(BaseModel):
    """Keras CNN classifier for general plant diseases.

    Class labels come from a CSV annotation file whose row order matches
    the model's output indices.
    """

    def __init__(self, model_path, annotation_path):
        self.model_path = model_path
        self.annotation_path = annotation_path
        self.model = None
        self.class_names = []

    def load(self):
        """Load the saved Keras model and its class-name annotations."""
        self.model = tf.keras.models.load_model(self.model_path)
        annotations = pd.read_csv(self.annotation_path)
        self.class_names = annotations['class_name'].tolist()

    def preprocess(self, image_bytes):
        """Decode raw bytes into a normalized (1, 224, 224, 3) float batch."""
        pil_image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
        resized = tf.image.resize(np.array(pil_image), (224, 224)).numpy()
        normalized = resized.astype('float32') / 255.0
        return normalized[np.newaxis, ...]

    def predict(self, image_bytes):
        """Classify the image; return top-1 plus the top-3 candidates."""
        batch = self.preprocess(image_bytes)
        scores = self.model.predict(batch)[0]
        ranked = np.argsort(scores)[::-1]          # class indices, best first
        best = int(ranked[0])
        label = self.class_names[best]
        top_3 = [
            {
                "class_index": int(i),
                "class_name": self.class_names[i],
                "confidence": float(scores[i]),
            }
            for i in ranked[:3]
        ]
        # Heuristic: labels containing "healthy"/"normal" mean no disease.
        lowered = label.lower()
        is_healthy = ('healthy' in lowered) or ('normal' in lowered)
        return {
            "endpoint": "plant-disease",
            "description": "General plant disease classification",
            "prediction": {
                "class_index": best,
                "class_name": label,
                "confidence": float(scores[best]),
                "is_healthy": is_healthy
            },
            "top_3_predictions": top_3,
            "model_used": "plant_disease"
        }
models/router_model.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import tensorflow as tf
import pandas as pd
import numpy as np
from models.base import BaseModel
from PIL import Image
import io

class RouterModel(BaseModel):
    """First-stage classifier that decides which specialist model to run.

    Its predicted class name is mapped (by substring) onto one of the
    specialist model keys: plant_disease, paddy_disease, or pest.
    """

    def __init__(self, model_path, annotation_path):
        self.model_path = model_path
        self.annotation_path = annotation_path
        self.model = None
        self.class_names = []

    def load(self):
        """Load the saved Keras router model and its class names."""
        self.model = tf.keras.models.load_model(self.model_path)
        annotations = pd.read_csv(self.annotation_path)
        self.class_names = annotations['class_name'].tolist()

    def preprocess(self, image_bytes):
        """Decode raw bytes into a normalized (1, 224, 224, 3) float batch."""
        pil_image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
        resized = tf.image.resize(np.array(pil_image), (224, 224)).numpy()
        normalized = resized.astype('float32') / 255.0
        return normalized[np.newaxis, ...]

    def predict(self, image_bytes):
        """Classify the image and map the label to a specialist model key."""
        batch = self.preprocess(image_bytes)
        scores = self.model.predict(batch)[0]
        best = int(np.argmax(scores))
        label = self.class_names[best]
        # Substring routing, checked in this order; unmatched labels fall
        # back to the general plant-disease model.
        routing = (
            ('plant', 'plant_disease'),
            ('paddy', 'paddy_disease'),
            ('pest', 'pest'),
        )
        model_key = 'plant_disease'
        for token, key in routing:
            if token in label:
                model_key = key
                break
        return {
            "router_prediction": label,
            "router_confidence": float(scores[best]),
            "model_key": model_key,
            "available_classifiers": self.class_names
        }
request_queue/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # Queue package init
request_queue/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (189 Bytes). View file
 
request_queue/__pycache__/queue_manager.cpython-311.pyc ADDED
Binary file (1.65 kB). View file
 
request_queue/queue_manager.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import threading
from queue import Queue

class RequestQueue:
    """Thin FIFO wrapper around ``queue.Queue`` for pending request callables.

    ``queue.Queue`` is already thread-safe, so no extra locking is needed.

    BUG FIX: the previous implementation held a private ``threading.Lock``
    around both ``put`` and the *blocking* ``get``. A consumer calling
    ``get_request()`` on an empty queue would block inside ``queue.get()``
    while still holding the lock, so no producer could ever acquire it to
    enqueue work — a guaranteed deadlock. The redundant lock is no longer
    used on either path.
    """

    def __init__(self, maxsize=10):
        # maxsize bounds pending work; put() blocks once the queue is full.
        self.queue = Queue(maxsize=maxsize)
        # Kept for backward compatibility with any external code that
        # referenced it; no longer used internally.
        self.lock = threading.Lock()

    def add_request(self, request):
        """Enqueue a request callable; blocks while the queue is full."""
        self.queue.put(request)

    def get_request(self):
        """Dequeue the oldest request callable; blocks while empty."""
        return self.queue.get()
utils/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # Utils package init
utils/helpers.py ADDED
@@ -0,0 +1 @@
 
 
1
+ # General helper functions can be added here
utils/logger.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
import logging

def get_logger(name):
    """Return a configured logger for ``name``.

    Idempotent: the StreamHandler with a timestamped format is attached
    only if the logger has no handlers yet, so repeated calls do not
    duplicate output.
    """
    log = logging.getLogger(name)
    if not log.handlers:
        stream = logging.StreamHandler()
        stream.setFormatter(
            logging.Formatter('[%(asctime)s] %(levelname)s in %(name)s: %(message)s')
        )
        log.addHandler(stream)
        log.setLevel(logging.INFO)
    return log