Fola-AI commited on
Commit ·
f45df09
0
Parent(s):
Initial FarmEyes deployment - AI Powered Crop Disease Detection Program
Browse files — This view is limited to 50 files because it contains too many changes. See raw diff
- .DS_Store +0 -0
- .gitattributes +1 -0
- .ipynb_checkpoints/config-checkpoint.py +452 -0
- .ipynb_checkpoints/test-checkpoint.ipynb +6 -0
- Dockerfile +70 -0
- README.md +143 -0
- api/.DS_Store +0 -0
- api/__init__.py +22 -0
- api/__pycache__/__init__.cpython-310.pyc +0 -0
- api/__pycache__/__init__.cpython-312.pyc +0 -0
- api/routes/.DS_Store +0 -0
- api/routes/__init__.py +15 -0
- api/routes/__pycache__/__init__.cpython-310.pyc +0 -0
- api/routes/__pycache__/__init__.cpython-312.pyc +0 -0
- api/routes/__pycache__/chat.cpython-310.pyc +0 -0
- api/routes/__pycache__/chat.cpython-312.pyc +0 -0
- api/routes/__pycache__/detection.cpython-310.pyc +0 -0
- api/routes/__pycache__/detection.cpython-312.pyc +0 -0
- api/routes/__pycache__/transcribe.cpython-310.pyc +0 -0
- api/routes/__pycache__/transcribe.cpython-312.pyc +0 -0
- api/routes/__pycache__/tts.cpython-310.pyc +0 -0
- api/routes/chat.py +340 -0
- api/routes/detection.py +381 -0
- api/routes/transcribe.py +418 -0
- api/routes/tts.py +182 -0
- config.py +635 -0
- data/knowledge_base.json +1115 -0
- frontend/.DS_Store +0 -0
- frontend/css/.DS_Store +0 -0
- frontend/css/main.css +1451 -0
- frontend/index.html +321 -0
- frontend/js/api.js +417 -0
- frontend/js/app.js +274 -0
- frontend/js/chat.js +766 -0
- frontend/js/diagnosis.js +515 -0
- frontend/js/i18n.js +328 -0
- frontend/js/tts.js +526 -0
- frontend/js/voice.js +951 -0
- gitignore +119 -0
- main.py +442 -0
- models/.DS_Store +0 -0
- models/.ipynb_checkpoints/__init__-checkpoint.py +31 -0
- models/.ipynb_checkpoints/natlas_model-checkpoint.py +787 -0
- models/.ipynb_checkpoints/yolo_model-checkpoint.py +699 -0
- models/__init__.py +51 -0
- models/farmeyes_yolov11.pt +3 -0
- models/natlas_model.py +647 -0
- models/yolo_model.py +703 -0
- requirements.txt +70 -0
- services/__init__.py +86 -0
.DS_Store
ADDED
|
Binary file (10.2 kB). View file
|
|
|
.gitattributes
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
.ipynb_checkpoints/config-checkpoint.py
ADDED
|
@@ -0,0 +1,452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Configuration File
|
| 3 |
+
===========================
|
| 4 |
+
Central configuration for the FarmEyes crop disease detection application.
|
| 5 |
+
Contains model paths, class mappings, device settings, and app configurations.
|
| 6 |
+
|
| 7 |
+
Device: Apple Silicon M1 Pro with MPS (Metal Performance Shaders) acceleration
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import os
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from typing import Dict, List, Optional
|
| 13 |
+
from dataclasses import dataclass, field
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# =============================================================================
|
| 17 |
+
# PATH CONFIGURATIONS
|
| 18 |
+
# =============================================================================
|
| 19 |
+
|
| 20 |
+
# Base project directory - update this to your local path
|
| 21 |
+
BASE_DIR = Path(__file__).parent.resolve()
|
| 22 |
+
|
| 23 |
+
# Data directories
|
| 24 |
+
DATA_DIR = BASE_DIR / "data"
|
| 25 |
+
STATIC_DIR = BASE_DIR / "static"
|
| 26 |
+
MODELS_DIR = BASE_DIR / "models"
|
| 27 |
+
OUTPUTS_DIR = BASE_DIR / "outputs"
|
| 28 |
+
|
| 29 |
+
# Create directories if they don't exist
|
| 30 |
+
for directory in [DATA_DIR, STATIC_DIR, MODELS_DIR, OUTPUTS_DIR]:
|
| 31 |
+
directory.mkdir(parents=True, exist_ok=True)
|
| 32 |
+
|
| 33 |
+
# Knowledge base and UI translations paths
|
| 34 |
+
KNOWLEDGE_BASE_PATH = DATA_DIR / "knowledge_base.json"
|
| 35 |
+
UI_TRANSLATIONS_PATH = STATIC_DIR / "ui_translations.json"
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# =============================================================================
|
| 39 |
+
# MODEL CONFIGURATIONS
|
| 40 |
+
# =============================================================================
|
| 41 |
+
|
| 42 |
+
@dataclass
|
| 43 |
+
class YOLOConfig:
|
| 44 |
+
"""Configuration for YOLOv11 disease detection model"""
|
| 45 |
+
|
| 46 |
+
# Path to trained YOLOv11 model weights (.pt file)
|
| 47 |
+
# Update this path once your model training is complete
|
| 48 |
+
model_path: Path = MODELS_DIR / "farmeyes_yolov11.pt"
|
| 49 |
+
|
| 50 |
+
# Confidence threshold for detections (0.0 - 1.0)
|
| 51 |
+
confidence_threshold: float = 0.5
|
| 52 |
+
|
| 53 |
+
# IoU threshold for non-maximum suppression
|
| 54 |
+
iou_threshold: float = 0.45
|
| 55 |
+
|
| 56 |
+
# Input image size (YOLOv11 default)
|
| 57 |
+
input_size: int = 640
|
| 58 |
+
|
| 59 |
+
# Maximum number of detections per image
|
| 60 |
+
max_detections: int = 10
|
| 61 |
+
|
| 62 |
+
# Device for inference ('mps' for Apple Silicon, 'cuda' for NVIDIA, 'cpu' for CPU)
|
| 63 |
+
device: str = "mps"
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@dataclass
|
| 67 |
+
class NATLaSConfig:
|
| 68 |
+
"""Configuration for N-ATLaS language model (GGUF format)"""
|
| 69 |
+
|
| 70 |
+
# Hugging Face model repository
|
| 71 |
+
hf_repo: str = "tosinamuda/N-ATLaS-GGUF"
|
| 72 |
+
|
| 73 |
+
# GGUF model filename (16-bit quantized version)
|
| 74 |
+
model_filename: str = "N-ATLaS-8B-Instruct-v2.2-F16.gguf"
|
| 75 |
+
|
| 76 |
+
# Local path where model will be downloaded/cached
|
| 77 |
+
model_path: Path = MODELS_DIR / "natlas"
|
| 78 |
+
|
| 79 |
+
# Full path to the GGUF file
|
| 80 |
+
@property
|
| 81 |
+
def gguf_path(self) -> Path:
|
| 82 |
+
return self.model_path / self.model_filename
|
| 83 |
+
|
| 84 |
+
# Context window size (tokens)
|
| 85 |
+
context_length: int = 4096
|
| 86 |
+
|
| 87 |
+
# Maximum tokens to generate in response
|
| 88 |
+
max_tokens: int = 1024
|
| 89 |
+
|
| 90 |
+
# Temperature for text generation (0.0 = deterministic, 1.0 = creative)
|
| 91 |
+
temperature: float = 0.7
|
| 92 |
+
|
| 93 |
+
# Top-p (nucleus) sampling
|
| 94 |
+
top_p: float = 0.9
|
| 95 |
+
|
| 96 |
+
# Number of GPU layers to offload (for MPS acceleration)
|
| 97 |
+
# Set to -1 to offload all layers, 0 for CPU only
|
| 98 |
+
n_gpu_layers: int = -1
|
| 99 |
+
|
| 100 |
+
# Number of threads for CPU computation
|
| 101 |
+
n_threads: int = 8
|
| 102 |
+
|
| 103 |
+
# Batch size for prompt processing
|
| 104 |
+
n_batch: int = 512
|
| 105 |
+
|
| 106 |
+
# Device for inference
|
| 107 |
+
device: str = "mps"
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# =============================================================================
|
| 111 |
+
# DISEASE CLASS MAPPINGS
|
| 112 |
+
# =============================================================================
|
| 113 |
+
|
| 114 |
+
# YOLOv11 class index to disease key mapping
|
| 115 |
+
# These match the class indices from our trained model
|
| 116 |
+
CLASS_INDEX_TO_KEY: Dict[int, str] = {
|
| 117 |
+
0: "cassava_bacterial_blight",
|
| 118 |
+
1: "cassava_healthy",
|
| 119 |
+
2: "cassava_mosaic_disease",
|
| 120 |
+
3: "cocoa_healthy",
|
| 121 |
+
4: "cocoa_monilia_disease",
|
| 122 |
+
5: "cocoa_phytophthora_disease",
|
| 123 |
+
6: "tomato_gray_mold",
|
| 124 |
+
7: "tomato_healthy",
|
| 125 |
+
8: "tomato_viral_disease",
|
| 126 |
+
9: "tomato_wilt_disease"
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
# Reverse mapping: disease key to class index
|
| 130 |
+
KEY_TO_CLASS_INDEX: Dict[str, int] = {v: k for k, v in CLASS_INDEX_TO_KEY.items()}
|
| 131 |
+
|
| 132 |
+
# Class names as they appear in YOLO training (matches our data.yaml file)
|
| 133 |
+
CLASS_NAMES: List[str] = [
|
| 134 |
+
"Cassava Bacteria Blight",
|
| 135 |
+
"Cassava Healthy Leaf",
|
| 136 |
+
"Cassava Mosaic Disease",
|
| 137 |
+
"Cocoa Healthy Leaf",
|
| 138 |
+
"Cocoa Monilia Disease",
|
| 139 |
+
"Cocoa Phytophthora Disease",
|
| 140 |
+
"Tomato Gray Mold Disease",
|
| 141 |
+
"Tomato Healthy Leaf",
|
| 142 |
+
"Tomato Viral Disease",
|
| 143 |
+
"Tomato Wilt Disease"
|
| 144 |
+
]
|
| 145 |
+
|
| 146 |
+
# Healthy class indices (for quick identification)
|
| 147 |
+
HEALTHY_CLASS_INDICES: List[int] = [1, 3, 7] # cassava_healthy, cocoa_healthy, tomato_healthy
|
| 148 |
+
|
| 149 |
+
# Disease class indices (excluding healthy)
|
| 150 |
+
DISEASE_CLASS_INDICES: List[int] = [0, 2, 4, 5, 6, 8, 9]
|
| 151 |
+
|
| 152 |
+
# Crop type mapping
|
| 153 |
+
CROP_TYPES: Dict[str, List[int]] = {
|
| 154 |
+
"cassava": [0, 1, 2],
|
| 155 |
+
"cocoa": [3, 4, 5],
|
| 156 |
+
"tomato": [6, 7, 8, 9]
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
# Reverse mapping: class index to crop type
|
| 160 |
+
CLASS_TO_CROP: Dict[int, str] = {}
|
| 161 |
+
for crop, indices in CROP_TYPES.items():
|
| 162 |
+
for idx in indices:
|
| 163 |
+
CLASS_TO_CROP[idx] = crop
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
# =============================================================================
|
| 167 |
+
# LANGUAGE CONFIGURATIONS
|
| 168 |
+
# =============================================================================
|
| 169 |
+
|
| 170 |
+
@dataclass
|
| 171 |
+
class LanguageConfig:
|
| 172 |
+
"""Configuration for supported languages"""
|
| 173 |
+
|
| 174 |
+
# Supported language codes
|
| 175 |
+
supported_languages: List[str] = field(default_factory=lambda: ["en", "ha", "yo", "ig"])
|
| 176 |
+
|
| 177 |
+
# Default language
|
| 178 |
+
default_language: str = "en"
|
| 179 |
+
|
| 180 |
+
# Language display names
|
| 181 |
+
language_names: Dict[str, str] = field(default_factory=lambda: {
|
| 182 |
+
"en": "English",
|
| 183 |
+
"ha": "Hausa",
|
| 184 |
+
"yo": "Yorùbá",
|
| 185 |
+
"ig": "Igbo"
|
| 186 |
+
})
|
| 187 |
+
|
| 188 |
+
# Language codes for N-ATLaS prompts
|
| 189 |
+
language_full_names: Dict[str, str] = field(default_factory=lambda: {
|
| 190 |
+
"en": "English",
|
| 191 |
+
"ha": "Hausa",
|
| 192 |
+
"yo": "Yoruba",
|
| 193 |
+
"ig": "Igbo"
|
| 194 |
+
})
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
# =============================================================================
|
| 198 |
+
# APPLICATION CONFIGURATIONS
|
| 199 |
+
# =============================================================================
|
| 200 |
+
|
| 201 |
+
@dataclass
|
| 202 |
+
class AppConfig:
|
| 203 |
+
"""General application configuration"""
|
| 204 |
+
|
| 205 |
+
# App information
|
| 206 |
+
app_name: str = "FarmEyes"
|
| 207 |
+
app_version: str = "1.0.0"
|
| 208 |
+
app_tagline: str = "AI-Powered Crop Health Assistant"
|
| 209 |
+
|
| 210 |
+
# Gradio server settings
|
| 211 |
+
server_host: str = "0.0.0.0"
|
| 212 |
+
server_port: int = 7860
|
| 213 |
+
share: bool = False # Set to True for public Gradio link
|
| 214 |
+
|
| 215 |
+
# Debug mode
|
| 216 |
+
debug: bool = True
|
| 217 |
+
|
| 218 |
+
# Maximum image file size (in bytes) - 10MB
|
| 219 |
+
max_image_size: int = 10 * 1024 * 1024
|
| 220 |
+
|
| 221 |
+
# Supported image formats
|
| 222 |
+
supported_image_formats: List[str] = field(default_factory=lambda: [
|
| 223 |
+
".jpg", ".jpeg", ".png", ".webp", ".bmp"
|
| 224 |
+
])
|
| 225 |
+
|
| 226 |
+
# Confidence thresholds for user feedback
|
| 227 |
+
high_confidence_threshold: float = 0.85
|
| 228 |
+
medium_confidence_threshold: float = 0.60
|
| 229 |
+
low_confidence_threshold: float = 0.40
|
| 230 |
+
|
| 231 |
+
# Enable/disable features
|
| 232 |
+
enable_voice_input: bool = False # Future feature
|
| 233 |
+
enable_offline_mode: bool = False # Future feature
|
| 234 |
+
enable_history: bool = True
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
# =============================================================================
|
| 238 |
+
# DEVICE CONFIGURATION (Apple Silicon Specific)
|
| 239 |
+
# =============================================================================
|
| 240 |
+
|
| 241 |
+
@dataclass
|
| 242 |
+
class DeviceConfig:
|
| 243 |
+
"""Device and hardware configuration for Apple Silicon M1 Pro"""
|
| 244 |
+
|
| 245 |
+
# Primary compute device
|
| 246 |
+
# Options: 'mps' (Apple Silicon GPU), 'cuda' (NVIDIA GPU), 'cpu'
|
| 247 |
+
compute_device: str = "mps"
|
| 248 |
+
|
| 249 |
+
# Fallback device if primary is unavailable
|
| 250 |
+
fallback_device: str = "cpu"
|
| 251 |
+
|
| 252 |
+
# Enable MPS (Metal Performance Shaders) for PyTorch
|
| 253 |
+
use_mps: bool = True
|
| 254 |
+
|
| 255 |
+
# Memory management
|
| 256 |
+
# Set to True to clear GPU cache after each inference
|
| 257 |
+
clear_cache_after_inference: bool = True
|
| 258 |
+
|
| 259 |
+
@staticmethod
|
| 260 |
+
def get_device() -> str:
|
| 261 |
+
"""
|
| 262 |
+
Determine the best available device for computation.
|
| 263 |
+
Returns 'mps' for Apple Silicon, 'cuda' for NVIDIA, or 'cpu'.
|
| 264 |
+
"""
|
| 265 |
+
import torch
|
| 266 |
+
|
| 267 |
+
# Check for Apple Silicon MPS
|
| 268 |
+
if torch.backends.mps.is_available():
|
| 269 |
+
return "mps"
|
| 270 |
+
# Check for NVIDIA CUDA
|
| 271 |
+
elif torch.cuda.is_available():
|
| 272 |
+
return "cuda"
|
| 273 |
+
# Fallback to CPU
|
| 274 |
+
else:
|
| 275 |
+
return "cpu"
|
| 276 |
+
|
| 277 |
+
@staticmethod
|
| 278 |
+
def get_device_info() -> Dict[str, str]:
|
| 279 |
+
"""Get information about the current compute device."""
|
| 280 |
+
import torch
|
| 281 |
+
import platform
|
| 282 |
+
|
| 283 |
+
info = {
|
| 284 |
+
"platform": platform.system(),
|
| 285 |
+
"processor": platform.processor(),
|
| 286 |
+
"python_version": platform.python_version(),
|
| 287 |
+
"pytorch_version": torch.__version__,
|
| 288 |
+
"device": DeviceConfig.get_device()
|
| 289 |
+
}
|
| 290 |
+
|
| 291 |
+
if torch.backends.mps.is_available():
|
| 292 |
+
info["mps_available"] = "Yes"
|
| 293 |
+
info["mps_built"] = str(torch.backends.mps.is_built())
|
| 294 |
+
|
| 295 |
+
return info
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
# =============================================================================
|
| 299 |
+
# PROMPT TEMPLATES CONFIGURATION
|
| 300 |
+
# =============================================================================
|
| 301 |
+
|
| 302 |
+
@dataclass
|
| 303 |
+
class PromptConfig:
|
| 304 |
+
"""Configuration for N-ATLaS prompt templates"""
|
| 305 |
+
|
| 306 |
+
# System prompt for the N-ATLaS model
|
| 307 |
+
system_prompt: str = """You are FarmEyes, an AI agricultural assistant helping Nigerian farmers.
|
| 308 |
+
You provide advice about crop diseases and treatments in a clear, simple, and helpful manner.
|
| 309 |
+
Always be respectful and use language that farmers can easily understand.
|
| 310 |
+
When providing treatment costs, use Nigerian Naira (₦).
|
| 311 |
+
Focus on practical advice that farmers can implement."""
|
| 312 |
+
|
| 313 |
+
# Maximum length for translated text
|
| 314 |
+
max_translation_length: int = 500
|
| 315 |
+
|
| 316 |
+
# Temperature for different tasks
|
| 317 |
+
translation_temperature: float = 0.3 # Lower for more accurate translations
|
| 318 |
+
diagnosis_temperature: float = 0.7 # Higher for more natural explanations
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
# =============================================================================
|
| 322 |
+
# LOGGING CONFIGURATION
|
| 323 |
+
# =============================================================================
|
| 324 |
+
|
| 325 |
+
@dataclass
|
| 326 |
+
class LogConfig:
|
| 327 |
+
"""Logging configuration"""
|
| 328 |
+
|
| 329 |
+
# Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL
|
| 330 |
+
log_level: str = "INFO"
|
| 331 |
+
|
| 332 |
+
# Log file path
|
| 333 |
+
log_file: Path = BASE_DIR / "logs" / "farmeyes.log"
|
| 334 |
+
|
| 335 |
+
# Log format
|
| 336 |
+
log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
| 337 |
+
|
| 338 |
+
# Enable console logging
|
| 339 |
+
console_logging: bool = True
|
| 340 |
+
|
| 341 |
+
# Enable file logging
|
| 342 |
+
file_logging: bool = True
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
# =============================================================================
|
| 346 |
+
# INSTANTIATE DEFAULT CONFIGURATIONS
|
| 347 |
+
# =============================================================================
|
| 348 |
+
|
| 349 |
+
# Create default configuration instances
|
| 350 |
+
yolo_config = YOLOConfig()
|
| 351 |
+
natlas_config = NATLaSConfig()
|
| 352 |
+
language_config = LanguageConfig()
|
| 353 |
+
app_config = AppConfig()
|
| 354 |
+
device_config = DeviceConfig()
|
| 355 |
+
prompt_config = PromptConfig()
|
| 356 |
+
log_config = LogConfig()
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
# =============================================================================
|
| 360 |
+
# UTILITY FUNCTIONS
|
| 361 |
+
# =============================================================================
|
| 362 |
+
|
| 363 |
+
def get_disease_key(class_index: int) -> Optional[str]:
|
| 364 |
+
"""Get disease key from class index."""
|
| 365 |
+
return CLASS_INDEX_TO_KEY.get(class_index)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
def get_class_index(disease_key: str) -> Optional[int]:
|
| 369 |
+
"""Get class index from disease key."""
|
| 370 |
+
return KEY_TO_CLASS_INDEX.get(disease_key)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
def get_crop_type(class_index: int) -> Optional[str]:
|
| 374 |
+
"""Get crop type from class index."""
|
| 375 |
+
return CLASS_TO_CROP.get(class_index)
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
def is_healthy(class_index: int) -> bool:
|
| 379 |
+
"""Check if class index represents a healthy plant."""
|
| 380 |
+
return class_index in HEALTHY_CLASS_INDICES
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
def validate_config() -> Dict[str, bool]:
|
| 384 |
+
"""
|
| 385 |
+
Validate that all required configuration files and paths exist.
|
| 386 |
+
Returns a dictionary with validation results.
|
| 387 |
+
"""
|
| 388 |
+
validations = {
|
| 389 |
+
"knowledge_base_exists": KNOWLEDGE_BASE_PATH.exists(),
|
| 390 |
+
"ui_translations_exists": UI_TRANSLATIONS_PATH.exists(),
|
| 391 |
+
"models_dir_exists": MODELS_DIR.exists(),
|
| 392 |
+
"yolo_model_exists": yolo_config.model_path.exists(),
|
| 393 |
+
"natlas_model_exists": natlas_config.gguf_path.exists(),
|
| 394 |
+
}
|
| 395 |
+
return validations
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def print_config_summary():
|
| 399 |
+
"""Print a summary of the current configuration."""
|
| 400 |
+
print("=" * 60)
|
| 401 |
+
print("FarmEyes Configuration Summary")
|
| 402 |
+
print("=" * 60)
|
| 403 |
+
print(f"\n📁 Paths:")
|
| 404 |
+
print(f" Base Directory: {BASE_DIR}")
|
| 405 |
+
print(f" Knowledge Base: {KNOWLEDGE_BASE_PATH}")
|
| 406 |
+
print(f" UI Translations: {UI_TRANSLATIONS_PATH}")
|
| 407 |
+
print(f" Models Directory: {MODELS_DIR}")
|
| 408 |
+
|
| 409 |
+
print(f"\n🤖 YOLOv11 Model:")
|
| 410 |
+
print(f" Model Path: {yolo_config.model_path}")
|
| 411 |
+
print(f" Confidence Threshold: {yolo_config.confidence_threshold}")
|
| 412 |
+
print(f" Device: {yolo_config.device}")
|
| 413 |
+
|
| 414 |
+
print(f"\n🗣️ N-ATLaS Model:")
|
| 415 |
+
print(f" HuggingFace Repo: {natlas_config.hf_repo}")
|
| 416 |
+
print(f" Model File: {natlas_config.model_filename}")
|
| 417 |
+
print(f" Context Length: {natlas_config.context_length}")
|
| 418 |
+
print(f" GPU Layers: {natlas_config.n_gpu_layers}")
|
| 419 |
+
|
| 420 |
+
print(f"\n🌍 Languages:")
|
| 421 |
+
print(f" Supported: {', '.join(language_config.supported_languages)}")
|
| 422 |
+
print(f" Default: {language_config.default_language}")
|
| 423 |
+
|
| 424 |
+
print(f"\n📱 Application:")
|
| 425 |
+
print(f" Name: {app_config.app_name} v{app_config.app_version}")
|
| 426 |
+
print(f" Server: {app_config.server_host}:{app_config.server_port}")
|
| 427 |
+
print(f" Debug Mode: {app_config.debug}")
|
| 428 |
+
|
| 429 |
+
print(f"\n💻 Device:")
|
| 430 |
+
device_info = device_config.get_device_info()
|
| 431 |
+
print(f" Platform: {device_info.get('platform', 'Unknown')}")
|
| 432 |
+
print(f" Compute Device: {device_info.get('device', 'Unknown')}")
|
| 433 |
+
print(f" PyTorch Version: {device_info.get('pytorch_version', 'Unknown')}")
|
| 434 |
+
|
| 435 |
+
print("\n" + "=" * 60)
|
| 436 |
+
|
| 437 |
+
# Validation
|
| 438 |
+
print("\n🔍 Configuration Validation:")
|
| 439 |
+
validations = validate_config()
|
| 440 |
+
for key, value in validations.items():
|
| 441 |
+
status = "✅" if value else "❌"
|
| 442 |
+
print(f" {status} {key.replace('_', ' ').title()}")
|
| 443 |
+
|
| 444 |
+
print("\n" + "=" * 60)
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
# =============================================================================
|
| 448 |
+
# MAIN - Run configuration check
|
| 449 |
+
# =============================================================================
|
| 450 |
+
|
| 451 |
+
if __name__ == "__main__":
|
| 452 |
+
print_config_summary()
|
.ipynb_checkpoints/test-checkpoint.ipynb
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"cells": [],
|
| 3 |
+
"metadata": {},
|
| 4 |
+
"nbformat": 4,
|
| 5 |
+
"nbformat_minor": 5
|
| 6 |
+
}
|
Dockerfile
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# =============================================================================
|
| 2 |
+
# FarmEyes - HuggingFace Spaces Dockerfile
|
| 3 |
+
# =============================================================================
|
| 4 |
+
# AI-Powered Crop Disease Detection for African Farmers
|
| 5 |
+
#
|
| 6 |
+
# This Dockerfile is optimized for HuggingFace Spaces free tier:
|
| 7 |
+
# - Uses Python 3.10 slim image
|
| 8 |
+
# - Installs llama-cpp-python for CPU inference
|
| 9 |
+
# - Downloads N-ATLaS GGUF model at runtime (~4.92GB)
|
| 10 |
+
# - Runs on port 7860 (HF Spaces default)
|
| 11 |
+
# =============================================================================
|
| 12 |
+
|
| 13 |
+
FROM python:3.10-slim
|
| 14 |
+
|
| 15 |
+
# Set working directory
|
| 16 |
+
WORKDIR /app
|
| 17 |
+
|
| 18 |
+
# Set environment variables
|
| 19 |
+
ENV PYTHONUNBUFFERED=1
|
| 20 |
+
ENV PYTHONDONTWRITEBYTECODE=1
|
| 21 |
+
ENV HOST=0.0.0.0
|
| 22 |
+
ENV PORT=7860
|
| 23 |
+
|
| 24 |
+
# Install system dependencies
|
| 25 |
+
# - ffmpeg: for audio processing (Whisper)
|
| 26 |
+
# - libsm6, libxext6, libgl1: for OpenCV (image processing)
|
| 27 |
+
# - build-essential, cmake: for compiling llama-cpp-python
|
| 28 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 29 |
+
ffmpeg \
|
| 30 |
+
libsm6 \
|
| 31 |
+
libxext6 \
|
| 32 |
+
libgl1-mesa-glx \
|
| 33 |
+
build-essential \
|
| 34 |
+
cmake \
|
| 35 |
+
git \
|
| 36 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 37 |
+
|
| 38 |
+
# Copy requirements first (for Docker cache optimization)
|
| 39 |
+
COPY requirements.txt .
|
| 40 |
+
|
| 41 |
+
# Upgrade pip
|
| 42 |
+
RUN pip install --no-cache-dir --upgrade pip
|
| 43 |
+
|
| 44 |
+
# Install Python dependencies
|
| 45 |
+
# Note: llama-cpp-python is compiled for CPU (no CUDA on free tier)
|
| 46 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 47 |
+
|
| 48 |
+
# Install llama-cpp-python for CPU
|
| 49 |
+
# This enables GGUF model inference
|
| 50 |
+
RUN pip install --no-cache-dir llama-cpp-python
|
| 51 |
+
|
| 52 |
+
# Copy all application code
|
| 53 |
+
COPY . .
|
| 54 |
+
|
| 55 |
+
# Create necessary directories
|
| 56 |
+
RUN mkdir -p /app/uploads /app/temp
|
| 57 |
+
|
| 58 |
+
# Expose port 7860 (HuggingFace Spaces default)
|
| 59 |
+
EXPOSE 7860
|
| 60 |
+
|
| 61 |
+
# Health check
|
| 62 |
+
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
|
| 63 |
+
CMD curl -f http://localhost:7860/api/health || exit 1
|
| 64 |
+
|
| 65 |
+
# Run the application
|
| 66 |
+
# The app will:
|
| 67 |
+
# 1. Start FastAPI server
|
| 68 |
+
# 2. Download N-ATLaS GGUF model on first request (~5-15 min)
|
| 69 |
+
# 3. Serve the web interface
|
| 70 |
+
CMD ["python", "main.py"]
|
README.md
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: FarmEyes
|
| 3 |
+
emoji: 🌱
|
| 4 |
+
colorFrom: green
|
| 5 |
+
colorTo: yellow
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
pinned: false
|
| 9 |
+
suggested_hardware: cpu-basic
|
| 10 |
+
---
|
| 11 |
+
|
| 12 |
+
# 🌱 FarmEyes
|
| 13 |
+
|
| 14 |
+
**AI-Powered Crop Disease Detection for African Farmers**
|
| 15 |
+
|
| 16 |
+
[](https://awarri.com)
|
| 17 |
+
[](https://huggingface.co/NCAIR1/N-ATLaS)
|
| 18 |
+
|
| 19 |
+
---
|
| 20 |
+
|
| 21 |
+
## 🎯 What is FarmEyes?
|
| 22 |
+
|
| 23 |
+
FarmEyes is an AI application that helps African farmers identify crop diseases and get treatment recommendations in their native languages. Simply upload a photo of your crop, and FarmEyes will:
|
| 24 |
+
|
| 25 |
+
1. **Detect** the disease using computer vision (YOLOv11)
|
| 26 |
+
2. **Diagnose** the condition with severity assessment
|
| 27 |
+
3. **Translate** all information to your preferred language
|
| 28 |
+
4. **Chat** with an AI assistant for follow-up questions
|
| 29 |
+
|
| 30 |
+
---
|
| 31 |
+
|
| 32 |
+
## 🌍 Supported Languages
|
| 33 |
+
|
| 34 |
+
| Language | Native Name |
|
| 35 |
+
|----------|-------------|
|
| 36 |
+
| 🇬🇧 English | English |
|
| 37 |
+
| 🇳🇬 Hausa | Yaren Hausa |
|
| 38 |
+
| 🇳🇬 Yoruba | Èdè Yorùbá |
|
| 39 |
+
| 🇳🇬 Igbo | Asụsụ Igbo |
|
| 40 |
+
|
| 41 |
+
---
|
| 42 |
+
|
| 43 |
+
## 🦠 Detectable Diseases
|
| 44 |
+
|
| 45 |
+
| Crop | Diseases |
|
| 46 |
+
|------|----------|
|
| 47 |
+
| 🌿 **Cassava** | Bacterial Blight, Mosaic Virus |
|
| 48 |
+
| 🍫 **Cocoa** | Monilia Disease, Phytophthora Disease |
|
| 49 |
+
| 🍅 **Tomato** | Gray Mold Disease, Wilt Disease |
|
| 50 |
+
|
| 51 |
+
---
|
| 52 |
+
|
| 53 |
+
## 🚀 How to Use
|
| 54 |
+
|
| 55 |
+
### Step 1: Select Language
|
| 56 |
+
Choose your preferred language from the welcome screen.
|
| 57 |
+
|
| 58 |
+
### Step 2: Upload Image
|
| 59 |
+
Take a photo of the affected crop leaf and upload it.
|
| 60 |
+
|
| 61 |
+
### Step 3: View Results
|
| 62 |
+
- Disease name and confidence score
|
| 63 |
+
- Severity level (Low/Moderate/High/Critical)
|
| 64 |
+
- Treatment recommendations
|
| 65 |
+
- Cost estimates in Nigerian Naira (₦)
|
| 66 |
+
|
| 67 |
+
### Step 4: Ask Questions
|
| 68 |
+
Use the chat feature to ask follow-up questions about the diagnosis.
|
| 69 |
+
|
| 70 |
+
---
|
| 71 |
+
|
| 72 |
+
## 🔧 Technology Stack
|
| 73 |
+
|
| 74 |
+
| Component | Technology |
|
| 75 |
+
|-----------|------------|
|
| 76 |
+
| **Disease Detection** | YOLOv11 (trained on African crops) |
|
| 77 |
+
| **Language Model** | N-ATLaS (Nigerian multilingual AI) |
|
| 78 |
+
| **Speech-to-Text** | OpenAI Whisper |
|
| 79 |
+
| **Backend** | FastAPI |
|
| 80 |
+
| **Frontend** | Custom HTML/CSS/JS |
|
| 81 |
+
|
| 82 |
+
---
|
| 83 |
+
|
| 84 |
+
## 📱 Features
|
| 85 |
+
|
| 86 |
+
- ✅ **Image Upload** - Drag & drop or click to upload
|
| 87 |
+
- ✅ **Real-time Detection** - Results in seconds
|
| 88 |
+
- ✅ **Multilingual Support** - 4 Nigerian languages
|
| 89 |
+
- ✅ **Voice Input** - Speak your questions
|
| 90 |
+
- ✅ **Text-to-Speech** - Listen to responses
|
| 91 |
+
- ✅ **Treatment Advice** - Practical farming guidance
|
| 92 |
+
- ✅ **Cost Estimates** - In Nigerian Naira
|
| 93 |
+
|
| 94 |
+
---
|
| 95 |
+
|
| 96 |
+
## ⚠️ First Startup Notice
|
| 97 |
+
|
| 98 |
+
**Please be patient on first use!**
|
| 99 |
+
|
| 100 |
+
The N-ATLaS language model (~4.92GB) is downloaded automatically on first startup. This may take **5-15 minutes** depending on connection speed. Subsequent uses will be much faster.
|
| 101 |
+
|
| 102 |
+
---
|
| 103 |
+
|
| 104 |
+
## 🏆 About
|
| 105 |
+
|
| 106 |
+
FarmEyes was built for the **Awarri Developer Challenge 2025** to address the critical need for accessible agricultural AI in Africa.
|
| 107 |
+
|
| 108 |
+
**The Problem:**
|
| 109 |
+
- 20-80% crop losses annually due to diseases
|
| 110 |
+
- Only 1 extension worker per 10,000 farmers (FAO recommends 1:1,000)
|
| 111 |
+
- Agricultural knowledge locked in English
|
| 112 |
+
|
| 113 |
+
**Our Solution:**
|
| 114 |
+
- AI-powered disease detection accessible via smartphone
|
| 115 |
+
- Native language support through N-ATLaS
|
| 116 |
+
- Practical, localized treatment recommendations
|
| 117 |
+
|
| 118 |
+
---
|
| 119 |
+
|
| 120 |
+
## 👨💻 Developer
|
| 121 |
+
|
| 122 |
+
**Fola-AI**
|
| 123 |
+
|
| 124 |
+
- 🤗 HuggingFace: [@Fola-AI](https://huggingface.co/Fola-AI)
|
| 125 |
+
|
| 126 |
+
---
|
| 127 |
+
|
| 128 |
+
## 📄 License
|
| 129 |
+
|
| 130 |
+
Apache 2.0
|
| 131 |
+
|
| 132 |
+
---
|
| 133 |
+
|
| 134 |
+
## 🙏 Acknowledgments
|
| 135 |
+
|
| 136 |
+
- [NCAIR](https://ncair.nitda.gov.ng/) for N-ATLaS model
|
| 137 |
+
- [Ultralytics](https://ultralytics.com/) for YOLOv11
|
| 138 |
+
- [HuggingFace](https://huggingface.co/) for hosting
|
| 139 |
+
- [Awarri](https://awarri.com/) for the challenge opportunity
|
| 140 |
+
|
| 141 |
+
---
|
| 142 |
+
|
| 143 |
+
*Built with ❤️ for African Farmers*
|
api/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
api/__init__.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes API Routes Package
|
| 3 |
+
===========================
|
| 4 |
+
REST API endpoint modules for the FarmEyes application.
|
| 5 |
+
|
| 6 |
+
Endpoints:
|
| 7 |
+
- /api/detect - Disease detection from images
|
| 8 |
+
- /api/chat - Contextual chat with N-ATLaS
|
| 9 |
+
- /api/transcribe - Voice-to-text with Whisper
|
| 10 |
+
- /api/session - Session management
|
| 11 |
+
- /api/translate - Text translation
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
from api.routes.detection import router as detection_router
|
| 15 |
+
from api.routes.chat import router as chat_router
|
| 16 |
+
from api.routes.transcribe import router as transcribe_router
|
| 17 |
+
|
| 18 |
+
__all__ = [
|
| 19 |
+
"detection_router",
|
| 20 |
+
"chat_router",
|
| 21 |
+
"transcribe_router"
|
| 22 |
+
]
|
api/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (699 Bytes). View file
|
|
|
api/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (718 Bytes). View file
|
|
|
api/routes/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
api/routes/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes API Routes
|
| 3 |
+
===================
|
| 4 |
+
Individual route modules for REST API endpoints.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from api.routes.detection import router as detection_router
|
| 8 |
+
from api.routes.chat import router as chat_router
|
| 9 |
+
from api.routes.transcribe import router as transcribe_router
|
| 10 |
+
|
| 11 |
+
__all__ = [
|
| 12 |
+
"detection_router",
|
| 13 |
+
"chat_router",
|
| 14 |
+
"transcribe_router"
|
| 15 |
+
]
|
api/routes/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (460 Bytes). View file
|
|
|
api/routes/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (479 Bytes). View file
|
|
|
api/routes/__pycache__/chat.cpython-310.pyc
ADDED
|
Binary file (8.14 kB). View file
|
|
|
api/routes/__pycache__/chat.cpython-312.pyc
ADDED
|
Binary file (12 kB). View file
|
|
|
api/routes/__pycache__/detection.cpython-310.pyc
ADDED
|
Binary file (9.3 kB). View file
|
|
|
api/routes/__pycache__/detection.cpython-312.pyc
ADDED
|
Binary file (14.2 kB). View file
|
|
|
api/routes/__pycache__/transcribe.cpython-310.pyc
ADDED
|
Binary file (9.46 kB). View file
|
|
|
api/routes/__pycache__/transcribe.cpython-312.pyc
ADDED
|
Binary file (14.5 kB). View file
|
|
|
api/routes/__pycache__/tts.cpython-310.pyc
ADDED
|
Binary file (4.65 kB). View file
|
|
|
api/routes/chat.py
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Chat API Routes
|
| 3 |
+
========================
|
| 4 |
+
REST API endpoints for contextual agricultural chat.
|
| 5 |
+
|
| 6 |
+
Endpoints:
|
| 7 |
+
- POST /api/chat - Send message and get response
|
| 8 |
+
- GET /api/chat/welcome - Get welcome message for chat page
|
| 9 |
+
- GET /api/chat/history - Get chat history for session
|
| 10 |
+
- DELETE /api/chat/history - Clear chat history
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
from pathlib import Path
|
| 15 |
+
from typing import Optional, List
|
| 16 |
+
from datetime import datetime
|
| 17 |
+
import logging
|
| 18 |
+
|
| 19 |
+
from fastapi import APIRouter, HTTPException, Query
|
| 20 |
+
from fastapi.responses import JSONResponse
|
| 21 |
+
from pydantic import BaseModel, Field
|
| 22 |
+
|
| 23 |
+
# Configure logging
|
| 24 |
+
logging.basicConfig(level=logging.INFO)
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
|
| 27 |
+
# Create router
|
| 28 |
+
router = APIRouter(prefix="/api/chat", tags=["Chat"])
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# =============================================================================
|
| 32 |
+
# REQUEST/RESPONSE MODELS
|
| 33 |
+
# =============================================================================
|
| 34 |
+
|
| 35 |
+
class ChatRequest(BaseModel):
|
| 36 |
+
"""Request model for chat message."""
|
| 37 |
+
session_id: str = Field(..., description="Session ID")
|
| 38 |
+
message: str = Field(..., min_length=1, max_length=2000, description="User message")
|
| 39 |
+
language: str = Field(default="en", description="Response language (en, ha, yo, ig)")
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class ChatResponse(BaseModel):
|
| 43 |
+
"""Response model for chat message."""
|
| 44 |
+
success: bool
|
| 45 |
+
response: str
|
| 46 |
+
session_id: str
|
| 47 |
+
language: str
|
| 48 |
+
is_redirect: bool = False
|
| 49 |
+
context: Optional[dict] = None
|
| 50 |
+
timestamp: str
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class WelcomeResponse(BaseModel):
|
| 54 |
+
"""Response model for welcome message."""
|
| 55 |
+
success: bool
|
| 56 |
+
response: str
|
| 57 |
+
session_id: str
|
| 58 |
+
language: str
|
| 59 |
+
context: Optional[dict] = None
|
| 60 |
+
is_welcome: bool = True
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class HistoryResponse(BaseModel):
|
| 64 |
+
"""Response model for chat history."""
|
| 65 |
+
success: bool
|
| 66 |
+
session_id: str
|
| 67 |
+
messages: List[dict]
|
| 68 |
+
total_messages: int
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# =============================================================================
|
| 72 |
+
# ENDPOINTS
|
| 73 |
+
# =============================================================================
|
| 74 |
+
|
| 75 |
+
@router.post("/", response_model=ChatResponse)
|
| 76 |
+
async def send_chat_message(request: ChatRequest):
|
| 77 |
+
"""
|
| 78 |
+
Send a chat message and get AI response.
|
| 79 |
+
|
| 80 |
+
The assistant will:
|
| 81 |
+
- Answer questions about the diagnosed disease
|
| 82 |
+
- Provide related agricultural advice
|
| 83 |
+
- Respond in the user's preferred language
|
| 84 |
+
- Redirect off-topic questions politely
|
| 85 |
+
|
| 86 |
+
Requires an active session with a diagnosis.
|
| 87 |
+
"""
|
| 88 |
+
try:
|
| 89 |
+
# Validate language
|
| 90 |
+
valid_languages = ["en", "ha", "yo", "ig"]
|
| 91 |
+
language = request.language if request.language in valid_languages else "en"
|
| 92 |
+
|
| 93 |
+
# Validate message
|
| 94 |
+
message = request.message.strip()
|
| 95 |
+
if not message:
|
| 96 |
+
raise HTTPException(status_code=400, detail="Message cannot be empty")
|
| 97 |
+
|
| 98 |
+
if len(message) > 2000:
|
| 99 |
+
raise HTTPException(status_code=400, detail="Message too long (max 2000 characters)")
|
| 100 |
+
|
| 101 |
+
# Import chat service
|
| 102 |
+
from services.chat_service import get_chat_service
|
| 103 |
+
|
| 104 |
+
chat_service = get_chat_service()
|
| 105 |
+
|
| 106 |
+
# Get response
|
| 107 |
+
logger.info(f"Chat request from session {request.session_id[:8]}...")
|
| 108 |
+
result = chat_service.chat(
|
| 109 |
+
session_id=request.session_id,
|
| 110 |
+
message=message,
|
| 111 |
+
language=language
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
if not result.get("success", False):
|
| 115 |
+
# Handle specific error cases
|
| 116 |
+
error_type = result.get("error", "unknown")
|
| 117 |
+
|
| 118 |
+
if error_type == "no_diagnosis":
|
| 119 |
+
raise HTTPException(
|
| 120 |
+
status_code=400,
|
| 121 |
+
detail=result.get("response", "Please analyze an image first")
|
| 122 |
+
)
|
| 123 |
+
else:
|
| 124 |
+
raise HTTPException(
|
| 125 |
+
status_code=500,
|
| 126 |
+
detail=result.get("response", "Failed to generate response")
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
# Build response
|
| 130 |
+
response_data = {
|
| 131 |
+
"success": True,
|
| 132 |
+
"response": result.get("response", ""),
|
| 133 |
+
"session_id": result.get("session_id", request.session_id),
|
| 134 |
+
"language": result.get("language", language),
|
| 135 |
+
"is_redirect": result.get("is_redirect", False),
|
| 136 |
+
"context": result.get("context"),
|
| 137 |
+
"timestamp": datetime.now().isoformat()
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
return JSONResponse(content=response_data)
|
| 141 |
+
|
| 142 |
+
except HTTPException:
|
| 143 |
+
raise
|
| 144 |
+
except Exception as e:
|
| 145 |
+
logger.error(f"Chat failed: {e}")
|
| 146 |
+
raise HTTPException(status_code=500, detail=f"Chat failed: {str(e)}")
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
@router.get("/welcome", response_model=WelcomeResponse)
|
| 150 |
+
async def get_welcome_message(
|
| 151 |
+
session_id: str = Query(..., description="Session ID"),
|
| 152 |
+
language: str = Query(default="en", description="Language code")
|
| 153 |
+
):
|
| 154 |
+
"""
|
| 155 |
+
Get welcome message for chat page.
|
| 156 |
+
|
| 157 |
+
Returns a personalized welcome message based on the
|
| 158 |
+
current diagnosis in the session. Should be called
|
| 159 |
+
when user navigates to the chat page.
|
| 160 |
+
"""
|
| 161 |
+
try:
|
| 162 |
+
# Validate language
|
| 163 |
+
valid_languages = ["en", "ha", "yo", "ig"]
|
| 164 |
+
language = language if language in valid_languages else "en"
|
| 165 |
+
|
| 166 |
+
# Import chat service
|
| 167 |
+
from services.chat_service import get_chat_service
|
| 168 |
+
|
| 169 |
+
chat_service = get_chat_service()
|
| 170 |
+
|
| 171 |
+
# Get welcome message
|
| 172 |
+
result = chat_service.get_welcome_message(session_id, language)
|
| 173 |
+
|
| 174 |
+
if not result.get("success", False):
|
| 175 |
+
error_type = result.get("error", "unknown")
|
| 176 |
+
|
| 177 |
+
if error_type == "no_diagnosis":
|
| 178 |
+
raise HTTPException(
|
| 179 |
+
status_code=400,
|
| 180 |
+
detail=result.get("response", "Please analyze an image first")
|
| 181 |
+
)
|
| 182 |
+
else:
|
| 183 |
+
raise HTTPException(
|
| 184 |
+
status_code=500,
|
| 185 |
+
detail="Failed to generate welcome message"
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
response_data = {
|
| 189 |
+
"success": True,
|
| 190 |
+
"response": result.get("response", ""),
|
| 191 |
+
"session_id": result.get("session_id", session_id),
|
| 192 |
+
"language": result.get("language", language),
|
| 193 |
+
"context": result.get("context"),
|
| 194 |
+
"is_welcome": True
|
| 195 |
+
}
|
| 196 |
+
|
| 197 |
+
return JSONResponse(content=response_data)
|
| 198 |
+
|
| 199 |
+
except HTTPException:
|
| 200 |
+
raise
|
| 201 |
+
except Exception as e:
|
| 202 |
+
logger.error(f"Get welcome failed: {e}")
|
| 203 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@router.get("/history", response_model=HistoryResponse)
async def get_chat_history(
    session_id: str = Query(..., description="Session ID"),
    limit: int = Query(default=50, ge=1, le=100, description="Maximum messages to return")
):
    """
    Get chat history for a session.

    Returns all messages in the current chat session,
    useful for restoring chat state when user returns
    to the chat page.
    """
    try:
        from services.chat_service import get_chat_service

        history = get_chat_service().get_history(session_id)

        # Keep only the most recent `limit` entries.
        trimmed = history[-limit:] if len(history) > limit else history

        return JSONResponse(content={
            "success": True,
            "session_id": session_id,
            "messages": trimmed,
            "total_messages": len(trimmed)
        })

    except Exception as e:
        logger.error(f"Get history failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
@router.delete("/history")
async def clear_chat_history(
    session_id: str = Query(..., description="Session ID")
):
    """
    Clear chat history for a session.

    Removes all messages but keeps the diagnosis context,
    allowing user to start a fresh conversation about
    the same diagnosis.
    """
    try:
        from services.chat_service import get_chat_service

        # Guard clause: unknown sessions are a 404, not a silent no-op.
        cleared = get_chat_service().clear_history(session_id)
        if not cleared:
            raise HTTPException(status_code=404, detail="Session not found")

        return JSONResponse(content={
            "success": True,
            "message": "Chat history cleared",
            "session_id": session_id
        })

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Clear history failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
@router.get("/context")
async def get_diagnosis_context(
    session_id: str = Query(..., description="Session ID")
):
    """
    Get current diagnosis context for chat.

    Returns the diagnosis information being used as
    context for the chat assistant. Useful for displaying
    context banner in chat UI.
    """
    try:
        from services.session_manager import get_session_manager

        diagnosis = get_session_manager().get_diagnosis(session_id)

        # Guard clause: no (valid) diagnosis means nothing to show.
        if not diagnosis or not diagnosis.is_valid():
            raise HTTPException(
                status_code=404,
                detail="No diagnosis found for this session"
            )

        return JSONResponse(content={
            "success": True,
            "session_id": session_id,
            "context": diagnosis.to_dict(),
            "context_string": diagnosis.get_context_string()
        })

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Get context failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@router.post("/voice")
async def chat_with_voice(
    session_id: str = Query(..., description="Session ID"),
    language: str = Query(default="en", description="Language code"),
    text: str = Query(..., description="Transcribed text from voice")
):
    """
    Send chat message from voice input.

    Convenience endpoint that accepts already-transcribed
    text from the voice input system. The transcription
    is done separately via /api/transcribe.

    This is the final step in the voice chat pipeline:
    Voice → Whisper → Text → This endpoint → Response
    """
    try:
        # Delegate to the main chat endpoint so both paths share the
        # same validation and response formatting.
        voice_request = ChatRequest(
            session_id=session_id,
            message=text,
            language=language
        )
        return await send_chat_message(voice_request)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Voice chat failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
api/routes/detection.py
ADDED
|
@@ -0,0 +1,381 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Detection API Routes
|
| 3 |
+
=============================
|
| 4 |
+
REST API endpoints for crop disease detection.
|
| 5 |
+
|
| 6 |
+
Endpoints:
|
| 7 |
+
- POST /api/detect - Analyze crop image for diseases
|
| 8 |
+
- GET /api/detect/status - Check model status
|
| 9 |
+
- GET /api/detect/classes - Get supported disease classes
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import sys
|
| 13 |
+
import io
|
| 14 |
+
import base64
|
| 15 |
+
from pathlib import Path
|
| 16 |
+
from typing import Optional
|
| 17 |
+
from datetime import datetime
|
| 18 |
+
import logging
|
| 19 |
+
|
| 20 |
+
from fastapi import APIRouter, File, UploadFile, Form, HTTPException, Query
|
| 21 |
+
from fastapi.responses import JSONResponse
|
| 22 |
+
from pydantic import BaseModel, Field
|
| 23 |
+
|
| 24 |
+
# Configure logging
# NOTE(review): basicConfig at import time configures the root logger as a
# module side effect; consider configuring logging once at app startup
# (main.py) instead of in every route module.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Create router — every endpoint in this module is mounted under /api/detect.
router = APIRouter(prefix="/api/detect", tags=["Detection"])
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# =============================================================================
|
| 33 |
+
# REQUEST/RESPONSE MODELS
|
| 34 |
+
# =============================================================================
|
| 35 |
+
|
| 36 |
+
class DetectionRequest(BaseModel):
    """Request model for detection with base64 image."""
    # Raw base64 payload or a full data URL ("data:image/...;base64,...").
    image_base64: str = Field(..., description="Base64 encoded image data")
    language: str = Field(default="en", description="Language code (en, ha, yo, ig)")
    # Omit to have the server create a fresh session.
    session_id: Optional[str] = Field(default=None, description="Session ID for context")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class DetectionResponse(BaseModel):
    """Response model for disease detection."""
    success: bool
    session_id: str
    # Summary of the top detection: disease name, crop, confidence, severity.
    detection: dict
    # Full diagnosis report (includes treatments; see diagnosis_generator).
    diagnosis: dict
    language: str
    # ISO-8601 timestamp of when the detection completed.
    timestamp: str
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class StatusResponse(BaseModel):
    """Response model for service status."""
    status: str
    # Whether the YOLOv11 detection model is loaded in memory.
    yolo_loaded: bool
    # Whether the N-ATLaS language model is loaded.
    natlas_loaded: bool
    knowledge_base_loaded: bool
    supported_languages: list
    supported_crops: list
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class ClassesResponse(BaseModel):
    """Response model for supported classes."""
    total_classes: int
    # One entry per class: {"index", "name", "key", "crop"}.
    classes: list
    # CROP_TYPES mapping from config.py — shape defined there.
    crops: dict
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# =============================================================================
|
| 71 |
+
# HELPER FUNCTIONS
|
| 72 |
+
# =============================================================================
|
| 73 |
+
|
| 74 |
+
def decode_base64_image(base64_string: str) -> bytes:
    """
    Decode base64 image string to bytes.

    Accepts either a raw base64 payload or a full data URL
    (e.g. "data:image/png;base64,...").

    Args:
        base64_string: Base64 encoded image, optionally with a data URL prefix

    Returns:
        Image bytes

    Raises:
        ValueError: If the payload is not valid base64.
    """
    # Strip a data URL header if present. Split on the FIRST comma only:
    # splitting on every comma and taking [1] would silently truncate a
    # payload that itself contained a comma.
    if "," in base64_string:
        base64_string = base64_string.split(",", 1)[1]

    try:
        return base64.b64decode(base64_string)
    except Exception as e:
        # Chain the cause so callers/logs can see the underlying error.
        raise ValueError(f"Invalid base64 image: {e}") from e
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def validate_image_format(filename: str) -> bool:
    """
    Check whether a filename carries a supported image extension.

    Args:
        filename: Image filename

    Returns:
        True if the extension (case-insensitive) is a supported format
    """
    allowed = {".jpg", ".jpeg", ".png", ".webp", ".bmp"}
    return Path(filename).suffix.lower() in allowed
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# =============================================================================
|
| 110 |
+
# ENDPOINTS
|
| 111 |
+
# =============================================================================
|
| 112 |
+
|
| 113 |
+
@router.post("/", response_model=DetectionResponse)
async def detect_disease(
    file: UploadFile = File(..., description="Crop image file"),
    language: str = Form(default="en", description="Language code"),
    session_id: Optional[str] = Form(default=None, description="Session ID")
):
    """
    Detect crop disease from uploaded image.

    Analyzes the image using YOLOv11 model and returns:
    - Disease detection results
    - Complete diagnosis with treatments
    - All content translated to selected language

    Supported formats: JPG, JPEG, PNG, WEBP, BMP
    Maximum file size: 10MB

    Raises:
        HTTPException: 400 for bad format/size or unreadable image data,
            500 when the detection pipeline itself fails.
    """
    try:
        # Validate file format by extension first (cheap check).
        if not file.filename or not validate_image_format(file.filename):
            raise HTTPException(
                status_code=400,
                detail="Invalid image format. Supported: JPG, JPEG, PNG, WEBP, BMP"
            )

        contents = await file.read()

        # Validate file size (10MB max).
        max_size = 10 * 1024 * 1024
        if len(contents) > max_size:
            raise HTTPException(
                status_code=400,
                detail=f"File too large. Maximum size: {max_size // (1024*1024)}MB"
            )

        # Unsupported language codes fall back to English.
        valid_languages = ["en", "ha", "yo", "ig"]
        if language not in valid_languages:
            language = "en"

        # Imported lazily so the module loads before heavy model deps.
        from services.session_manager import get_session_manager, DiagnosisContext
        from services.diagnosis_generator import generate_diagnosis_with_image
        from PIL import Image

        # Get or create session
        session_manager = get_session_manager()
        session = session_manager.get_or_create_session(session_id, language)

        # A file with a valid extension can still be corrupt or not an
        # image at all; report that as a client error (400), not a 500.
        try:
            image = Image.open(io.BytesIO(contents))
        except Exception:
            raise HTTPException(
                status_code=400,
                detail="Could not read image data. Please upload a valid image."
            )

        # Generate diagnosis
        logger.info(f"Processing detection for session {session.session_id[:8]}...")
        report, annotated_image = generate_diagnosis_with_image(image, language)

        # Update session with diagnosis context so the chat can use it.
        diagnosis_context = DiagnosisContext.from_diagnosis_report(report)
        session_manager.update_diagnosis(session.session_id, diagnosis_context)

        # Build response
        response_data = {
            "success": True,
            "session_id": session.session_id,
            "detection": {
                "disease_name": report.disease_name,
                "crop_type": report.crop_type,
                "confidence": report.confidence,
                "confidence_percent": round(report.confidence * 100, 1),
                "severity_level": report.severity_level,
                "is_healthy": report.is_healthy
            },
            "diagnosis": report.to_dict(),
            "language": language,
            "timestamp": datetime.now().isoformat()
        }

        logger.info(f"Detection complete: {report.disease_name} ({report.confidence:.1%})")

        return JSONResponse(content=response_data)

    except HTTPException:
        raise
    except Exception as e:
        # logger.exception keeps the traceback, unlike logger.error(f"...").
        logger.exception("Detection failed")
        raise HTTPException(status_code=500, detail=f"Detection failed: {str(e)}")
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
@router.post("/base64", response_model=DetectionResponse)
async def detect_disease_base64(request: DetectionRequest):
    """
    Detect crop disease from base64 encoded image.

    Alternative endpoint for clients that prefer sending
    images as base64 strings rather than file uploads.

    Raises:
        HTTPException: 400 for invalid base64/size or unreadable image
            data, 500 when the detection pipeline itself fails.
    """
    try:
        # Decode base64 image; bad encoding is a client error.
        try:
            image_bytes = decode_base64_image(request.image_base64)
        except ValueError as e:
            raise HTTPException(status_code=400, detail=str(e))

        # Validate size (10MB max).
        max_size = 10 * 1024 * 1024
        if len(image_bytes) > max_size:
            raise HTTPException(
                status_code=400,
                detail=f"Image too large. Maximum size: {max_size // (1024*1024)}MB"
            )

        # Unsupported language codes fall back to English.
        valid_languages = ["en", "ha", "yo", "ig"]
        language = request.language if request.language in valid_languages else "en"

        # Imported lazily so the module loads before heavy model deps.
        from services.session_manager import get_session_manager, DiagnosisContext
        from services.diagnosis_generator import generate_diagnosis_with_image
        from PIL import Image

        # Get or create session
        session_manager = get_session_manager()
        session = session_manager.get_or_create_session(request.session_id, language)

        # Well-formed base64 can still decode to non-image bytes; report
        # that as a client error (400), not a 500.
        try:
            image = Image.open(io.BytesIO(image_bytes))
        except Exception:
            raise HTTPException(
                status_code=400,
                detail="Could not read image data. Please send a valid image."
            )

        # Generate diagnosis
        logger.info(f"Processing base64 detection for session {session.session_id[:8]}...")
        report, annotated_image = generate_diagnosis_with_image(image, language)

        # Update session with diagnosis context so the chat can use it.
        diagnosis_context = DiagnosisContext.from_diagnosis_report(report)
        session_manager.update_diagnosis(session.session_id, diagnosis_context)

        # Build response
        response_data = {
            "success": True,
            "session_id": session.session_id,
            "detection": {
                "disease_name": report.disease_name,
                "crop_type": report.crop_type,
                "confidence": report.confidence,
                "confidence_percent": round(report.confidence * 100, 1),
                "severity_level": report.severity_level,
                "is_healthy": report.is_healthy
            },
            "diagnosis": report.to_dict(),
            "language": language,
            "timestamp": datetime.now().isoformat()
        }

        return JSONResponse(content=response_data)

    except HTTPException:
        raise
    except Exception as e:
        # logger.exception keeps the traceback, unlike logger.error(f"...").
        logger.exception("Base64 detection failed")
        raise HTTPException(status_code=500, detail=f"Detection failed: {str(e)}")
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
@router.get("/status", response_model=StatusResponse)
async def get_detection_status():
    """
    Get status of detection service.

    Returns information about:
    - Model loading status
    - Supported languages
    - Supported crops

    Never raises for a partially-unavailable backend: probe failures
    only leave the corresponding flags False.
    """
    try:
        # Pessimistic defaults so a failure while probing any component
        # still yields a complete, well-formed payload.
        status_info = {
            "status": "operational",
            "yolo_loaded": False,
            "natlas_loaded": False,
            "knowledge_base_loaded": False,
            "supported_languages": ["en", "ha", "yo", "ig"],
            "supported_crops": ["cassava", "cocoa", "tomato"]
        }

        try:
            from services.disease_detector import get_disease_detector
            detector = get_disease_detector()
            # NOTE(review): reaches into private attributes of the detector
            # (_knowledge_base, _yolo_model, _is_loaded) — consider exposing
            # a public status property on the detector instead.
            status_info["knowledge_base_loaded"] = detector._knowledge_base is not None
            status_info["yolo_loaded"] = (
                detector._yolo_model is not None and
                detector._yolo_model._is_loaded
            )
        except Exception as e:
            # Best-effort probe: log and keep the False defaults.
            logger.warning(f"Could not get detector status: {e}")

        try:
            from models.natlas_model import get_natlas_model
            natlas = get_natlas_model()
            status_info["natlas_loaded"] = natlas.is_loaded
        except Exception as e:
            logger.warning(f"Could not get N-ATLaS status: {e}")

        return JSONResponse(content=status_info)

    except Exception as e:
        logger.error(f"Status check failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
@router.get("/classes", response_model=ClassesResponse)
|
| 322 |
+
async def get_supported_classes():
|
| 323 |
+
"""
|
| 324 |
+
Get list of supported disease classes.
|
| 325 |
+
|
| 326 |
+
Returns:
|
| 327 |
+
- Total number of classes
|
| 328 |
+
- Class names with indices
|
| 329 |
+
- Mapping of crops to class indices
|
| 330 |
+
"""
|
| 331 |
+
try:
|
| 332 |
+
from config import CLASS_NAMES, CROP_TYPES, CLASS_TO_CROP, CLASS_INDEX_TO_KEY
|
| 333 |
+
|
| 334 |
+
classes_list = []
|
| 335 |
+
for idx, name in enumerate(CLASS_NAMES):
|
| 336 |
+
classes_list.append({
|
| 337 |
+
"index": idx,
|
| 338 |
+
"name": name,
|
| 339 |
+
"key": CLASS_INDEX_TO_KEY.get(idx, ""),
|
| 340 |
+
"crop": CLASS_TO_CROP.get(idx, "unknown")
|
| 341 |
+
})
|
| 342 |
+
|
| 343 |
+
return JSONResponse(content={
|
| 344 |
+
"total_classes": len(CLASS_NAMES),
|
| 345 |
+
"classes": classes_list,
|
| 346 |
+
"crops": CROP_TYPES
|
| 347 |
+
})
|
| 348 |
+
|
| 349 |
+
except Exception as e:
|
| 350 |
+
logger.error(f"Get classes failed: {e}")
|
| 351 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
@router.delete("/session/{session_id}")
|
| 355 |
+
async def clear_session_diagnosis(session_id: str):
|
| 356 |
+
"""
|
| 357 |
+
Clear diagnosis data for a session.
|
| 358 |
+
|
| 359 |
+
Clears the current diagnosis and chat history,
|
| 360 |
+
allowing user to start fresh with a new image.
|
| 361 |
+
"""
|
| 362 |
+
try:
|
| 363 |
+
from services.session_manager import get_session_manager
|
| 364 |
+
|
| 365 |
+
session_manager = get_session_manager()
|
| 366 |
+
success = session_manager.clear_diagnosis(session_id)
|
| 367 |
+
|
| 368 |
+
if success:
|
| 369 |
+
return JSONResponse(content={
|
| 370 |
+
"success": True,
|
| 371 |
+
"message": "Diagnosis cleared",
|
| 372 |
+
"session_id": session_id
|
| 373 |
+
})
|
| 374 |
+
else:
|
| 375 |
+
raise HTTPException(status_code=404, detail="Session not found")
|
| 376 |
+
|
| 377 |
+
except HTTPException:
|
| 378 |
+
raise
|
| 379 |
+
except Exception as e:
|
| 380 |
+
logger.error(f"Clear session failed: {e}")
|
| 381 |
+
raise HTTPException(status_code=500, detail=str(e))
|
api/routes/transcribe.py
ADDED
|
@@ -0,0 +1,418 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Transcribe API Routes
|
| 3 |
+
==============================
|
| 4 |
+
REST API endpoints for speech-to-text transcription.
|
| 5 |
+
|
| 6 |
+
Endpoints:
|
| 7 |
+
- POST /api/transcribe - Transcribe audio to text
|
| 8 |
+
- GET /api/transcribe/status - Check Whisper model status
|
| 9 |
+
- GET /api/transcribe/formats - Get supported audio formats
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import sys
|
| 13 |
+
import io
|
| 14 |
+
import base64
|
| 15 |
+
from pathlib import Path
|
| 16 |
+
from typing import Optional
|
| 17 |
+
from datetime import datetime
|
| 18 |
+
import logging
|
| 19 |
+
|
| 20 |
+
from fastapi import APIRouter, File, UploadFile, Form, HTTPException, Query
|
| 21 |
+
from fastapi.responses import JSONResponse
|
| 22 |
+
from pydantic import BaseModel, Field
|
| 23 |
+
|
| 24 |
+
# Configure logging
|
| 25 |
+
logging.basicConfig(level=logging.INFO)
|
| 26 |
+
logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
# Create router
|
| 29 |
+
router = APIRouter(prefix="/api/transcribe", tags=["Transcription"])
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# =============================================================================
|
| 33 |
+
# REQUEST/RESPONSE MODELS
|
| 34 |
+
# =============================================================================
|
| 35 |
+
|
| 36 |
+
class TranscribeRequest(BaseModel):
    """Request model for base64 audio transcription."""
    # Raw audio payload; a data-URL prefix (up to the first comma) is
    # stripped server-side by decode_base64_audio().
    audio_base64: str = Field(..., description="Base64 encoded audio data")
    # Only the extension matters; it drives format validation/decoding.
    filename: str = Field(default="audio.wav", description="Original filename for format detection")
    # Optional hint; values outside {en, ha, yo, ig} are silently dropped.
    language_hint: Optional[str] = Field(default=None, description="Language hint (en, ha, yo, ig)")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class TranscribeResponse(BaseModel):
    """Response model for transcription."""
    success: bool
    # Transcribed text; empty string when nothing was recognised.
    text: str
    # Language code reported by the backend, if any.
    language: Optional[str] = None
    confidence: float = 0.0
    # Audio duration -- presumably seconds; TODO confirm against whisper_service.
    duration: float = 0.0
    # Wall-clock time the backend spent transcribing, if reported.
    processing_time: Optional[float] = None
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class StatusResponse(BaseModel):
    """Response model for service status."""
    # "operational" or "model_not_loaded" (see get_transcription_status).
    status: str
    model_loaded: bool
    # Whisper model size identifier (e.g. "base").
    model_size: str
    # Inference device (e.g. "cpu").
    device: str
    supported_formats: list
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class FormatsResponse(BaseModel):
    """Response model for supported formats."""
    # Supported audio formats, as reported by AudioProcessor.
    formats: list
    max_file_size_mb: int
    max_duration_seconds: int
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# =============================================================================
|
| 70 |
+
# HELPER FUNCTIONS
|
| 71 |
+
# =============================================================================
|
| 72 |
+
|
| 73 |
+
def decode_base64_audio(base64_string: str) -> bytes:
    """
    Decode a base64 audio string to raw bytes.

    Accepts either a bare base64 payload or a data URL
    (e.g. "data:audio/wav;base64,...."); any data-URL prefix up to the
    first comma is stripped before decoding.

    Args:
        base64_string: Base64 encoded audio, optionally with a data-URL prefix.

    Returns:
        Decoded audio bytes.

    Raises:
        ValueError: If the payload is not valid base64.
    """
    # Split on the FIRST comma only; splitting on every comma (as before)
    # would silently truncate any payload containing additional commas.
    if "," in base64_string:
        base64_string = base64_string.split(",", 1)[1]

    try:
        return base64.b64decode(base64_string)
    except Exception as e:
        raise ValueError(f"Invalid base64 audio: {e}")
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def validate_audio_format(filename: str) -> bool:
    """
    Check whether *filename* carries a supported audio extension.

    Comparison is case-insensitive on the file suffix only; the rest of
    the name is ignored.

    Args:
        filename: Audio filename to check.

    Returns:
        True if the extension is one of WAV, MP3, M4A, OGG, FLAC, WEBM.
    """
    allowed = {".wav", ".mp3", ".m4a", ".ogg", ".flac", ".webm"}
    return Path(filename).suffix.lower() in allowed
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
# =============================================================================
|
| 109 |
+
# ENDPOINTS
|
| 110 |
+
# =============================================================================
|
| 111 |
+
|
| 112 |
+
@router.post("/", response_model=TranscribeResponse)
|
| 113 |
+
async def transcribe_audio(
|
| 114 |
+
file: UploadFile = File(..., description="Audio file"),
|
| 115 |
+
language_hint: Optional[str] = Form(default=None, description="Language hint (en, ha, yo, ig)")
|
| 116 |
+
):
|
| 117 |
+
"""
|
| 118 |
+
Transcribe audio file to text.
|
| 119 |
+
|
| 120 |
+
Uses OpenAI Whisper model for accurate speech-to-text,
|
| 121 |
+
with special optimization for Nigerian languages.
|
| 122 |
+
|
| 123 |
+
Supported formats: WAV, MP3, M4A, OGG, FLAC, WEBM
|
| 124 |
+
Maximum file size: 5MB
|
| 125 |
+
Maximum duration: 30 seconds
|
| 126 |
+
|
| 127 |
+
Language hints improve accuracy:
|
| 128 |
+
- en: English
|
| 129 |
+
- ha: Hausa
|
| 130 |
+
- yo: Yoruba
|
| 131 |
+
- ig: Igbo
|
| 132 |
+
"""
|
| 133 |
+
try:
|
| 134 |
+
# Validate file format
|
| 135 |
+
if not file.filename:
|
| 136 |
+
raise HTTPException(status_code=400, detail="No filename provided")
|
| 137 |
+
|
| 138 |
+
if not validate_audio_format(file.filename):
|
| 139 |
+
raise HTTPException(
|
| 140 |
+
status_code=400,
|
| 141 |
+
detail="Invalid audio format. Supported: WAV, MP3, M4A, OGG, FLAC, WEBM"
|
| 142 |
+
)
|
| 143 |
+
|
| 144 |
+
# Read file content
|
| 145 |
+
contents = await file.read()
|
| 146 |
+
|
| 147 |
+
# Validate file size (5MB max)
|
| 148 |
+
max_size = 5 * 1024 * 1024
|
| 149 |
+
if len(contents) > max_size:
|
| 150 |
+
raise HTTPException(
|
| 151 |
+
status_code=400,
|
| 152 |
+
detail=f"File too large. Maximum size: {max_size // (1024*1024)}MB"
|
| 153 |
+
)
|
| 154 |
+
|
| 155 |
+
# Validate language hint
|
| 156 |
+
valid_languages = ["en", "ha", "yo", "ig"]
|
| 157 |
+
if language_hint and language_hint not in valid_languages:
|
| 158 |
+
language_hint = None
|
| 159 |
+
|
| 160 |
+
# Import Whisper service
|
| 161 |
+
from services.whisper_service import get_whisper_service
|
| 162 |
+
|
| 163 |
+
whisper_service = get_whisper_service()
|
| 164 |
+
|
| 165 |
+
# Transcribe
|
| 166 |
+
logger.info(f"Transcribing audio: {file.filename}")
|
| 167 |
+
result = whisper_service.transcribe_bytes(
|
| 168 |
+
audio_bytes=contents,
|
| 169 |
+
filename=file.filename,
|
| 170 |
+
language_hint=language_hint
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
if not result.get("success", False):
|
| 174 |
+
error_msg = result.get("error", "Transcription failed")
|
| 175 |
+
raise HTTPException(status_code=500, detail=error_msg)
|
| 176 |
+
|
| 177 |
+
# Build response
|
| 178 |
+
response_data = {
|
| 179 |
+
"success": True,
|
| 180 |
+
"text": result.get("text", ""),
|
| 181 |
+
"language": result.get("language"),
|
| 182 |
+
"confidence": result.get("confidence", 0.0),
|
| 183 |
+
"duration": result.get("duration", 0.0),
|
| 184 |
+
"processing_time": result.get("processing_time")
|
| 185 |
+
}
|
| 186 |
+
|
| 187 |
+
logger.info(f"Transcription complete: {len(response_data['text'])} chars")
|
| 188 |
+
|
| 189 |
+
return JSONResponse(content=response_data)
|
| 190 |
+
|
| 191 |
+
except HTTPException:
|
| 192 |
+
raise
|
| 193 |
+
except Exception as e:
|
| 194 |
+
logger.error(f"Transcription failed: {e}")
|
| 195 |
+
raise HTTPException(status_code=500, detail=f"Transcription failed: {str(e)}")
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
@router.post("/base64", response_model=TranscribeResponse)
|
| 199 |
+
async def transcribe_audio_base64(request: TranscribeRequest):
|
| 200 |
+
"""
|
| 201 |
+
Transcribe base64 encoded audio to text.
|
| 202 |
+
|
| 203 |
+
Alternative endpoint for clients that prefer sending
|
| 204 |
+
audio as base64 strings (e.g., from web recordings).
|
| 205 |
+
"""
|
| 206 |
+
try:
|
| 207 |
+
# Decode base64 audio
|
| 208 |
+
try:
|
| 209 |
+
audio_bytes = decode_base64_audio(request.audio_base64)
|
| 210 |
+
except ValueError as e:
|
| 211 |
+
raise HTTPException(status_code=400, detail=str(e))
|
| 212 |
+
|
| 213 |
+
# Validate size (5MB max)
|
| 214 |
+
max_size = 5 * 1024 * 1024
|
| 215 |
+
if len(audio_bytes) > max_size:
|
| 216 |
+
raise HTTPException(
|
| 217 |
+
status_code=400,
|
| 218 |
+
detail=f"Audio too large. Maximum size: {max_size // (1024*1024)}MB"
|
| 219 |
+
)
|
| 220 |
+
|
| 221 |
+
# Validate format from filename
|
| 222 |
+
if not validate_audio_format(request.filename):
|
| 223 |
+
raise HTTPException(
|
| 224 |
+
status_code=400,
|
| 225 |
+
detail="Invalid audio format. Supported: WAV, MP3, M4A, OGG, FLAC, WEBM"
|
| 226 |
+
)
|
| 227 |
+
|
| 228 |
+
# Validate language hint
|
| 229 |
+
valid_languages = ["en", "ha", "yo", "ig"]
|
| 230 |
+
language_hint = request.language_hint
|
| 231 |
+
if language_hint and language_hint not in valid_languages:
|
| 232 |
+
language_hint = None
|
| 233 |
+
|
| 234 |
+
# Import Whisper service
|
| 235 |
+
from services.whisper_service import get_whisper_service
|
| 236 |
+
|
| 237 |
+
whisper_service = get_whisper_service()
|
| 238 |
+
|
| 239 |
+
# Transcribe
|
| 240 |
+
logger.info(f"Transcribing base64 audio: {request.filename}")
|
| 241 |
+
result = whisper_service.transcribe_bytes(
|
| 242 |
+
audio_bytes=audio_bytes,
|
| 243 |
+
filename=request.filename,
|
| 244 |
+
language_hint=language_hint
|
| 245 |
+
)
|
| 246 |
+
|
| 247 |
+
if not result.get("success", False):
|
| 248 |
+
error_msg = result.get("error", "Transcription failed")
|
| 249 |
+
raise HTTPException(status_code=500, detail=error_msg)
|
| 250 |
+
|
| 251 |
+
response_data = {
|
| 252 |
+
"success": True,
|
| 253 |
+
"text": result.get("text", ""),
|
| 254 |
+
"language": result.get("language"),
|
| 255 |
+
"confidence": result.get("confidence", 0.0),
|
| 256 |
+
"duration": result.get("duration", 0.0),
|
| 257 |
+
"processing_time": result.get("processing_time")
|
| 258 |
+
}
|
| 259 |
+
|
| 260 |
+
return JSONResponse(content=response_data)
|
| 261 |
+
|
| 262 |
+
except HTTPException:
|
| 263 |
+
raise
|
| 264 |
+
except Exception as e:
|
| 265 |
+
logger.error(f"Base64 transcription failed: {e}")
|
| 266 |
+
raise HTTPException(status_code=500, detail=f"Transcription failed: {str(e)}")
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@router.get("/status", response_model=StatusResponse)
|
| 270 |
+
async def get_transcription_status():
|
| 271 |
+
"""
|
| 272 |
+
Get status of transcription service.
|
| 273 |
+
|
| 274 |
+
Returns information about:
|
| 275 |
+
- Whisper model loading status
|
| 276 |
+
- Model size and device
|
| 277 |
+
- Supported formats
|
| 278 |
+
"""
|
| 279 |
+
try:
|
| 280 |
+
from services.whisper_service import get_whisper_service
|
| 281 |
+
|
| 282 |
+
whisper_service = get_whisper_service()
|
| 283 |
+
info = whisper_service.get_model_info()
|
| 284 |
+
|
| 285 |
+
return JSONResponse(content={
|
| 286 |
+
"status": "operational" if info.get("is_loaded") else "model_not_loaded",
|
| 287 |
+
"model_loaded": info.get("is_loaded", False),
|
| 288 |
+
"model_size": info.get("model_size", "base"),
|
| 289 |
+
"device": info.get("device", "cpu"),
|
| 290 |
+
"supported_formats": info.get("supported_formats", [])
|
| 291 |
+
})
|
| 292 |
+
|
| 293 |
+
except Exception as e:
|
| 294 |
+
logger.error(f"Status check failed: {e}")
|
| 295 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
@router.get("/formats", response_model=FormatsResponse)
|
| 299 |
+
async def get_supported_formats():
|
| 300 |
+
"""
|
| 301 |
+
Get supported audio formats and limits.
|
| 302 |
+
|
| 303 |
+
Returns:
|
| 304 |
+
- List of supported audio formats
|
| 305 |
+
- Maximum file size
|
| 306 |
+
- Maximum audio duration
|
| 307 |
+
"""
|
| 308 |
+
try:
|
| 309 |
+
from services.whisper_service import AudioProcessor
|
| 310 |
+
|
| 311 |
+
return JSONResponse(content={
|
| 312 |
+
"formats": list(AudioProcessor.SUPPORTED_FORMATS),
|
| 313 |
+
"max_file_size_mb": 5,
|
| 314 |
+
"max_duration_seconds": AudioProcessor.MAX_DURATION
|
| 315 |
+
})
|
| 316 |
+
|
| 317 |
+
except Exception as e:
|
| 318 |
+
logger.error(f"Get formats failed: {e}")
|
| 319 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
@router.post("/detect-language")
|
| 323 |
+
async def detect_audio_language(
|
| 324 |
+
file: UploadFile = File(..., description="Audio file")
|
| 325 |
+
):
|
| 326 |
+
"""
|
| 327 |
+
Detect language in audio file.
|
| 328 |
+
|
| 329 |
+
Uses Whisper's language detection to identify
|
| 330 |
+
the spoken language without full transcription.
|
| 331 |
+
Faster than full transcription for language detection.
|
| 332 |
+
"""
|
| 333 |
+
try:
|
| 334 |
+
# Validate file
|
| 335 |
+
if not file.filename or not validate_audio_format(file.filename):
|
| 336 |
+
raise HTTPException(
|
| 337 |
+
status_code=400,
|
| 338 |
+
detail="Invalid audio format"
|
| 339 |
+
)
|
| 340 |
+
|
| 341 |
+
# Read content
|
| 342 |
+
contents = await file.read()
|
| 343 |
+
|
| 344 |
+
# Validate size
|
| 345 |
+
max_size = 5 * 1024 * 1024
|
| 346 |
+
if len(contents) > max_size:
|
| 347 |
+
raise HTTPException(status_code=400, detail="File too large")
|
| 348 |
+
|
| 349 |
+
# Import service
|
| 350 |
+
from services.whisper_service import get_whisper_service
|
| 351 |
+
|
| 352 |
+
whisper_service = get_whisper_service()
|
| 353 |
+
|
| 354 |
+
# Detect language
|
| 355 |
+
result = whisper_service.detect_language(contents)
|
| 356 |
+
|
| 357 |
+
if not result.get("success", False):
|
| 358 |
+
raise HTTPException(
|
| 359 |
+
status_code=500,
|
| 360 |
+
detail=result.get("error", "Language detection failed")
|
| 361 |
+
)
|
| 362 |
+
|
| 363 |
+
return JSONResponse(content={
|
| 364 |
+
"success": True,
|
| 365 |
+
"language": result.get("language"),
|
| 366 |
+
"confidence": result.get("confidence", 0.0),
|
| 367 |
+
"top_languages": result.get("all_probabilities", {})
|
| 368 |
+
})
|
| 369 |
+
|
| 370 |
+
except HTTPException:
|
| 371 |
+
raise
|
| 372 |
+
except Exception as e:
|
| 373 |
+
logger.error(f"Language detection failed: {e}")
|
| 374 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
@router.post("/load-model")
|
| 378 |
+
async def load_whisper_model():
|
| 379 |
+
"""
|
| 380 |
+
Explicitly load the Whisper model.
|
| 381 |
+
|
| 382 |
+
Useful for warming up the model before user
|
| 383 |
+
starts using voice input. Model loads automatically
|
| 384 |
+
on first use, but pre-loading improves UX.
|
| 385 |
+
"""
|
| 386 |
+
try:
|
| 387 |
+
from services.whisper_service import get_whisper_service
|
| 388 |
+
|
| 389 |
+
whisper_service = get_whisper_service()
|
| 390 |
+
|
| 391 |
+
if whisper_service.is_loaded:
|
| 392 |
+
return JSONResponse(content={
|
| 393 |
+
"success": True,
|
| 394 |
+
"message": "Model already loaded",
|
| 395 |
+
"model_size": whisper_service.model_size
|
| 396 |
+
})
|
| 397 |
+
|
| 398 |
+
# Load model
|
| 399 |
+
logger.info("Pre-loading Whisper model...")
|
| 400 |
+
success = whisper_service.load_model()
|
| 401 |
+
|
| 402 |
+
if success:
|
| 403 |
+
return JSONResponse(content={
|
| 404 |
+
"success": True,
|
| 405 |
+
"message": "Model loaded successfully",
|
| 406 |
+
"model_size": whisper_service.model_size
|
| 407 |
+
})
|
| 408 |
+
else:
|
| 409 |
+
raise HTTPException(
|
| 410 |
+
status_code=500,
|
| 411 |
+
detail="Failed to load Whisper model"
|
| 412 |
+
)
|
| 413 |
+
|
| 414 |
+
except HTTPException:
|
| 415 |
+
raise
|
| 416 |
+
except Exception as e:
|
| 417 |
+
logger.error(f"Load model failed: {e}")
|
| 418 |
+
raise HTTPException(status_code=500, detail=str(e))
|
api/routes/tts.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes TTS API Routes
|
| 3 |
+
=======================
|
| 4 |
+
REST API endpoints for text-to-speech synthesis.
|
| 5 |
+
|
| 6 |
+
Endpoints:
|
| 7 |
+
- POST /api/tts - Synthesize text to speech
|
| 8 |
+
- GET /api/tts/languages - Get supported languages
|
| 9 |
+
- GET /api/tts/status - Check TTS service status
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import sys
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from typing import Optional
|
| 15 |
+
from datetime import datetime
|
| 16 |
+
import logging
|
| 17 |
+
|
| 18 |
+
from fastapi import APIRouter, HTTPException, Query
|
| 19 |
+
from fastapi.responses import JSONResponse
|
| 20 |
+
from pydantic import BaseModel, Field
|
| 21 |
+
|
| 22 |
+
# Configure logging
|
| 23 |
+
logging.basicConfig(level=logging.INFO)
|
| 24 |
+
logger = logging.getLogger(__name__)
|
| 25 |
+
|
| 26 |
+
# Create router
|
| 27 |
+
router = APIRouter(prefix="/api/tts", tags=["Text-to-Speech"])
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# =============================================================================
|
| 31 |
+
# REQUEST/RESPONSE MODELS
|
| 32 |
+
# =============================================================================
|
| 33 |
+
|
| 34 |
+
class TTSRequest(BaseModel):
    """Request model for TTS synthesis."""
    # Hard cap of 2000 chars keeps synthesis requests bounded.
    text: str = Field(..., description="Text to convert to speech", max_length=2000)
    language: str = Field(default="en", description="Language code (en, ha, yo, ig)")
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class TTSResponse(BaseModel):
    """Response model for TTS synthesis."""
    success: bool
    # Base64-encoded audio payload; None when synthesis failed.
    audio_base64: Optional[str] = None
    content_type: str = "audio/flac"
    # Audio duration -- presumably seconds; TODO confirm against tts_service.
    duration: float = 0.0
    language: str = "en"
    text_length: int = 0
    processing_time: Optional[float] = None
    # On failure this is set (with success=False) instead of raising an
    # HTTP error, so clients can fall back gracefully.
    error: Optional[str] = None
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class LanguagesResponse(BaseModel):
    """Response model for supported languages."""
    success: bool
    # Mapping of language code -> display name.
    languages: dict
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class StatusResponse(BaseModel):
    """Response model for service status."""
    success: bool
    # "ready", "no_token", or "error" (see get_tts_status).
    status: str
    # Whether a HuggingFace API token is configured for the TTS backend.
    has_token: bool
    supported_languages: list
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# =============================================================================
|
| 67 |
+
# ENDPOINTS
|
| 68 |
+
# =============================================================================
|
| 69 |
+
|
| 70 |
+
@router.post("", response_model=TTSResponse)
|
| 71 |
+
@router.post("/", response_model=TTSResponse)
|
| 72 |
+
async def synthesize_speech(request: TTSRequest):
|
| 73 |
+
"""
|
| 74 |
+
Synthesize text to speech.
|
| 75 |
+
|
| 76 |
+
Converts the provided text to audio using Meta MMS-TTS.
|
| 77 |
+
Returns base64 encoded audio data.
|
| 78 |
+
|
| 79 |
+
Supported languages:
|
| 80 |
+
- en: English
|
| 81 |
+
- ha: Hausa
|
| 82 |
+
- yo: Yoruba
|
| 83 |
+
- ig: Igbo
|
| 84 |
+
"""
|
| 85 |
+
try:
|
| 86 |
+
from services.tts_service import get_tts_service
|
| 87 |
+
|
| 88 |
+
logger.info(f"TTS request: lang={request.language}, text_len={len(request.text)}")
|
| 89 |
+
|
| 90 |
+
# Get TTS service
|
| 91 |
+
tts_service = get_tts_service()
|
| 92 |
+
|
| 93 |
+
# Check language support
|
| 94 |
+
if not tts_service.is_language_supported(request.language):
|
| 95 |
+
raise HTTPException(
|
| 96 |
+
status_code=400,
|
| 97 |
+
detail=f"Language '{request.language}' is not supported. Use: en, ha, yo, ig"
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
# Synthesize
|
| 101 |
+
result = tts_service.synthesize(request.text, request.language)
|
| 102 |
+
|
| 103 |
+
if result["success"]:
|
| 104 |
+
return TTSResponse(
|
| 105 |
+
success=True,
|
| 106 |
+
audio_base64=result["audio_base64"],
|
| 107 |
+
content_type=result.get("content_type", "audio/flac"),
|
| 108 |
+
duration=result.get("duration", 0.0),
|
| 109 |
+
language=result["language"],
|
| 110 |
+
text_length=result.get("text_length", len(request.text)),
|
| 111 |
+
processing_time=result.get("processing_time")
|
| 112 |
+
)
|
| 113 |
+
else:
|
| 114 |
+
# Return error but don't raise exception (for fallback handling)
|
| 115 |
+
return TTSResponse(
|
| 116 |
+
success=False,
|
| 117 |
+
language=request.language,
|
| 118 |
+
text_length=len(request.text),
|
| 119 |
+
error=result.get("error", "TTS synthesis failed")
|
| 120 |
+
)
|
| 121 |
+
|
| 122 |
+
except HTTPException:
|
| 123 |
+
raise
|
| 124 |
+
except Exception as e:
|
| 125 |
+
logger.error(f"TTS synthesis failed: {e}")
|
| 126 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
@router.get("/languages", response_model=LanguagesResponse)
|
| 130 |
+
async def get_supported_languages():
|
| 131 |
+
"""
|
| 132 |
+
Get list of supported TTS languages.
|
| 133 |
+
|
| 134 |
+
Returns language codes and their display names.
|
| 135 |
+
"""
|
| 136 |
+
try:
|
| 137 |
+
from services.tts_service import get_tts_service
|
| 138 |
+
|
| 139 |
+
tts_service = get_tts_service()
|
| 140 |
+
languages = tts_service.get_supported_languages()
|
| 141 |
+
|
| 142 |
+
return LanguagesResponse(
|
| 143 |
+
success=True,
|
| 144 |
+
languages=languages
|
| 145 |
+
)
|
| 146 |
+
|
| 147 |
+
except Exception as e:
|
| 148 |
+
logger.error(f"Get languages failed: {e}")
|
| 149 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
@router.get("/status", response_model=StatusResponse)
|
| 153 |
+
async def get_tts_status():
|
| 154 |
+
"""
|
| 155 |
+
Get TTS service status.
|
| 156 |
+
|
| 157 |
+
Returns whether the service is configured and ready.
|
| 158 |
+
"""
|
| 159 |
+
try:
|
| 160 |
+
from services.tts_service import get_tts_service
|
| 161 |
+
|
| 162 |
+
tts_service = get_tts_service()
|
| 163 |
+
has_token = bool(tts_service.hf_token)
|
| 164 |
+
languages = list(tts_service.get_supported_languages().keys())
|
| 165 |
+
|
| 166 |
+
status = "ready" if has_token else "no_token"
|
| 167 |
+
|
| 168 |
+
return StatusResponse(
|
| 169 |
+
success=True,
|
| 170 |
+
status=status,
|
| 171 |
+
has_token=has_token,
|
| 172 |
+
supported_languages=languages
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
except Exception as e:
|
| 176 |
+
logger.error(f"Get status failed: {e}")
|
| 177 |
+
return StatusResponse(
|
| 178 |
+
success=False,
|
| 179 |
+
status="error",
|
| 180 |
+
has_token=False,
|
| 181 |
+
supported_languages=[]
|
| 182 |
+
)
|
config.py
ADDED
|
@@ -0,0 +1,635 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Configuration File
|
| 3 |
+
===========================
|
| 4 |
+
Central configuration for the FarmEyes crop disease detection application.
|
| 5 |
+
Contains model paths, class mappings, device settings, API configurations,
|
| 6 |
+
session management, and Whisper speech-to-text settings.
|
| 7 |
+
|
| 8 |
+
Device: Apple Silicon M1 Pro with MPS (Metal Performance Shaders) acceleration
|
| 9 |
+
Deployment: Local development + HuggingFace Spaces
|
| 10 |
+
|
| 11 |
+
Model: Custom trained YOLOv11 for 6 disease classes
|
| 12 |
+
Crops: Cassava, Cocoa, Tomato
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
from pathlib import Path
|
| 17 |
+
from typing import Dict, List, Optional
|
| 18 |
+
from dataclasses import dataclass, field
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# =============================================================================
|
| 22 |
+
# PATH CONFIGURATIONS
|
| 23 |
+
# =============================================================================
|
| 24 |
+
|
| 25 |
+
# Base project directory (the directory containing this config module)
BASE_DIR = Path(__file__).parent.resolve()

# Project sub-directories
DATA_DIR = BASE_DIR / "data"
STATIC_DIR = BASE_DIR / "static"
MODELS_DIR = BASE_DIR / "models"
OUTPUTS_DIR = BASE_DIR / "outputs"
FRONTEND_DIR = BASE_DIR / "frontend"
UPLOADS_DIR = BASE_DIR / "uploads"

# Ensure the writable directories exist at import time.
# FRONTEND_DIR is intentionally excluded — it ships with the repository.
for _dir in (DATA_DIR, STATIC_DIR, MODELS_DIR, OUTPUTS_DIR, UPLOADS_DIR):
    _dir.mkdir(parents=True, exist_ok=True)

# Data files consumed at runtime
KNOWLEDGE_BASE_PATH = DATA_DIR / "knowledge_base.json"
UI_TRANSLATIONS_PATH = STATIC_DIR / "ui_translations.json"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
# =============================================================================
|
| 46 |
+
# API CONFIGURATION
|
| 47 |
+
# =============================================================================
|
| 48 |
+
|
| 49 |
+
@dataclass
class APIConfig:
    """Configuration for the FastAPI backend.

    Covers server binding, API metadata, CORS, request limits, basic
    rate limiting, and environment detection for HuggingFace Spaces.
    """

    # Server settings
    host: str = "0.0.0.0"  # Bind on all interfaces (needed in containers)
    port: int = 7860  # Default HuggingFace Spaces port

    # API metadata (surfaced in the generated OpenAPI docs)
    title: str = "FarmEyes API"
    description: str = "AI-Powered Crop Disease Detection for Nigerian Farmers"
    version: str = "2.0.0"

    # CORS settings (for frontend access)
    # NOTE(review): the trailing "*" makes the specific entries redundant,
    # and "https://*.hf.space" is not matched literally by typical CORS
    # middleware (FastAPI needs allow_origin_regex for wildcards) — confirm
    # intent and restrict before production.
    cors_origins: List[str] = field(default_factory=lambda: [
        "http://localhost:7860",
        "http://127.0.0.1:7860",
        "https://*.hf.space",  # HuggingFace Spaces
        "*"  # Allow all for development - restrict in production
    ])

    # Request limits
    max_upload_size: int = 10 * 1024 * 1024  # 10MB max image upload
    request_timeout: int = 60  # seconds

    # Rate limiting (basic)
    rate_limit_requests: int = 100  # requests per minute
    rate_limit_window: int = 60  # seconds

    # Debug mode — the DEBUG env var is read once, when the class is defined
    debug: bool = os.environ.get("DEBUG", "false").lower() == "true"

    # Environment detection
    @property
    def is_huggingface(self) -> bool:
        """Check if running on HuggingFace Spaces (SPACE_ID env var set)."""
        return os.environ.get("SPACE_ID") is not None

    @property
    def base_url(self) -> str:
        """Get the externally reachable base URL for this environment."""
        if self.is_huggingface:
            space_id = os.environ.get("SPACE_ID", "")
            # Spaces URLs use "owner-space" form, so "/" becomes "-"
            return f"https://{space_id.replace('/', '-')}.hf.space"
        return f"http://{self.host}:{self.port}"
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# =============================================================================
|
| 97 |
+
# SESSION CONFIGURATION
|
| 98 |
+
# =============================================================================
|
| 99 |
+
|
| 100 |
+
@dataclass
class SessionConfig:
    """Configuration for session management.

    Sessions hold per-user chat history and (optionally) the latest
    diagnosis context between requests.
    """

    # Session settings
    session_lifetime: int = 3600  # 1 hour in seconds
    max_sessions: int = 1000  # Maximum concurrent sessions

    # Chat history settings
    max_chat_history: int = 50  # Maximum messages per session
    max_message_length: int = 2000  # Maximum characters per message

    # Context retention
    retain_diagnosis: bool = True  # Keep diagnosis context in session

    # Cleanup settings
    cleanup_interval: int = 300  # 5 minutes - check for expired sessions
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
# =============================================================================
|
| 120 |
+
# WHISPER CONFIGURATION
|
| 121 |
+
# =============================================================================
|
| 122 |
+
|
| 123 |
+
@dataclass
class WhisperConfig:
    """Configuration for Whisper speech-to-text (voice input)."""

    # Model settings
    model_size: str = "base"  # tiny, base, small, medium, large

    # Supported model sizes with approximate VRAM/RAM requirements
    #   tiny:   ~1GB  - Fastest, least accurate
    #   base:   ~1GB  - Good balance (SELECTED)
    #   small:  ~2GB  - Better accuracy
    #   medium: ~5GB  - High accuracy
    #   large:  ~10GB - Best accuracy

    # Device settings
    device: str = "cpu"  # Use CPU for broader compatibility
    # Note: On Apple Silicon, Whisper runs well on CPU
    # For GPU: set to "cuda" (NVIDIA) or use mlx-whisper for Apple Silicon

    # Audio settings
    sample_rate: int = 16000  # Whisper expects 16kHz audio
    max_audio_duration: int = 30  # Maximum seconds of audio to process

    # Language settings - Whisper auto-detects but we can hint.
    # Maps the app's two-letter codes to Whisper language names.
    language_hints: Dict[str, str] = field(default_factory=lambda: {
        "en": "english",
        "ha": "hausa",
        "yo": "yoruba",
        "ig": "igbo"
    })

    # Transcription settings
    task: str = "transcribe"  # transcribe or translate

    # Performance settings
    fp16: bool = False  # Use FP32 for CPU compatibility

    # Supported audio formats (file extensions accepted for upload)
    supported_formats: List[str] = field(default_factory=lambda: [
        ".wav", ".mp3", ".m4a", ".ogg", ".flac", ".webm"
    ])

    # Maximum audio file size (5MB)
    max_file_size: int = 5 * 1024 * 1024
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
# =============================================================================
|
| 170 |
+
# MODEL CONFIGURATIONS
|
| 171 |
+
# =============================================================================
|
| 172 |
+
|
| 173 |
+
@dataclass
class YOLOConfig:
    """Configuration for the YOLOv11 disease detection model."""

    # Path to trained YOLOv11 model weights (.pt file)
    model_path: Path = MODELS_DIR / "farmeyes_yolov11.pt"

    # Confidence threshold for detections (0.0 - 1.0);
    # detections scoring below this are discarded
    confidence_threshold: float = 0.5

    # IoU threshold for non-maximum suppression
    iou_threshold: float = 0.45

    # Input image size (YOLOv11 default)
    input_size: int = 640

    # Maximum number of detections per image
    max_detections: int = 10

    # Device for inference ('mps' for Apple Silicon, 'cuda' for NVIDIA, 'cpu' for CPU)
    # NOTE(review): hard-coded "mps" — on hosts without Metal, consider
    # wiring this to DeviceConfig.get_device() so it falls back automatically.
    device: str = "mps"
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
@dataclass
class NATLaSConfig:
    """Configuration for the N-ATLaS language model (GGUF format).

    The model is fetched from Hugging Face and cached locally; generation
    parameters distinguish long-form diagnosis text from shorter chat turns.
    """

    # Hugging Face model repository
    hf_repo: str = "tosinamuda/N-ATLaS-GGUF"

    # GGUF model filename (Q4_K_M quantized version - smaller, faster)
    model_filename: str = "N-ATLaS-GGUF-Q4_K_M.gguf"

    # Local directory where the model will be downloaded/cached
    model_path: Path = MODELS_DIR / "natlas"

    # Full path to the GGUF file
    @property
    def gguf_path(self) -> Path:
        """Absolute path of the (downloaded) GGUF weights file."""
        return self.model_path / self.model_filename

    # Context window size (tokens)
    context_length: int = 4096

    # Maximum tokens to generate in response
    max_tokens: int = 1024

    # Chat-specific max tokens (shorter for responsiveness)
    chat_max_tokens: int = 512

    # Temperature for text generation (0.0 = deterministic, 1.0 = creative)
    temperature: float = 0.7

    # Chat temperature (slightly lower for more focused responses)
    chat_temperature: float = 0.6

    # Top-p (nucleus) sampling
    top_p: float = 0.9

    # Number of GPU layers to offload (for MPS acceleration)
    # Set to -1 to offload all layers, 0 for CPU only
    n_gpu_layers: int = -1

    # Number of threads for CPU computation
    n_threads: int = 8

    # Batch size for prompt processing
    n_batch: int = 512

    # Device for inference
    device: str = "mps"
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
# =============================================================================
|
| 247 |
+
# DISEASE CLASS MAPPINGS (6 CLASSES - NO HEALTHY CLASSES)
|
| 248 |
+
# =============================================================================
|
| 249 |
+
|
| 250 |
+
# YOLOv11 class index to disease key mapping
|
| 251 |
+
# YOLOv11 class index -> canonical disease key (keys match knowledge_base.json)
CLASS_INDEX_TO_KEY: Dict[int, str] = {
    0: "cassava_bacterial_blight",
    1: "cassava_mosaic_virus",
    2: "cocoa_monilia_disease",
    3: "cocoa_phytophthora_disease",
    4: "tomato_gray_mold",
    5: "tomato_wilt_disease"
}

# Reverse mapping: disease key -> class index
KEY_TO_CLASS_INDEX: Dict[str, int] = {v: k for k, v in CLASS_INDEX_TO_KEY.items()}

# Class names as they appear in YOLO training (6 classes)
CLASS_NAMES: List[str] = [
    "Cassava Bacteria Blight",       # Index 0
    "Cassava Mosaic Virus",          # Index 1
    "Cocoa Monilia Disease",         # Index 2
    "Cocoa Phytophthora Disease",    # Index 3
    "Tomato Gray Mold Disease",      # Index 4
    "Tomato Wilt Disease"            # Index 5
]

# No healthy class indices in the 6-class model
HEALTHY_CLASS_INDICES: List[int] = []

# All class indices are disease classes — derived from CLASS_INDEX_TO_KEY
# instead of a duplicated literal so the two can never drift apart.
DISEASE_CLASS_INDICES: List[int] = sorted(CLASS_INDEX_TO_KEY)

# Crop type -> class indices (6 classes only)
CROP_TYPES: Dict[str, List[int]] = {
    "cassava": [0, 1],
    "cocoa": [2, 3],
    "tomato": [4, 5]
}

# Reverse mapping: class index -> crop type.
# A comprehension avoids leaking loop variables (crop/indices/idx) into the
# module namespace, which the previous explicit loop did.
CLASS_TO_CROP: Dict[int, str] = {
    idx: crop
    for crop, indices in CROP_TYPES.items()
    for idx in indices
}
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
# =============================================================================
|
| 294 |
+
# LANGUAGE CONFIGURATIONS
|
| 295 |
+
# =============================================================================
|
| 296 |
+
|
| 297 |
+
@dataclass
class LanguageConfig:
    """Configuration for supported languages (English, Hausa, Yoruba, Igbo)."""

    # Supported language codes (two-letter codes used throughout the app)
    supported_languages: List[str] = field(default_factory=lambda: ["en", "ha", "yo", "ig"])

    # Default language
    default_language: str = "en"

    # Language display names (with diacritics, for UI display)
    language_names: Dict[str, str] = field(default_factory=lambda: {
        "en": "English",
        "ha": "Hausa",
        "yo": "Yorùbá",
        "ig": "Igbo"
    })

    # Language codes for N-ATLaS prompts
    # (plain ASCII names, distinct from language_names which uses diacritics)
    language_full_names: Dict[str, str] = field(default_factory=lambda: {
        "en": "English",
        "ha": "Hausa",
        "yo": "Yoruba",
        "ig": "Igbo"
    })

    # Native language names (for display in the language selector)
    native_names: Dict[str, str] = field(default_factory=lambda: {
        "en": "English",
        "ha": "Hausa",
        "yo": "Yorùbá",
        "ig": "Asụsụ Igbo"
    })
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
# =============================================================================
|
| 333 |
+
# CHAT CONFIGURATION
|
| 334 |
+
# =============================================================================
|
| 335 |
+
|
| 336 |
+
@dataclass
class ChatConfig:
    """Configuration for the contextual chatbot.

    Holds the system prompt, the diagnosis-context template injected into
    each conversation, topic keywords used for moderation, and the welcome
    message shown after a diagnosis.
    """

    # System prompt for agricultural chat (sent verbatim to the LLM)
    system_prompt: str = """You are FarmEyes, an AI agricultural assistant helping Nigerian farmers.
You are currently discussing a specific crop disease diagnosis with the farmer.
Your role is to:
1. Answer questions ONLY about the diagnosed disease and related agricultural topics
2. Provide practical, actionable advice in simple language
3. Use local context (Nigerian farming practices, costs in Naira)
4. Be respectful, patient, and supportive
5. If asked about unrelated topics, politely redirect to agricultural matters

IMPORTANT: Stay focused on the diagnosis context provided. Do not make up information.
If you don't know something, say so honestly and suggest consulting a local agricultural extension officer."""

    # Context template for chat — formatted with the latest diagnosis fields
    # (crop_type, disease_name, confidence, severity, symptoms, treatment_summary)
    context_template: str = """CURRENT DIAGNOSIS CONTEXT:
- Crop: {crop_type}
- Disease: {disease_name}
- Confidence: {confidence}%
- Severity: {severity}
- Key symptoms: {symptoms}
- Recommended treatment: {treatment_summary}

The farmer may ask follow-up questions about this diagnosis."""

    # Allowed topic keywords (for moderate context restriction)
    allowed_topics: List[str] = field(default_factory=lambda: [
        # Disease-related
        "disease", "infection", "symptom", "treatment", "cure", "prevention",
        "spread", "cause", "severity", "recovery",
        # Crop-related
        "crop", "plant", "leaf", "stem", "root", "fruit", "harvest", "yield",
        "cassava", "cocoa", "tomato", "farming", "agriculture",
        # Treatment-related
        "medicine", "chemical", "organic", "traditional", "spray", "apply",
        "fungicide", "pesticide", "fertilizer", "cost", "price", "naira",
        # General farming
        "farm", "field", "soil", "water", "weather", "season", "planting",
        "seed", "variety", "resistant", "healthy"
    ])

    # Response length limits
    max_response_tokens: int = 400

    # Welcome message template — formatted with crop_type, disease_name, confidence
    welcome_template: str = """Hello! I'm your FarmEyes assistant. I've analyzed your {crop_type} plant and detected {disease_name} with {confidence}% confidence.

I can help you understand:
• More about this disease and its symptoms
• Treatment options and costs
• Prevention methods
• When to seek expert help

What would you like to know?"""
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
# =============================================================================
|
| 396 |
+
# APPLICATION CONFIGURATIONS
|
| 397 |
+
# =============================================================================
|
| 398 |
+
|
| 399 |
+
@dataclass
class AppConfig:
    """General application configuration (branding, limits, feature flags)."""

    # App information
    app_name: str = "FarmEyes"
    app_version: str = "2.0.0"
    app_tagline: str = "AI-Powered Crop Disease Detection for Nigerian Farmers"

    # Server settings (legacy Gradio support)
    server_host: str = "0.0.0.0"
    server_port: int = 7860
    share: bool = False

    # Debug mode
    # NOTE(review): hard-coded True here, while APIConfig.debug reads the
    # DEBUG env var — confirm which one is authoritative before production.
    debug: bool = True

    # Maximum image file size (in bytes) - 10MB
    max_image_size: int = 10 * 1024 * 1024

    # Supported image formats (file extensions accepted for upload)
    supported_image_formats: List[str] = field(default_factory=lambda: [
        ".jpg", ".jpeg", ".png", ".webp", ".bmp"
    ])

    # Confidence thresholds for user feedback
    high_confidence_threshold: float = 0.85
    medium_confidence_threshold: float = 0.60
    low_confidence_threshold: float = 0.40

    # Enable/disable features
    enable_voice_input: bool = True  # Voice input with Whisper
    enable_chat: bool = True  # Contextual chat
    enable_history: bool = True  # Session history
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
# =============================================================================
|
| 436 |
+
# DEVICE CONFIGURATION (Apple Silicon Specific)
|
| 437 |
+
# =============================================================================
|
| 438 |
+
|
| 439 |
+
@dataclass
class DeviceConfig:
    """Device and hardware configuration for Apple Silicon M1 Pro."""

    # Preferred compute device for model inference.
    compute_device: str = "mps"
    # Device used when the preferred one is unavailable.
    fallback_device: str = "cpu"
    # Whether PyTorch should use Metal Performance Shaders.
    use_mps: bool = True
    # Free cached GPU memory after each inference pass.
    clear_cache_after_inference: bool = True

    @staticmethod
    def get_device() -> str:
        """Return the best available device: 'mps', 'cuda', or 'cpu'."""
        import torch

        if torch.backends.mps.is_available():
            return "mps"
        if torch.cuda.is_available():
            return "cuda"
        return "cpu"

    @staticmethod
    def get_device_info() -> Dict[str, str]:
        """Collect platform and PyTorch details for the active device."""
        import torch
        import platform

        details = {
            "platform": platform.system(),
            "processor": platform.processor(),
            "python_version": platform.python_version(),
            "pytorch_version": torch.__version__,
            "device": DeviceConfig.get_device()
        }

        # Only report MPS details when the backend is actually usable.
        if torch.backends.mps.is_available():
            details["mps_available"] = "Yes"
            details["mps_built"] = str(torch.backends.mps.is_built())

        return details
|
| 486 |
+
|
| 487 |
+
|
| 488 |
+
# =============================================================================
|
| 489 |
+
# PROMPT TEMPLATES CONFIGURATION
|
| 490 |
+
# =============================================================================
|
| 491 |
+
|
| 492 |
+
@dataclass
class PromptConfig:
    """Configuration for N-ATLaS prompt templates and per-task sampling."""

    # System prompt for the N-ATLaS model (sent verbatim)
    system_prompt: str = """You are FarmEyes, an AI agricultural assistant helping Nigerian farmers.
You provide advice about crop diseases and treatments in a clear, simple, and helpful manner.
Always be respectful and use language that farmers can easily understand.
When providing treatment costs, use Nigerian Naira (₦).
Focus on practical advice that farmers can implement."""

    # Maximum length for translated text
    max_translation_length: int = 500

    # Temperature per task: low for faithful translation,
    # higher for free-form diagnosis text, mid for chat
    translation_temperature: float = 0.3
    diagnosis_temperature: float = 0.7
    chat_temperature: float = 0.6
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
# =============================================================================
|
| 513 |
+
# LOGGING CONFIGURATION
|
| 514 |
+
# =============================================================================
|
| 515 |
+
|
| 516 |
+
@dataclass
class LogConfig:
    """Logging configuration."""

    # Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL
    log_level: str = "INFO"

    # Log file path
    # NOTE(review): this module only creates DATA/STATIC/MODELS/OUTPUTS/UPLOADS;
    # the "logs" directory is never created here, so file logging will fail
    # unless the logging setup creates it — confirm.
    log_file: Path = BASE_DIR / "logs" / "farmeyes.log"

    # Log format
    log_format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

    # Enable console logging
    console_logging: bool = True

    # Enable file logging
    file_logging: bool = True
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
# =============================================================================
|
| 537 |
+
# INSTANTIATE DEFAULT CONFIGURATIONS
|
| 538 |
+
# =============================================================================
|
| 539 |
+
|
| 540 |
+
# Create default configuration instances.
# Import these module-level singletons (e.g. `from config import yolo_config`)
# rather than instantiating the dataclasses again, so every module shares
# the same configuration objects.
api_config = APIConfig()
session_config = SessionConfig()
whisper_config = WhisperConfig()
yolo_config = YOLOConfig()
natlas_config = NATLaSConfig()
language_config = LanguageConfig()
chat_config = ChatConfig()
app_config = AppConfig()
device_config = DeviceConfig()
prompt_config = PromptConfig()
log_config = LogConfig()
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
# =============================================================================
|
| 555 |
+
# UTILITY FUNCTIONS
|
| 556 |
+
# =============================================================================
|
| 557 |
+
|
| 558 |
+
def get_disease_key(class_index: int) -> Optional[str]:
    """Get the disease key for a YOLO class index.

    Returns None for indices outside the 6-class model (0-5).
    """
    return CLASS_INDEX_TO_KEY.get(class_index)
|
| 561 |
+
|
| 562 |
+
|
| 563 |
+
def get_class_index(disease_key: str) -> Optional[int]:
    """Get the YOLO class index for a disease key.

    Returns None when the key is not one of the 6 known diseases.
    """
    return KEY_TO_CLASS_INDEX.get(disease_key)
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
def get_crop_type(class_index: int) -> Optional[str]:
    """Get the crop type ("cassava"/"cocoa"/"tomato") for a class index.

    Returns None for indices outside the 6-class model.
    """
    return CLASS_TO_CROP.get(class_index)
|
| 571 |
+
|
| 572 |
+
|
| 573 |
+
def is_healthy(class_index: int) -> bool:
    """Check if a class index represents a healthy plant.

    Always False for the 6-class model (HEALTHY_CLASS_INDICES is empty);
    kept for API compatibility with models that include healthy classes.
    """
    return class_index in HEALTHY_CLASS_INDICES
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
def validate_config() -> Dict[str, bool]:
    """Validate that all required configuration files and paths exist.

    Returns a mapping of check name -> bool so the caller can decide which
    failures are fatal (e.g. the N-ATLaS GGUF may be downloaded lazily).
    """
    validations = {
        "knowledge_base_exists": KNOWLEDGE_BASE_PATH.exists(),
        "ui_translations_exists": UI_TRANSLATIONS_PATH.exists(),
        "models_dir_exists": MODELS_DIR.exists(),
        "yolo_model_exists": yolo_config.model_path.exists(),
        "natlas_model_exists": natlas_config.gguf_path.exists(),
        "frontend_dir_exists": FRONTEND_DIR.exists(),
    }
    return validations
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
def print_config_summary():
    """Print a human-readable summary of the active configuration.

    Intended for a quick `python config.py` sanity check; writes to stdout
    only and reads the module-level singleton config instances.
    """
    print("=" * 60)
    print("FarmEyes Configuration Summary v2.0")
    print("=" * 60)

    print(f"\n📁 Paths:")
    print(f"   Base Directory: {BASE_DIR}")
    print(f"   Knowledge Base: {KNOWLEDGE_BASE_PATH}")
    print(f"   Frontend: {FRONTEND_DIR}")

    print(f"\n🌐 API Configuration:")
    print(f"   Host: {api_config.host}:{api_config.port}")
    print(f"   Debug: {api_config.debug}")
    print(f"   HuggingFace: {api_config.is_huggingface}")

    print(f"\n🤖 YOLOv11 Model:")
    print(f"   Model Path: {yolo_config.model_path}")
    print(f"   Confidence: {yolo_config.confidence_threshold}")
    print(f"   Classes: {len(CLASS_NAMES)}")

    print(f"\n🗣️ N-ATLaS Model:")
    print(f"   HF Repo: {natlas_config.hf_repo}")
    print(f"   Chat Max Tokens: {natlas_config.chat_max_tokens}")

    print(f"\n🎤 Whisper (Voice):")
    print(f"   Model Size: {whisper_config.model_size}")
    print(f"   Device: {whisper_config.device}")

    print(f"\n💬 Chat:")
    print(f"   Enabled: {app_config.enable_chat}")
    print(f"   Voice Input: {app_config.enable_voice_input}")

    print(f"\n🌍 Languages:")
    print(f"   Supported: {', '.join(language_config.supported_languages)}")

    print("\n" + "=" * 60)
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
# =============================================================================
|
| 631 |
+
# MAIN - Run configuration check
|
| 632 |
+
# =============================================================================
|
| 633 |
+
|
| 634 |
+
# Allow `python config.py` to print the active configuration for debugging.
if __name__ == "__main__":
    print_config_summary()
|
data/knowledge_base.json
ADDED
|
@@ -0,0 +1,1115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"_metadata": {
|
| 3 |
+
"version": "1.0.0",
|
| 4 |
+
"created": "2025-12-13",
|
| 5 |
+
"description": "FarmEyes Disease Knowledge Base - 6 disease classes for Nigerian farmers. All content in English - N-ATLaS handles runtime translation to Hausa, Yoruba, Igbo.",
|
| 6 |
+
"crops_covered": [
|
| 7 |
+
"cassava",
|
| 8 |
+
"cocoa",
|
| 9 |
+
"tomato"
|
| 10 |
+
],
|
| 11 |
+
"total_classes": 6,
|
| 12 |
+
"currency": "NGN",
|
| 13 |
+
"last_updated": "2025-12-13",
|
| 14 |
+
"note": "6-class model (diseases only, no healthy classes). N-ATLaS model performs all translations to local languages during app usage."
|
| 15 |
+
},
|
| 16 |
+
"diseases": {
|
| 17 |
+
"cassava_bacterial_blight": {
|
| 18 |
+
"id": "CBB_001",
|
| 19 |
+
"class_name": "Cassava Bacteria Blight",
|
| 20 |
+
"display_name": "Cassava Bacterial Blight",
|
| 21 |
+
"scientific_name": "Xanthomonas axonopodis pv. manihotis",
|
| 22 |
+
"crop": "cassava",
|
| 23 |
+
"category": "bacterial",
|
| 24 |
+
"is_disease": true,
|
| 25 |
+
"severity": {
|
| 26 |
+
"level": "high",
|
| 27 |
+
"scale": 4,
|
| 28 |
+
"max_scale": 5,
|
| 29 |
+
"description": "Severe bacterial disease that can cause significant yield losses, especially during rainy season"
|
| 30 |
+
},
|
| 31 |
+
"symptoms": [
|
| 32 |
+
"Angular leaf spots that appear water-soaked",
|
| 33 |
+
"Leaf wilting and yellowing starting from the edges",
|
| 34 |
+
"Gum exudation (bacterial ooze) from stems - sticky yellowish substance",
|
| 35 |
+
"Dieback of shoot tips and young branches",
|
| 36 |
+
"Blighting and death of leaves",
|
| 37 |
+
"Vascular discoloration (brown streaks) when stem is cut",
|
| 38 |
+
"Canker formation on stems in severe cases"
|
| 39 |
+
],
|
| 40 |
+
"how_it_spreads": [
|
| 41 |
+
"Infected planting materials (stem cuttings) - most common source",
|
| 42 |
+
"Rain splash spreading bacteria between plants",
|
| 43 |
+
"Contaminated cutting tools and farm equipment",
|
| 44 |
+
"Wind-driven rain carrying bacteria",
|
| 45 |
+
"Workers' hands and clothing after touching infected plants"
|
| 46 |
+
],
|
| 47 |
+
"favorable_conditions": {
|
| 48 |
+
"temperature": "25-30°C",
|
| 49 |
+
"humidity": "Above 80%",
|
| 50 |
+
"season": "Rainy season (May-October in Nigeria)",
|
| 51 |
+
"other": "Waterlogged soils, poor drainage, dense plant spacing"
|
| 52 |
+
},
|
| 53 |
+
"yield_loss": {
|
| 54 |
+
"min_percent": 20,
|
| 55 |
+
"max_percent": 100,
|
| 56 |
+
"average_percent": 50,
|
| 57 |
+
"description": "Can cause 20-100% yield loss depending on severity, variety susceptibility, and time of infection"
|
| 58 |
+
},
|
| 59 |
+
"treatments": {
|
| 60 |
+
"cultural": [
|
| 61 |
+
{
|
| 62 |
+
"method": "Use disease-free planting materials",
|
| 63 |
+
"description": "Select healthy stems from certified disease-free fields. Inspect stems carefully before planting.",
|
| 64 |
+
"effectiveness": "high",
|
| 65 |
+
"cost_ngn": 0,
|
| 66 |
+
"timing": "Before planting"
|
| 67 |
+
},
|
| 68 |
+
{
|
| 69 |
+
"method": "Roguing (remove infected plants)",
|
| 70 |
+
"description": "Immediately remove and burn all infected plants. Do not leave debris in field.",
|
| 71 |
+
"effectiveness": "high",
|
| 72 |
+
"cost_ngn_per_hectare": 5000,
|
| 73 |
+
"timing": "As soon as symptoms appear"
|
| 74 |
+
},
|
| 75 |
+
{
|
| 76 |
+
"method": "Crop rotation",
|
| 77 |
+
"description": "Plant non-host crops like maize, sorghum, or legumes for 2-3 years before returning cassava to the field.",
|
| 78 |
+
"effectiveness": "medium",
|
| 79 |
+
"cost_ngn": 0,
|
| 80 |
+
"timing": "Seasonal planning"
|
| 81 |
+
},
|
| 82 |
+
{
|
| 83 |
+
"method": "Tool sanitation",
|
| 84 |
+
"description": "Disinfect cutting tools with 10% bleach solution between plants and fields.",
|
| 85 |
+
"effectiveness": "high",
|
| 86 |
+
"cost_ngn": 500,
|
| 87 |
+
"timing": "During all field operations"
|
| 88 |
+
}
|
| 89 |
+
],
|
| 90 |
+
"chemical": [
|
| 91 |
+
{
|
| 92 |
+
"product_name": "Copper-based bactericide",
|
| 93 |
+
"active_ingredient": "Copper hydroxide",
|
| 94 |
+
"local_brands": [
|
| 95 |
+
"Kocide 101",
|
| 96 |
+
"Nordox 75",
|
| 97 |
+
"Funguran-OH",
|
| 98 |
+
"Champion WP"
|
| 99 |
+
],
|
| 100 |
+
"cost_ngn_min": 8000,
|
| 101 |
+
"cost_ngn_max": 15000,
|
| 102 |
+
"cost_unit": "per hectare per application",
|
| 103 |
+
"dosage": "2-3 kg per hectare in 400-500L water",
|
| 104 |
+
"frequency": "Every 2-3 weeks during rainy season",
|
| 105 |
+
"application_method": "Spray thoroughly on leaves and stems, especially undersides of leaves",
|
| 106 |
+
"effectiveness": "medium",
|
| 107 |
+
"safety_precautions": [
|
| 108 |
+
"Wear protective clothing, gloves, and face mask",
|
| 109 |
+
"Do not spray on windy days",
|
| 110 |
+
"Avoid contact with skin and eyes",
|
| 111 |
+
"Wait 7 days before harvest"
|
| 112 |
+
]
|
| 113 |
+
}
|
| 114 |
+
],
|
| 115 |
+
"resistant_varieties": [
|
| 116 |
+
{
|
| 117 |
+
"variety_name": "TMS 30572",
|
| 118 |
+
"resistance_level": "high",
|
| 119 |
+
"source": "IITA Ibadan, NRCRI Umudike",
|
| 120 |
+
"cost_ngn_per_bundle": 15000,
|
| 121 |
+
"notes": "Widely available, good yield potential"
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"variety_name": "TMS 4(2)1425",
|
| 125 |
+
"resistance_level": "high",
|
| 126 |
+
"source": "IITA Ibadan",
|
| 127 |
+
"cost_ngn_per_bundle": 18000,
|
| 128 |
+
"notes": "High yielding with good disease resistance"
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"variety_name": "NR 8083",
|
| 132 |
+
"resistance_level": "medium",
|
| 133 |
+
"source": "NRCRI Umudike",
|
| 134 |
+
"cost_ngn_per_bundle": 15000,
|
| 135 |
+
"notes": "Good for multiple disease resistance"
|
| 136 |
+
}
|
| 137 |
+
],
|
| 138 |
+
"traditional": [
|
| 139 |
+
{
|
| 140 |
+
"method": "Wood ash application",
|
| 141 |
+
"description": "Apply wood ash around plant base after rain. Creates alkaline environment less favorable for bacteria.",
|
| 142 |
+
"effectiveness": "low",
|
| 143 |
+
"cost_ngn": 0
|
| 144 |
+
}
|
| 145 |
+
]
|
| 146 |
+
},
|
| 147 |
+
"total_treatment_cost": {
|
| 148 |
+
"min_ngn": 8000,
|
| 149 |
+
"max_ngn": 25000,
|
| 150 |
+
"per": "hectare",
|
| 151 |
+
"notes": "Using resistant varieties is the most cost-effective long-term solution"
|
| 152 |
+
},
|
| 153 |
+
"prevention": [
|
| 154 |
+
"Use certified disease-free planting materials from reputable sources (IITA, NRCRI, accredited seed companies)",
|
| 155 |
+
"Plant resistant varieties (TMS 30572, TMS 4(2)1425, NR 8083)",
|
| 156 |
+
"Practice field sanitation - remove all crop debris after harvest",
|
| 157 |
+
"Avoid working in fields when plants are wet from rain or dew",
|
| 158 |
+
"Disinfect cutting tools with 10% bleach solution between plants",
|
| 159 |
+
"Implement 2-3 year crop rotation with non-host crops (maize, legumes)",
|
| 160 |
+
"Ensure proper drainage to reduce humidity around plants",
|
| 161 |
+
"Maintain recommended plant spacing (1m x 1m) for good air circulation",
|
| 162 |
+
"Scout fields regularly for early disease detection"
|
| 163 |
+
],
|
| 164 |
+
"health_projection": {
|
| 165 |
+
"early_detection": {
|
| 166 |
+
"recovery_chance_percent": 80,
|
| 167 |
+
"message": "If treated within 2 weeks of first symptoms appearing, approximately 80% of your field can be saved. Remove infected plants immediately and apply copper spray to remaining plants."
|
| 168 |
+
},
|
| 169 |
+
"moderate_infection": {
|
| 170 |
+
"recovery_chance_percent": 50,
|
| 171 |
+
"message": "With moderate infection (less than 30% of plants affected), expect 50% yield recovery with immediate treatment. Focus on protecting healthy plants."
|
| 172 |
+
},
|
| 173 |
+
"severe_infection": {
|
| 174 |
+
"recovery_chance_percent": 20,
|
| 175 |
+
"message": "Severe infection requires removing all affected plants. Only 20% may be salvageable. Consider replanting with resistant varieties next season."
|
| 176 |
+
}
|
| 177 |
+
},
|
| 178 |
+
"expert_contact": {
|
| 179 |
+
"institution": "National Root Crops Research Institute (NRCRI)",
|
| 180 |
+
"location": "Umudike, Abia State, Nigeria",
|
| 181 |
+
"services": "Disease diagnosis, resistant variety seeds, extension services"
|
| 182 |
+
}
|
| 183 |
+
},
|
| 184 |
+
"cassava_mosaic_virus": {
|
| 185 |
+
"id": "CMD_001",
|
| 186 |
+
"class_name": "Cassava Mosaic Virus",
|
| 187 |
+
"display_name": "Cassava Mosaic Virus",
|
| 188 |
+
"scientific_name": "Cassava mosaic geminiviruses (CMGs) - African cassava mosaic virus, East African cassava mosaic virus",
|
| 189 |
+
"crop": "cassava",
|
| 190 |
+
"category": "viral",
|
| 191 |
+
"is_disease": true,
|
| 192 |
+
"severity": {
|
| 193 |
+
"level": "very_high",
|
| 194 |
+
"scale": 5,
|
| 195 |
+
"max_scale": 5,
|
| 196 |
+
"description": "Most devastating cassava disease in Africa. Can cause complete crop failure in susceptible varieties."
|
| 197 |
+
},
|
| 198 |
+
"symptoms": [
|
| 199 |
+
"Mosaic pattern of yellow/light green and dark green patches on leaves",
|
| 200 |
+
"Leaf curling, twisting, and distortion",
|
| 201 |
+
"Reduced leaf size compared to healthy plants",
|
| 202 |
+
"Severely stunted plant growth",
|
| 203 |
+
"Misshapen, small, or no tuber formation",
|
| 204 |
+
"Chlorosis (yellowing) along leaf veins",
|
| 205 |
+
"Leaves may become completely yellow in severe cases"
|
| 206 |
+
],
|
| 207 |
+
"how_it_spreads": [
|
| 208 |
+
"Whiteflies (Bemisia tabaci) - primary vector, transmit virus while feeding",
|
| 209 |
+
"Infected stem cuttings used for planting - carries virus to new fields",
|
| 210 |
+
"Mechanical transmission through contaminated tools (minor)",
|
| 211 |
+
"NOT spread by contact between plants or through soil"
|
| 212 |
+
],
|
| 213 |
+
"favorable_conditions": {
|
| 214 |
+
"temperature": "25-35°C",
|
| 215 |
+
"humidity": "Variable - disease occurs in all humidity levels",
|
| 216 |
+
"season": "Year-round, but more severe in dry season when whitefly populations peak",
|
| 217 |
+
"other": "High whitefly populations, planting infected cuttings, presence of infected plants nearby"
|
| 218 |
+
},
|
| 219 |
+
"yield_loss": {
|
| 220 |
+
"min_percent": 30,
|
| 221 |
+
"max_percent": 95,
|
| 222 |
+
"average_percent": 50,
|
| 223 |
+
"description": "Can cause 30-95% yield loss. Severely infected plants may produce no usable tubers at all."
|
| 224 |
+
},
|
| 225 |
+
"treatments": {
|
| 226 |
+
"cultural": [
|
| 227 |
+
{
|
| 228 |
+
"method": "Use virus-free planting materials",
|
| 229 |
+
"description": "Source stem cuttings only from certified disease-free multiplication sites. Never take cuttings from infected plants.",
|
| 230 |
+
"effectiveness": "very_high",
|
| 231 |
+
"cost_ngn": 0,
|
| 232 |
+
"timing": "Before planting"
|
| 233 |
+
},
|
| 234 |
+
{
|
| 235 |
+
"method": "Roguing infected plants",
|
| 236 |
+
"description": "Remove and destroy infected plants as soon as symptoms appear. This prevents whiteflies from spreading virus to healthy plants.",
|
| 237 |
+
"effectiveness": "high",
|
| 238 |
+
"cost_ngn_per_hectare": 5000,
|
| 239 |
+
"timing": "Weekly scouting and removal"
|
| 240 |
+
},
|
| 241 |
+
{
|
| 242 |
+
"method": "Early planting",
|
| 243 |
+
"description": "Plant at onset of rains when whitefly populations are lower. This gives plants time to establish before peak whitefly season.",
|
| 244 |
+
"effectiveness": "medium",
|
| 245 |
+
"cost_ngn": 0,
|
| 246 |
+
"timing": "Start of rainy season"
|
| 247 |
+
},
|
| 248 |
+
{
|
| 249 |
+
"method": "Remove volunteer plants",
|
| 250 |
+
"description": "Remove any cassava plants that grow from previous season's debris. These can harbor virus.",
|
| 251 |
+
"effectiveness": "medium",
|
| 252 |
+
"cost_ngn_per_hectare": 3000,
|
| 253 |
+
"timing": "Before and during planting"
|
| 254 |
+
}
|
| 255 |
+
],
|
| 256 |
+
"chemical": [
|
| 257 |
+
{
|
| 258 |
+
"product_name": "Imidacloprid (for whitefly control)",
|
| 259 |
+
"active_ingredient": "Imidacloprid",
|
| 260 |
+
"local_brands": [
|
| 261 |
+
"Confidor",
|
| 262 |
+
"Gaucho",
|
| 263 |
+
"Admire",
|
| 264 |
+
"Kohinor"
|
| 265 |
+
],
|
| 266 |
+
"cost_ngn_min": 5000,
|
| 267 |
+
"cost_ngn_max": 12000,
|
| 268 |
+
"cost_unit": "per hectare per application",
|
| 269 |
+
"dosage": "Follow label instructions - typically 100-200ml per hectare",
|
| 270 |
+
"frequency": "Every 2-3 weeks when whitefly pressure is high",
|
| 271 |
+
"application_method": "Spray on leaves, targeting undersides where whiteflies feed",
|
| 272 |
+
"effectiveness": "medium",
|
| 273 |
+
"important_note": "This controls whiteflies but does NOT cure already infected plants. Infected plants must be removed.",
|
| 274 |
+
"safety_precautions": [
|
| 275 |
+
"Highly toxic to bees - apply in evening when bees are not active",
|
| 276 |
+
"Wear protective clothing and gloves",
|
| 277 |
+
"Do not spray near water sources",
|
| 278 |
+
"Follow pre-harvest interval on label"
|
| 279 |
+
]
|
| 280 |
+
}
|
| 281 |
+
],
|
| 282 |
+
"resistant_varieties": [
|
| 283 |
+
{
|
| 284 |
+
"variety_name": "TME 419",
|
| 285 |
+
"resistance_level": "very_high",
|
| 286 |
+
"source": "IITA Ibadan, state ADPs",
|
| 287 |
+
"cost_ngn_per_bundle": 20000,
|
| 288 |
+
"notes": "Most widely recommended CMD-resistant variety. High yield."
|
| 289 |
+
},
|
| 290 |
+
{
|
| 291 |
+
"variety_name": "UMUCASS 36 (TMS 01/1368)",
|
| 292 |
+
"resistance_level": "very_high",
|
| 293 |
+
"source": "NRCRI Umudike",
|
| 294 |
+
"cost_ngn_per_bundle": 22000,
|
| 295 |
+
"notes": "Excellent CMD resistance with high dry matter content"
|
| 296 |
+
},
|
| 297 |
+
{
|
| 298 |
+
"variety_name": "UMUCASS 37 (TMS 01/1412)",
|
| 299 |
+
"resistance_level": "very_high",
|
| 300 |
+
"source": "NRCRI Umudike",
|
| 301 |
+
"cost_ngn_per_bundle": 22000,
|
| 302 |
+
"notes": "Good for garri processing"
|
| 303 |
+
},
|
| 304 |
+
{
|
| 305 |
+
"variety_name": "UMUCASS 38 (TMS 01/1371)",
|
| 306 |
+
"resistance_level": "high",
|
| 307 |
+
"source": "NRCRI Umudike",
|
| 308 |
+
"cost_ngn_per_bundle": 20000,
|
| 309 |
+
"notes": "Multiple disease resistance"
|
| 310 |
+
},
|
| 311 |
+
{
|
| 312 |
+
"variety_name": "TMS 98/0581",
|
| 313 |
+
"resistance_level": "high",
|
| 314 |
+
"source": "IITA Ibadan",
|
| 315 |
+
"cost_ngn_per_bundle": 18000,
|
| 316 |
+
"notes": "Good yield with CMD tolerance"
|
| 317 |
+
}
|
| 318 |
+
],
|
| 319 |
+
"traditional": [
|
| 320 |
+
{
|
| 321 |
+
"method": "Neem leaf extract spray",
|
| 322 |
+
"description": "Crush 1kg fresh neem leaves, soak in 5 liters of water overnight, strain and spray. Repels whiteflies.",
|
| 323 |
+
"effectiveness": "low",
|
| 324 |
+
"cost_ngn": 2000
|
| 325 |
+
}
|
| 326 |
+
]
|
| 327 |
+
},
|
| 328 |
+
"total_treatment_cost": {
|
| 329 |
+
"min_ngn": 5000,
|
| 330 |
+
"max_ngn": 35000,
|
| 331 |
+
"per": "hectare",
|
| 332 |
+
"notes": "IMPORTANT: There is NO CURE for viral diseases. The best investment is planting resistant varieties. Infected plants cannot be cured and must be removed."
|
| 333 |
+
},
|
| 334 |
+
"prevention": [
|
| 335 |
+
"Plant CMD-resistant varieties (TME 419, UMUCASS 36, 37, 38) - MOST IMPORTANT",
|
| 336 |
+
"Source planting materials only from certified disease-free sources",
|
| 337 |
+
"Never take cuttings from plants showing any mosaic symptoms",
|
| 338 |
+
"Control whitefly populations with insecticides or neem extracts",
|
| 339 |
+
"Remove and burn all volunteer cassava plants from previous seasons",
|
| 340 |
+
"Practice thorough field sanitation after harvest",
|
| 341 |
+
"Avoid planting new cassava fields adjacent to infected fields",
|
| 342 |
+
"Inspect plants weekly and remove infected ones immediately",
|
| 343 |
+
"Do not transport cuttings from areas with high CMD incidence"
|
| 344 |
+
],
|
| 345 |
+
"health_projection": {
|
| 346 |
+
"early_detection": {
|
| 347 |
+
"recovery_chance_percent": 70,
|
| 348 |
+
"message": "If you detect CMD early and immediately remove infected plants, you can protect approximately 70% of your yield. The key is stopping spread to healthy plants."
|
| 349 |
+
},
|
| 350 |
+
"moderate_infection": {
|
| 351 |
+
"recovery_chance_percent": 40,
|
| 352 |
+
"message": "With moderate infection across the field, focus on removing all infected plants and protecting the remaining healthy ones. Expected yield recovery is about 40%."
|
| 353 |
+
},
|
| 354 |
+
"severe_infection": {
|
| 355 |
+
"recovery_chance_percent": 10,
|
| 356 |
+
"message": "Severe CMD infection has spread widely. This season's harvest will be significantly reduced. Plan to replant next season using resistant varieties only."
|
| 357 |
+
}
|
| 358 |
+
},
|
| 359 |
+
"expert_contact": {
|
| 360 |
+
"institution": "International Institute of Tropical Agriculture (IITA)",
|
| 361 |
+
"location": "Ibadan, Oyo State, Nigeria",
|
| 362 |
+
"services": "CMD-resistant varieties, disease diagnosis, training on CMD management"
|
| 363 |
+
}
|
| 364 |
+
},
|
| 365 |
+
"cocoa_monilia_disease": {
|
| 366 |
+
"id": "CMN_001",
|
| 367 |
+
"class_name": "Cocoa Monilia Disease",
|
| 368 |
+
"display_name": "Frosty Pod Rot (Monilia Disease)",
|
| 369 |
+
"scientific_name": "Moniliophthora roreri",
|
| 370 |
+
"crop": "cocoa",
|
| 371 |
+
"category": "fungal",
|
| 372 |
+
"is_disease": true,
|
| 373 |
+
"severity": {
|
| 374 |
+
"level": "high",
|
| 375 |
+
"scale": 4,
|
| 376 |
+
"max_scale": 5,
|
| 377 |
+
"description": "Serious fungal disease that can destroy entire pod harvests. The white 'frosty' spore covering produces millions of spores that spread rapidly."
|
| 378 |
+
},
|
| 379 |
+
"symptoms": [
|
| 380 |
+
"White or cream-colored powdery coating on pods giving a 'frosty' appearance",
|
| 381 |
+
"Brown spots that enlarge rapidly on pod surface",
|
| 382 |
+
"Irregular swelling or lumps on pods before external symptoms appear",
|
| 383 |
+
"Internal pod rot with liquefied, foul-smelling pulp",
|
| 384 |
+
"Premature ripening or blackening of pods",
|
| 385 |
+
"Beans inside become sticky, clumped together, and unusable",
|
| 386 |
+
"Strong unpleasant odor from infected pods"
|
| 387 |
+
],
|
| 388 |
+
"how_it_spreads": [
|
| 389 |
+
"Wind dispersal of spores from infected pods - can travel several kilometers",
|
| 390 |
+
"Rain splash spreading spores to nearby pods",
|
| 391 |
+
"Contact with infected pods during harvesting",
|
| 392 |
+
"Contaminated harvesting tools (machetes, baskets)",
|
| 393 |
+
"Leaving infected pods on trees or ground provides continuous spore source"
|
| 394 |
+
],
|
| 395 |
+
"favorable_conditions": {
|
| 396 |
+
"temperature": "20-28°C (optimal around 25°C)",
|
| 397 |
+
"humidity": "Above 85%",
|
| 398 |
+
"season": "Peak during rainy season, especially with prolonged wet periods",
|
| 399 |
+
"other": "Poor air circulation, excessive shade, leaving infected pods in field"
|
| 400 |
+
},
|
| 401 |
+
"yield_loss": {
|
| 402 |
+
"min_percent": 25,
|
| 403 |
+
"max_percent": 90,
|
| 404 |
+
"average_percent": 50,
|
| 405 |
+
"description": "Can cause 25-90% pod losses in favorable conditions. Unmanaged outbreaks can destroy nearly entire harvests."
|
| 406 |
+
},
|
| 407 |
+
"treatments": {
|
| 408 |
+
"cultural": [
|
| 409 |
+
{
|
| 410 |
+
"method": "Weekly removal and destruction of infected pods",
|
| 411 |
+
"description": "Inspect all trees weekly. Remove any pod showing symptoms. Bury pods 30cm deep or burn them. Never leave infected pods on ground.",
|
| 412 |
+
"effectiveness": "high",
|
| 413 |
+
"cost_ngn_per_hectare": 10000,
|
| 414 |
+
"cost_frequency": "per month (labor)"
|
| 415 |
+
},
|
| 416 |
+
{
|
| 417 |
+
"method": "Shade management",
|
| 418 |
+
"description": "Reduce shade canopy to 50% to improve air circulation and reduce humidity in the canopy.",
|
| 419 |
+
"effectiveness": "medium",
|
| 420 |
+
"cost_ngn_per_hectare": 15000,
|
| 421 |
+
"cost_frequency": "one-time pruning cost"
|
| 422 |
+
},
|
| 423 |
+
{
|
| 424 |
+
"method": "Tree pruning",
|
| 425 |
+
"description": "Regular pruning to open up tree canopy, improve air flow, and make pods more accessible for inspection and harvesting.",
|
| 426 |
+
"effectiveness": "medium",
|
| 427 |
+
"cost_ngn_per_hectare": 20000,
|
| 428 |
+
"cost_frequency": "annually"
|
| 429 |
+
},
|
| 430 |
+
{
|
| 431 |
+
"method": "Prompt harvesting",
|
| 432 |
+
"description": "Harvest mature pods immediately. Overripe pods are more susceptible to infection.",
|
| 433 |
+
"effectiveness": "medium",
|
| 434 |
+
"cost_ngn": 0
|
| 435 |
+
}
|
| 436 |
+
],
|
| 437 |
+
"chemical": [
|
| 438 |
+
{
|
| 439 |
+
"product_name": "Copper-based fungicide",
|
| 440 |
+
"active_ingredient": "Copper hydroxide or Copper oxychloride",
|
| 441 |
+
"local_brands": [
|
| 442 |
+
"Kocide 101",
|
| 443 |
+
"Nordox 75",
|
| 444 |
+
"Koka Blue 50 WG",
|
| 445 |
+
"Funguran-OH"
|
| 446 |
+
],
|
| 447 |
+
"cost_ngn_min": 15000,
|
| 448 |
+
"cost_ngn_max": 25000,
|
| 449 |
+
"cost_unit": "per hectare per application",
|
| 450 |
+
"dosage": "2.5-3 kg per hectare in 500L water",
|
| 451 |
+
"frequency": "Monthly during pod development season",
|
| 452 |
+
"application_method": "Spray pods thoroughly, especially young developing pods. Focus on lower trunk where pods form.",
|
| 453 |
+
"effectiveness": "medium",
|
| 454 |
+
"safety_precautions": [
|
| 455 |
+
"Wear protective clothing and mask",
|
| 456 |
+
"Apply in calm weather conditions",
|
| 457 |
+
"Avoid spraying during rain"
|
| 458 |
+
]
|
| 459 |
+
},
|
| 460 |
+
{
|
| 461 |
+
"product_name": "Metalaxyl + Mancozeb combination",
|
| 462 |
+
"active_ingredient": "Metalaxyl 12% + Mancozeb 60%",
|
| 463 |
+
"local_brands": [
|
| 464 |
+
"Ridomil Gold MZ",
|
| 465 |
+
"Agro-laxyl 63.5 WP"
|
| 466 |
+
],
|
| 467 |
+
"cost_ngn_min": 20000,
|
| 468 |
+
"cost_ngn_max": 35000,
|
| 469 |
+
"cost_unit": "per hectare per application",
|
| 470 |
+
"dosage": "2-2.5 kg per hectare",
|
| 471 |
+
"frequency": "Every 3-4 weeks during critical period (peak rainy season)",
|
| 472 |
+
"application_method": "Apply as preventive treatment before disease onset for best results",
|
| 473 |
+
"effectiveness": "high",
|
| 474 |
+
"safety_precautions": [
|
| 475 |
+
"Wear full protective equipment",
|
| 476 |
+
"Do not apply within 14 days of harvest",
|
| 477 |
+
"Store away from food items"
|
| 478 |
+
]
|
| 479 |
+
}
|
| 480 |
+
],
|
| 481 |
+
"biological": [
|
| 482 |
+
{
|
| 483 |
+
"method": "Trichoderma-based biocontrol",
|
| 484 |
+
"description": "Beneficial fungi that compete with and suppress disease fungi. Spray on pods and trunk.",
|
| 485 |
+
"effectiveness": "medium",
|
| 486 |
+
"cost_ngn_per_hectare_min": 12000,
|
| 487 |
+
"cost_ngn_per_hectare_max": 20000,
|
| 488 |
+
"source": "Available from CRIN (Cocoa Research Institute of Nigeria) and some agro-dealers",
|
| 489 |
+
"notes": "Best used as part of integrated management, not as sole treatment"
|
| 490 |
+
}
|
| 491 |
+
],
|
| 492 |
+
"traditional": [
|
| 493 |
+
{
|
| 494 |
+
"method": "Palm oil coating on pods",
|
| 495 |
+
"description": "Mix palm oil with water, spray on young pods. Creates a physical barrier against spore infection.",
|
| 496 |
+
"effectiveness": "low",
|
| 497 |
+
"cost_ngn": 5000,
|
| 498 |
+
"notes": "Traditional method with limited scientific validation"
|
| 499 |
+
}
|
| 500 |
+
]
|
| 501 |
+
},
|
| 502 |
+
"total_treatment_cost": {
|
| 503 |
+
"min_ngn": 15000,
|
| 504 |
+
"max_ngn": 50000,
|
| 505 |
+
"per": "hectare per season",
|
| 506 |
+
"notes": "Combination of regular pod removal (sanitation) with fungicide application gives best results"
|
| 507 |
+
},
|
| 508 |
+
"prevention": [
|
| 509 |
+
"Inspect trees and remove infected pods at least weekly",
|
| 510 |
+
"Maintain shade canopy at 50% for good air circulation",
|
| 511 |
+
"Prune trees regularly to reduce humidity in canopy",
|
| 512 |
+
"Harvest mature pods promptly - do not leave overripe pods",
|
| 513 |
+
"Never leave infected or rotting pods on trees or ground",
|
| 514 |
+
"Bury removed pods at least 30cm deep or burn them",
|
| 515 |
+
"Clean harvesting tools between trees using soap solution",
|
| 516 |
+
"Apply preventive fungicide sprays before peak disease season",
|
| 517 |
+
"Maintain good drainage in plantation"
|
| 518 |
+
],
|
| 519 |
+
"health_projection": {
|
| 520 |
+
"early_detection": {
|
| 521 |
+
"recovery_chance_percent": 75,
|
| 522 |
+
"message": "Early detection with immediate pod removal can save approximately 75% of your harvest. Start weekly inspections now and remove every infected pod."
|
| 523 |
+
},
|
| 524 |
+
"moderate_infection": {
|
| 525 |
+
"recovery_chance_percent": 50,
|
| 526 |
+
"message": "Moderate infection requires intensive sanitation combined with fungicide application. With immediate action, expect to save about 50% of remaining pods."
|
| 527 |
+
},
|
| 528 |
+
"severe_infection": {
|
| 529 |
+
"recovery_chance_percent": 25,
|
| 530 |
+
"message": "Severe frosty pod rot outbreak. Remove all infected pods, apply fungicide to protect remaining healthy pods. Focus on protecting next season's production."
|
| 531 |
+
}
|
| 532 |
+
},
|
| 533 |
+
"expert_contact": {
|
| 534 |
+
"institution": "Cocoa Research Institute of Nigeria (CRIN)",
|
| 535 |
+
"location": "Ibadan, Oyo State, Nigeria",
|
| 536 |
+
"services": "Disease diagnosis, fungicide recommendations, resistant varieties, extension services"
|
| 537 |
+
}
|
| 538 |
+
},
|
| 539 |
+
"cocoa_phytophthora_disease": {
|
| 540 |
+
"id": "CPH_001",
|
| 541 |
+
"class_name": "Cocoa Phytophthora Disease",
|
| 542 |
+
"display_name": "Black Pod Disease",
|
| 543 |
+
"scientific_name": "Phytophthora palmivora and Phytophthora megakarya",
|
| 544 |
+
"crop": "cocoa",
|
| 545 |
+
"category": "oomycete",
|
| 546 |
+
"is_disease": true,
|
| 547 |
+
"severity": {
|
| 548 |
+
"level": "very_high",
|
| 549 |
+
"scale": 5,
|
| 550 |
+
"max_scale": 5,
|
| 551 |
+
"description": "Most serious cocoa disease in West Africa. P. megakarya (found in Nigeria) is more aggressive than P. palmivora and can destroy 60-100% of pods in severe outbreaks."
|
| 552 |
+
},
|
| 553 |
+
"symptoms": [
|
| 554 |
+
"Dark brown to black lesions starting at any point on the pod",
|
| 555 |
+
"Lesions spread very rapidly, covering entire pod within 10-14 days",
|
| 556 |
+
"White or grayish mold growth on pod surface in humid conditions",
|
| 557 |
+
"Firm pod becomes soft as internal rot progresses",
|
| 558 |
+
"Beans inside become shriveled, stuck together, and turn black",
|
| 559 |
+
"Canker lesions on stem bark with reddish-brown gum exudation",
|
| 560 |
+
"Wilting of leaves and dieback of branches in severe trunk infections"
|
| 561 |
+
],
|
| 562 |
+
"how_it_spreads": [
|
| 563 |
+
"Rain splash from infected pods - most important method",
|
| 564 |
+
"Infected pods on ground serve as continuous source of spores",
|
| 565 |
+
"Ants (especially Crematogaster striatula) carry spores between pods",
|
| 566 |
+
"Wind-driven rain spreading spores",
|
| 567 |
+
"Contaminated harvesting tools",
|
| 568 |
+
"Spores can survive in soil and plant debris"
|
| 569 |
+
],
|
| 570 |
+
"favorable_conditions": {
|
| 571 |
+
"temperature": "20-30°C (optimal around 25°C)",
|
| 572 |
+
"humidity": "Above 85%",
|
| 573 |
+
"season": "Peak during rainy season (May-October in southern Nigeria)",
|
| 574 |
+
"other": "High rainfall, poor drainage, excessive shade, infected pods left in field"
|
| 575 |
+
},
|
| 576 |
+
"yield_loss": {
|
| 577 |
+
"min_percent": 30,
|
| 578 |
+
"max_percent": 90,
|
| 579 |
+
"average_percent": 60,
|
| 580 |
+
"description": "Causes 30-90% pod losses. P. megakarya infections are faster and more destructive than P. palmivora. Annual losses estimated at over $700 million globally."
|
| 581 |
+
},
|
| 582 |
+
"treatments": {
|
| 583 |
+
"cultural": [
|
| 584 |
+
{
|
| 585 |
+
"method": "Frequent pod removal",
|
| 586 |
+
"description": "Remove all infected pods every 5-7 days. Bury pods at least 30cm deep or burn them. Never leave on ground surface.",
|
| 587 |
+
"effectiveness": "high",
|
| 588 |
+
"cost_ngn_per_hectare": 15000,
|
| 589 |
+
"cost_frequency": "per month (labor)"
|
| 590 |
+
},
|
| 591 |
+
{
|
| 592 |
+
"method": "Improve drainage",
|
| 593 |
+
"description": "Create drainage channels to prevent waterlogging. Remove stagnant water from around trees.",
|
| 594 |
+
"effectiveness": "medium",
|
| 595 |
+
"cost_ngn_per_hectare": 30000,
|
| 596 |
+
"cost_frequency": "one-time installation"
|
| 597 |
+
},
|
| 598 |
+
{
|
| 599 |
+
"method": "Shade and canopy management",
|
| 600 |
+
"description": "Maintain 50% shade, prune lower branches, and thin canopy to improve air circulation and reduce humidity.",
|
| 601 |
+
"effectiveness": "medium",
|
| 602 |
+
"cost_ngn_per_hectare": 25000,
|
| 603 |
+
"cost_frequency": "annually"
|
| 604 |
+
},
|
| 605 |
+
{
|
| 606 |
+
"method": "Ant control",
|
| 607 |
+
"description": "Destroy ant nests around trees. Ants spread disease spores between pods.",
|
| 608 |
+
"effectiveness": "low",
|
| 609 |
+
"cost_ngn_per_hectare": 5000
|
| 610 |
+
}
|
| 611 |
+
],
|
| 612 |
+
"chemical": [
|
| 613 |
+
{
|
| 614 |
+
"product_name": "Metalaxyl + Copper combination",
|
| 615 |
+
"active_ingredient": "Metalaxyl 12% + Copper-1-oxide 60%",
|
| 616 |
+
"local_brands": [
|
| 617 |
+
"Ridomil 72 Plus",
|
| 618 |
+
"Foko",
|
| 619 |
+
"Ridomil Gold Plus"
|
| 620 |
+
],
|
| 621 |
+
"cost_ngn_min": 25000,
|
| 622 |
+
"cost_ngn_max": 40000,
|
| 623 |
+
"cost_unit": "per hectare per application",
|
| 624 |
+
"dosage": "2.5-3 kg per hectare in 500L water",
|
| 625 |
+
"frequency": "3-4 applications during peak season (June, August, September, October)",
|
| 626 |
+
"application_method": "Spray on all pods and lower trunk. Most effective when applied before disease onset.",
|
| 627 |
+
"effectiveness": "very_high",
|
| 628 |
+
"safety_precautions": [
|
| 629 |
+
"Wear full protective equipment",
|
| 630 |
+
"Apply in calm weather",
|
| 631 |
+
"Follow label directions exactly",
|
| 632 |
+
"Observe pre-harvest interval"
|
| 633 |
+
]
|
| 634 |
+
},
|
| 635 |
+
{
|
| 636 |
+
"product_name": "Copper fungicide",
|
| 637 |
+
"active_ingredient": "Copper hydroxide",
|
| 638 |
+
"local_brands": [
|
| 639 |
+
"Kocide 101",
|
| 640 |
+
"Nordox 75",
|
| 641 |
+
"Blue Shield",
|
| 642 |
+
"Funguran-OH"
|
| 643 |
+
],
|
| 644 |
+
"cost_ngn_min": 15000,
|
| 645 |
+
"cost_ngn_max": 25000,
|
| 646 |
+
"cost_unit": "per hectare per application",
|
| 647 |
+
"dosage": "2-3 kg per hectare",
|
| 648 |
+
"frequency": "Every 3-4 weeks during rainy season",
|
| 649 |
+
"application_method": "Thorough coverage of all pods. Contact fungicide - must cover pod surface to protect.",
|
| 650 |
+
"effectiveness": "medium"
|
| 651 |
+
},
|
| 652 |
+
{
|
| 653 |
+
"product_name": "Phosphonate (trunk injection)",
|
| 654 |
+
"active_ingredient": "Fosetyl-Al or Phosphorous acid",
|
| 655 |
+
"local_brands": [
|
| 656 |
+
"Foli-R-Fos 400",
|
| 657 |
+
"Aliette"
|
| 658 |
+
],
|
| 659 |
+
"cost_ngn_min": 35000,
|
| 660 |
+
"cost_ngn_max": 50000,
|
| 661 |
+
"cost_unit": "per hectare per application",
|
| 662 |
+
"dosage": "As per label - injected into trunk",
|
| 663 |
+
"frequency": "1-2 times per season",
|
| 664 |
+
"application_method": "Inject directly into main trunk. Provides systemic protection throughout tree.",
|
| 665 |
+
"effectiveness": "very_high",
|
| 666 |
+
"notes": "Requires training for proper application technique"
|
| 667 |
+
}
|
| 668 |
+
],
|
| 669 |
+
"biological": [
|
| 670 |
+
{
|
| 671 |
+
"method": "Trichoderma asperellum",
|
| 672 |
+
"description": "Beneficial fungus that parasitizes Phytophthora. Applied as spray to pods and trunk.",
|
| 673 |
+
"effectiveness": "medium",
|
| 674 |
+
"cost_ngn_per_hectare_min": 15000,
|
| 675 |
+
"cost_ngn_per_hectare_max": 25000,
|
| 676 |
+
"source": "Research stage in Nigeria - contact CRIN for availability",
|
| 677 |
+
"notes": "Reduces disease but not as effective as chemical fungicides in severe outbreaks"
|
| 678 |
+
}
|
| 679 |
+
],
|
| 680 |
+
"traditional": [
|
| 681 |
+
{
|
| 682 |
+
"method": "Ash application",
|
| 683 |
+
"description": "Apply wood ash around base of trees. May help reduce soil moisture and spore survival.",
|
| 684 |
+
"effectiveness": "low",
|
| 685 |
+
"cost_ngn": 2000
|
| 686 |
+
}
|
| 687 |
+
]
|
| 688 |
+
},
|
| 689 |
+
"total_treatment_cost": {
|
| 690 |
+
"min_ngn": 25000,
|
| 691 |
+
"max_ngn": 80000,
|
| 692 |
+
"per": "hectare per season",
|
| 693 |
+
"notes": "6-8 fungicide applications may be needed in severe areas. Combining sanitation with fewer fungicide sprays is most cost-effective."
|
| 694 |
+
},
|
| 695 |
+
"prevention": [
|
| 696 |
+
"Remove and destroy infected pods every 5-7 days - most important practice",
|
| 697 |
+
"Maintain proper tree spacing and reduce shade to 50%",
|
| 698 |
+
"Improve drainage in waterlogged areas",
|
| 699 |
+
"Harvest pods as soon as they mature - do not leave overripe",
|
| 700 |
+
"Control ant populations that spread spores",
|
| 701 |
+
"Apply preventive fungicide sprays before rainy season peak",
|
| 702 |
+
"Remove all pods from ground surface",
|
| 703 |
+
"Prune lower branches to reduce humidity near pods",
|
| 704 |
+
"Clean tools between trees with soap solution",
|
| 705 |
+
"Remove mummified pods from previous seasons"
|
| 706 |
+
],
|
| 707 |
+
"health_projection": {
|
| 708 |
+
"early_detection": {
|
| 709 |
+
"recovery_chance_percent": 80,
|
| 710 |
+
"message": "With immediate treatment and sanitation, approximately 80% of remaining healthy pods can be saved. Begin fungicide application and twice-weekly pod removal immediately."
|
| 711 |
+
},
|
| 712 |
+
"moderate_infection": {
|
| 713 |
+
"recovery_chance_percent": 50,
|
| 714 |
+
"message": "Apply fungicide immediately and intensify pod removal to every 5 days. With aggressive management, expect to save about 50% of your crop."
|
| 715 |
+
},
|
| 716 |
+
"severe_infection": {
|
| 717 |
+
"recovery_chance_percent": 20,
|
| 718 |
+
"message": "Severe black pod outbreak. This season's harvest is significantly compromised. Focus sanitation and fungicide efforts on protecting next season's production."
|
| 719 |
+
}
|
| 720 |
+
},
|
| 721 |
+
"expert_contact": {
|
| 722 |
+
"institution": "Cocoa Research Institute of Nigeria (CRIN)",
|
| 723 |
+
"location": "Ibadan, Oyo State, Nigeria",
|
| 724 |
+
"services": "Disease diagnosis, fungicide recommendations, integrated management training, tolerant varieties"
|
| 725 |
+
}
|
| 726 |
+
},
|
| 727 |
+
"tomato_gray_mold": {
|
| 728 |
+
"id": "TGM_001",
|
| 729 |
+
"class_name": "Tomato Gray Mold Disease",
|
| 730 |
+
"display_name": "Gray Mold (Botrytis Blight)",
|
| 731 |
+
"scientific_name": "Botrytis cinerea",
|
| 732 |
+
"crop": "tomato",
|
| 733 |
+
"category": "fungal",
|
| 734 |
+
"is_disease": true,
|
| 735 |
+
"severity": {
|
| 736 |
+
"level": "high",
|
| 737 |
+
"scale": 4,
|
| 738 |
+
"max_scale": 5,
|
| 739 |
+
"description": "Common and destructive fungal disease especially in humid conditions. Can affect all above-ground plant parts and cause significant post-harvest losses."
|
| 740 |
+
},
|
| 741 |
+
"symptoms": [
|
| 742 |
+
"Soft, water-soaked spots on leaves, stems, and fruits",
|
| 743 |
+
"Distinctive gray fuzzy mold growth (spores) on infected areas",
|
| 744 |
+
"Brown to tan lesions on stems, often at pruning wounds or leaf scars",
|
| 745 |
+
"Blossom blight - flowers turn brown, wither, and fall off",
|
| 746 |
+
"Ghost spots on fruits - pale rings with darker centers",
|
| 747 |
+
"Fruit rot starting from stem end, wounds, or where fruit touches ground",
|
| 748 |
+
"Stem cankers that can girdle and kill plant"
|
| 749 |
+
],
|
| 750 |
+
"how_it_spreads": [
|
| 751 |
+
"Airborne spores (conidia) - primary spread method, released in clouds when disturbed",
|
| 752 |
+
"Splashing water from rain or overhead irrigation",
|
| 753 |
+
"Contaminated hands, tools, and clothing",
|
| 754 |
+
"Infected plant debris in soil - fungus survives as sclerotia",
|
| 755 |
+
"Entry through wounds, pruning cuts, flower scars, or senescent tissue"
|
| 756 |
+
],
|
| 757 |
+
"favorable_conditions": {
|
| 758 |
+
"temperature": "15-25°C (optimal around 20°C)",
|
| 759 |
+
"humidity": "Above 93% for at least 8-12 hours",
|
| 760 |
+
"season": "Cool, cloudy, humid weather conditions",
|
| 761 |
+
"other": "Poor air circulation, overhead irrigation, wounded plants, dense plant canopy"
|
| 762 |
+
},
|
| 763 |
+
"yield_loss": {
|
| 764 |
+
"min_percent": 15,
|
| 765 |
+
"max_percent": 50,
|
| 766 |
+
"average_percent": 25,
|
| 767 |
+
"description": "Can cause 15-50% losses in greenhouses. Field losses typically lower but can be severe in prolonged wet weather."
|
| 768 |
+
},
|
| 769 |
+
"treatments": {
|
| 770 |
+
"cultural": [
|
| 771 |
+
{
|
| 772 |
+
"method": "Improve air circulation",
|
| 773 |
+
"description": "Increase plant spacing, stake plants properly, prune lower leaves, and ensure good ventilation in greenhouses.",
|
| 774 |
+
"effectiveness": "high",
|
| 775 |
+
"cost_ngn": 0
|
| 776 |
+
},
|
| 777 |
+
{
|
| 778 |
+
"method": "Remove infected plant parts",
|
| 779 |
+
"description": "Immediately remove and destroy (burn or bury) any infected leaves, stems, flowers, or fruits. Do not compost.",
|
| 780 |
+
"effectiveness": "high",
|
| 781 |
+
"cost_ngn_per_week": 3000
|
| 782 |
+
},
|
| 783 |
+
{
|
| 784 |
+
"method": "Avoid overhead irrigation",
|
| 785 |
+
"description": "Use drip irrigation to keep foliage dry. Water early in day so plants dry before evening.",
|
| 786 |
+
"effectiveness": "high",
|
| 787 |
+
"cost_ngn_per_hectare": 50000,
|
| 788 |
+
"notes": "One-time drip system installation cost"
|
| 789 |
+
},
|
| 790 |
+
{
|
| 791 |
+
"method": "Prune lower leaves",
|
| 792 |
+
"description": "Remove leaves touching the ground and lower leaves to improve air flow around plants.",
|
| 793 |
+
"effectiveness": "medium",
|
| 794 |
+
"cost_ngn_per_week": 5000
|
| 795 |
+
},
|
| 796 |
+
{
|
| 797 |
+
"method": "Reduce humidity in greenhouse",
|
| 798 |
+
"description": "Ventilate greenhouse, especially in evening. Heat and vent to reduce humidity below 85%.",
|
| 799 |
+
"effectiveness": "high",
|
| 800 |
+
"cost_ngn": 0
|
| 801 |
+
}
|
| 802 |
+
],
|
| 803 |
+
"chemical": [
|
| 804 |
+
{
|
| 805 |
+
"product_name": "Fludioxonil",
|
| 806 |
+
"active_ingredient": "Fludioxonil",
|
| 807 |
+
"local_brands": [
|
| 808 |
+
"Scholar",
|
| 809 |
+
"Medallion",
|
| 810 |
+
"Geoxe"
|
| 811 |
+
],
|
| 812 |
+
"cost_ngn_min": 12000,
|
| 813 |
+
"cost_ngn_max": 20000,
|
| 814 |
+
"cost_unit": "per hectare per application",
|
| 815 |
+
"dosage": "Follow label directions",
|
| 816 |
+
"frequency": "Every 7-10 days during humid periods",
|
| 817 |
+
"application_method": "Spray to thorough coverage of all plant parts",
|
| 818 |
+
"effectiveness": "very_high",
|
| 819 |
+
"notes": "One of the most effective fungicides for gray mold"
|
| 820 |
+
},
|
| 821 |
+
{
|
| 822 |
+
"product_name": "Chlorothalonil",
|
| 823 |
+
"active_ingredient": "Chlorothalonil",
|
| 824 |
+
"local_brands": [
|
| 825 |
+
"Daconil",
|
| 826 |
+
"Bravo",
|
| 827 |
+
"Echo"
|
| 828 |
+
],
|
| 829 |
+
"cost_ngn_min": 8000,
|
| 830 |
+
"cost_ngn_max": 15000,
|
| 831 |
+
"cost_unit": "per hectare per application",
|
| 832 |
+
"dosage": "2-2.5 L per hectare",
|
| 833 |
+
"frequency": "Every 7-14 days",
|
| 834 |
+
"application_method": "Apply as preventive spray before disease onset",
|
| 835 |
+
"effectiveness": "medium",
|
| 836 |
+
"safety_precautions": [
|
| 837 |
+
"Wait 7 days between last spray and harvest",
|
| 838 |
+
"Wear protective equipment",
|
| 839 |
+
"Do not apply in extreme heat"
|
| 840 |
+
]
|
| 841 |
+
},
|
| 842 |
+
{
|
| 843 |
+
"product_name": "Iprodione",
|
| 844 |
+
"active_ingredient": "Iprodione",
|
| 845 |
+
"local_brands": [
|
| 846 |
+
"Rovral",
|
| 847 |
+
"Chipco"
|
| 848 |
+
],
|
| 849 |
+
"cost_ngn_min": 10000,
|
| 850 |
+
"cost_ngn_max": 18000,
|
| 851 |
+
"cost_unit": "per hectare per application",
|
| 852 |
+
"dosage": "1-1.5 kg per hectare",
|
| 853 |
+
"frequency": "Every 10-14 days",
|
| 854 |
+
"application_method": "Spray on foliage and stems",
|
| 855 |
+
"effectiveness": "high",
|
| 856 |
+
"notes": "Rotate with other fungicide classes to prevent resistance"
|
| 857 |
+
}
|
| 858 |
+
],
|
| 859 |
+
"biological": [
|
| 860 |
+
{
|
| 861 |
+
"method": "Bacillus subtilis biofungicide",
|
| 862 |
+
"description": "Biological fungicide that colonizes plant surfaces and competes with disease fungi.",
|
| 863 |
+
"product_names": [
|
| 864 |
+
"Serenade",
|
| 865 |
+
"Cease"
|
| 866 |
+
],
|
| 867 |
+
"effectiveness": "medium",
|
| 868 |
+
"cost_ngn_per_hectare_min": 10000,
|
| 869 |
+
"cost_ngn_per_hectare_max": 18000,
|
| 870 |
+
"notes": "Best used preventively. Approved for organic production."
|
| 871 |
+
}
|
| 872 |
+
],
|
| 873 |
+
"traditional": [
|
| 874 |
+
{
|
| 875 |
+
"method": "Neem oil spray",
|
| 876 |
+
"description": "Mix 5ml neem oil per liter of water with small amount of liquid soap. Spray weekly as preventive.",
|
| 877 |
+
"effectiveness": "low",
|
| 878 |
+
"cost_ngn": 5000
|
| 879 |
+
},
|
| 880 |
+
{
|
| 881 |
+
"method": "Garlic extract spray",
|
| 882 |
+
"description": "Crush 100g garlic, soak in 1 liter water for 24 hours, strain and spray. Has some antifungal properties.",
|
| 883 |
+
"effectiveness": "low",
|
| 884 |
+
"cost_ngn": 3000
|
| 885 |
+
}
|
| 886 |
+
]
|
| 887 |
+
},
|
| 888 |
+
"total_treatment_cost": {
|
| 889 |
+
"min_ngn": 8000,
|
| 890 |
+
"max_ngn": 25000,
|
| 891 |
+
"per": "hectare per application",
|
| 892 |
+
"notes": "Prevention through cultural practices (spacing, irrigation method, pruning) is most cost-effective approach"
|
| 893 |
+
},
|
| 894 |
+
"prevention": [
|
| 895 |
+
"Maintain good air circulation between plants with proper spacing",
|
| 896 |
+
"Use drip irrigation instead of overhead watering",
|
| 897 |
+
"Remove plant debris and fallen leaves promptly",
|
| 898 |
+
"Prune lower leaves to improve air flow at plant base",
|
| 899 |
+
"Avoid working with plants when foliage is wet",
|
| 900 |
+
"Sanitize pruning tools with 10% bleach solution between plants",
|
| 901 |
+
"Ventilate greenhouses to reduce humidity, especially at night",
|
| 902 |
+
"Apply preventive fungicides during cool, humid weather forecasts",
|
| 903 |
+
"Avoid excessive nitrogen fertilization which creates dense, soft growth",
|
| 904 |
+
"Remove crop debris thoroughly at end of season"
|
| 905 |
+
],
|
| 906 |
+
"health_projection": {
|
| 907 |
+
"early_detection": {
|
| 908 |
+
"recovery_chance_percent": 85,
|
| 909 |
+
"message": "Early treatment with fungicide and good sanitation can protect approximately 85% of your crop. Remove all infected parts immediately and improve air circulation."
|
| 910 |
+
},
|
| 911 |
+
"moderate_infection": {
|
| 912 |
+
"recovery_chance_percent": 60,
|
| 913 |
+
"message": "Remove all infected plant parts immediately, apply fungicide, and reduce humidity. With aggressive management, about 60% of crop can be saved."
|
| 914 |
+
},
|
| 915 |
+
"severe_infection": {
|
| 916 |
+
"recovery_chance_percent": 30,
|
| 917 |
+
"message": "Severe gray mold outbreak requires intensive fungicide program and complete removal of infected plants. Expect significant yield reduction this season."
|
| 918 |
+
}
|
| 919 |
+
}
|
| 920 |
+
},
|
| 921 |
+
"tomato_wilt_disease": {
|
| 922 |
+
"id": "TWD_001",
|
| 923 |
+
"class_name": "Tomato Wilt Disease",
|
| 924 |
+
"display_name": "Tomato Wilt Disease",
|
| 925 |
+
"scientific_name": "Fusarium oxysporum f. sp. lycopersici (Fusarium wilt) or Ralstonia solanacearum (Bacterial wilt)",
|
| 926 |
+
"crop": "tomato",
|
| 927 |
+
"category": "fungal_or_bacterial",
|
| 928 |
+
"is_disease": true,
|
| 929 |
+
"severity": {
|
| 930 |
+
"level": "very_high",
|
| 931 |
+
"scale": 5,
|
| 932 |
+
"max_scale": 5,
|
| 933 |
+
"description": "Devastating soil-borne diseases that block water transport in plants. Bacterial wilt (common in tropical Nigeria) can kill plants within days and has no chemical cure."
|
| 934 |
+
},
|
| 935 |
+
"symptoms": [
|
| 936 |
+
"Wilting of leaves and stems, often starting on one side of plant",
|
| 937 |
+
"Yellowing of lower leaves, progressing upward",
|
| 938 |
+
"Wilting during hottest part of day, partial recovery at night (early stage)",
|
| 939 |
+
"Permanent wilting that does not recover even with watering",
|
| 940 |
+
"Brown discoloration of vascular tissue (cut stem to see brown streaks)",
|
| 941 |
+
"Bacterial wilt: milky white bacterial ooze when cut stem is placed in water",
|
| 942 |
+
"Stunted growth and eventual plant death",
|
| 943 |
+
"Fusarium wilt: symptoms may appear on one side of plant or leaf first"
|
| 944 |
+
],
|
| 945 |
+
"how_it_spreads": [
|
| 946 |
+
"Contaminated soil - pathogens survive in soil for many years",
|
| 947 |
+
"Infected transplants from nurseries",
|
| 948 |
+
"Contaminated water (bacterial wilt spreads easily in irrigation water)",
|
| 949 |
+
"Tools and equipment that moved soil between fields",
|
| 950 |
+
"Root-to-root contact between plants",
|
| 951 |
+
"Nematode damage to roots facilitates infection",
|
| 952 |
+
"Workers' boots and clothing carrying contaminated soil"
|
| 953 |
+
],
|
| 954 |
+
"favorable_conditions": {
|
| 955 |
+
"temperature": "Fusarium: 27-28°C optimal; Bacterial wilt: 30-35°C optimal",
|
| 956 |
+
"humidity": "High soil moisture favors bacterial wilt",
|
| 957 |
+
"season": "Year-round in Nigeria, worse during rainy season",
|
| 958 |
+
"other": "Poor drainage, root damage from nematodes, acidic soil (for Fusarium), continuous cropping"
|
| 959 |
+
},
|
| 960 |
+
"yield_loss": {
|
| 961 |
+
"min_percent": 30,
|
| 962 |
+
"max_percent": 100,
|
| 963 |
+
"average_percent": 60,
|
| 964 |
+
"description": "Can cause 30-100% losses. Bacterial wilt can destroy an entire field within 2-3 weeks in favorable conditions."
|
| 965 |
+
},
|
| 966 |
+
"treatments": {
|
| 967 |
+
"cultural": [
|
| 968 |
+
{
|
| 969 |
+
"method": "Remove and destroy infected plants",
|
| 970 |
+
"description": "Immediately remove wilted plants including roots. Burn or bury at least 1 meter deep away from field. Do not compost.",
|
| 971 |
+
"effectiveness": "medium",
|
| 972 |
+
"cost_ngn_per_plant": 100
|
| 973 |
+
},
|
| 974 |
+
{
|
| 975 |
+
"method": "Long crop rotation",
|
| 976 |
+
"description": "Rotate away from tomatoes and related crops (pepper, eggplant, potato) for 4-5 years minimum.",
|
| 977 |
+
"effectiveness": "medium",
|
| 978 |
+
"cost_ngn": 0,
|
| 979 |
+
"notes": "Pathogens can survive in soil for many years"
|
| 980 |
+
},
|
| 981 |
+
{
|
| 982 |
+
"method": "Improve drainage",
|
| 983 |
+
"description": "Plant on raised beds or ridges. Ensure good soil drainage. Avoid waterlogging.",
|
| 984 |
+
"effectiveness": "medium",
|
| 985 |
+
"cost_ngn_per_hectare": 20000
|
| 986 |
+
},
|
| 987 |
+
{
|
| 988 |
+
"method": "Soil solarization",
|
| 989 |
+
"description": "Cover moist soil with clear plastic during hottest months for 4-6 weeks. Heat kills pathogens in top soil layer.",
|
| 990 |
+
"effectiveness": "medium",
|
| 991 |
+
"cost_ngn_per_hectare": 30000
|
| 992 |
+
},
|
| 993 |
+
{
|
| 994 |
+
"method": "Use disease-free transplants",
|
| 995 |
+
"description": "Purchase transplants only from certified disease-free nurseries. Inspect roots before planting.",
|
| 996 |
+
"effectiveness": "high",
|
| 997 |
+
"cost_ngn_premium": 5000
|
| 998 |
+
},
|
| 999 |
+
{
|
| 1000 |
+
"method": "Grafting onto resistant rootstocks",
|
| 1001 |
+
"description": "Graft susceptible varieties onto wilt-resistant rootstocks. Provides excellent protection.",
|
| 1002 |
+
"effectiveness": "very_high",
|
| 1003 |
+
"cost_ngn_per_plant": 150,
|
| 1004 |
+
"notes": "Labor intensive but very effective"
|
| 1005 |
+
}
|
| 1006 |
+
],
|
| 1007 |
+
"chemical": [
|
| 1008 |
+
{
|
| 1009 |
+
"product_name": "Soil fumigant (for Fusarium)",
|
| 1010 |
+
"active_ingredient": "Metam sodium or Dazomet",
|
| 1011 |
+
"local_brands": [
|
| 1012 |
+
"Vapam",
|
| 1013 |
+
"Basamid"
|
| 1014 |
+
],
|
| 1015 |
+
"cost_ngn_min": 80000,
|
| 1016 |
+
"cost_ngn_max": 150000,
|
| 1017 |
+
"cost_unit": "per hectare",
|
| 1018 |
+
"dosage": "Follow label directions carefully",
|
| 1019 |
+
"application_method": "Apply to soil before planting, cover with plastic, wait 2-3 weeks before planting",
|
| 1020 |
+
"effectiveness": "medium",
|
| 1021 |
+
"important_note": "Expensive and may harm beneficial soil organisms. Not effective against bacterial wilt.",
|
| 1022 |
+
"safety_precautions": [
|
| 1023 |
+
"Highly toxic - requires professional application",
|
| 1024 |
+
"Keep people and animals away during treatment",
|
| 1025 |
+
"Follow all waiting periods before planting"
|
| 1026 |
+
]
|
| 1027 |
+
},
|
| 1028 |
+
{
|
| 1029 |
+
"product_name": "Note on bacterial wilt",
|
| 1030 |
+
"description": "There are NO effective chemical treatments for bacterial wilt. Focus entirely on prevention and resistant varieties.",
|
| 1031 |
+
"effectiveness": "none"
|
| 1032 |
+
}
|
| 1033 |
+
],
|
| 1034 |
+
"resistant_varieties": [
|
| 1035 |
+
{
|
| 1036 |
+
"resistance_type": "Fusarium wilt resistant (F, F2, F3)",
|
| 1037 |
+
"description": "Many commercial varieties have resistance to Fusarium races. Look for F, F2, or F3 on seed packets.",
|
| 1038 |
+
"effectiveness": "high",
|
| 1039 |
+
"notes": "Different races exist - variety may resist some but not all"
|
| 1040 |
+
},
|
| 1041 |
+
{
|
| 1042 |
+
"resistance_type": "Bacterial wilt resistant",
|
| 1043 |
+
"description": "Few varieties have true resistance. Some have tolerance. Grafting onto resistant rootstocks is most effective.",
|
| 1044 |
+
"effectiveness": "medium",
|
| 1045 |
+
"notes": "Check with local extension for recommended varieties"
|
| 1046 |
+
}
|
| 1047 |
+
],
|
| 1048 |
+
"biological": [
|
| 1049 |
+
{
|
| 1050 |
+
"method": "Trichoderma application",
|
| 1051 |
+
"description": "Apply Trichoderma-based products to soil before planting. Colonizes roots and provides some protection.",
|
| 1052 |
+
"effectiveness": "low_to_medium",
|
| 1053 |
+
"cost_ngn_per_hectare": 15000,
|
| 1054 |
+
"notes": "Best as part of integrated management, not sole treatment"
|
| 1055 |
+
},
|
| 1056 |
+
{
|
| 1057 |
+
"method": "Mycorrhizal inoculants",
|
| 1058 |
+
"description": "Apply mycorrhizal fungi to transplant roots. Improves root health and provides some disease suppression.",
|
| 1059 |
+
"effectiveness": "low",
|
| 1060 |
+
"cost_ngn_per_hectare": 10000
|
| 1061 |
+
}
|
| 1062 |
+
],
|
| 1063 |
+
"traditional": [
|
| 1064 |
+
{
|
| 1065 |
+
"method": "Organic matter addition",
|
| 1066 |
+
"description": "Add well-composted organic matter to soil. Improves soil health and beneficial microbe populations.",
|
| 1067 |
+
"effectiveness": "low",
|
| 1068 |
+
"cost_ngn_per_hectare": 20000
|
| 1069 |
+
},
|
| 1070 |
+
{
|
| 1071 |
+
"method": "Lime application (for Fusarium)",
|
| 1072 |
+
"description": "Raise soil pH to 6.5-7.0 with agricultural lime. Fusarium prefers acidic soils.",
|
| 1073 |
+
"effectiveness": "low",
|
| 1074 |
+
"cost_ngn_per_hectare": 15000
|
| 1075 |
+
}
|
| 1076 |
+
]
|
| 1077 |
+
},
|
| 1078 |
+
"total_treatment_cost": {
|
| 1079 |
+
"min_ngn": 20000,
|
| 1080 |
+
"max_ngn": 150000,
|
| 1081 |
+
"per": "hectare",
|
| 1082 |
+
"notes": "Prevention is far more cost-effective than treatment. Once wilt pathogens are in soil, they persist for years. Invest in resistant varieties and grafted transplants."
|
| 1083 |
+
},
|
| 1084 |
+
"prevention": [
|
| 1085 |
+
"Plant resistant varieties - most important for Fusarium wilt",
|
| 1086 |
+
"Use grafted plants with resistant rootstocks - best for bacterial wilt",
|
| 1087 |
+
"Purchase transplants only from certified disease-free nurseries",
|
| 1088 |
+
"Practice long crop rotation (4-5 years) away from solanaceous crops",
|
| 1089 |
+
"Improve soil drainage - plant on raised beds in wet areas",
|
| 1090 |
+
"Sanitize tools and boots when moving between fields",
|
| 1091 |
+
"Avoid introducing contaminated soil to clean fields",
|
| 1092 |
+
"Control root-knot nematodes that facilitate infection",
|
| 1093 |
+
"Use clean irrigation water - bacterial wilt spreads in water",
|
| 1094 |
+
"Add organic matter to support beneficial soil microorganisms",
|
| 1095 |
+
"Solarize soil in heavily infected areas before replanting",
|
| 1096 |
+
"Never plant in fields with known wilt history without taking precautions"
|
| 1097 |
+
],
|
| 1098 |
+
"health_projection": {
|
| 1099 |
+
"early_detection": {
|
| 1100 |
+
"recovery_chance_percent": 50,
|
| 1101 |
+
"message": "Remove wilted plants immediately to prevent spread. Remaining plants have about 50% chance if action is taken quickly. Do not replant in same location this season."
|
| 1102 |
+
},
|
| 1103 |
+
"moderate_infection": {
|
| 1104 |
+
"recovery_chance_percent": 25,
|
| 1105 |
+
"message": "Multiple plants affected indicates pathogen is established in soil. Remove all affected plants. Remaining healthy plants are at high risk. Consider abandoning field for this season."
|
| 1106 |
+
},
|
| 1107 |
+
"severe_infection": {
|
| 1108 |
+
"recovery_chance_percent": 5,
|
| 1109 |
+
"message": "Severe wilt outbreak means soil is heavily contaminated. This season's crop is lost. Do not plant tomatoes or related crops in this field for at least 4-5 years. Consider soil solarization before future use."
|
| 1110 |
+
}
|
| 1111 |
+
},
|
| 1112 |
+
"diagnostic_tip": "To distinguish between Fusarium and Bacterial wilt: Cut a stem and place cut end in clear glass of water. If milky white bacterial streaming appears within minutes, it is Bacterial wilt. Fusarium wilt shows brown vascular discoloration but no bacterial ooze."
|
| 1113 |
+
}
|
| 1114 |
+
}
|
| 1115 |
+
}
|
frontend/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
frontend/css/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
frontend/css/main.css
ADDED
|
@@ -0,0 +1,1451 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes Main Stylesheet
|
| 3 |
+
* Modern/Classy design for Diagnosis, ChatGPT-style for Chat only
|
| 4 |
+
*/
|
| 5 |
+
|
| 6 |
+
/* ==========================================================================
|
| 7 |
+
CSS VARIABLES
|
| 8 |
+
========================================================================== */
|
| 9 |
+
:root {
|
| 10 |
+
--bg-dark: #0D0D0D;
|
| 11 |
+
--bg-card: #1A1A1A;
|
| 12 |
+
--bg-elevated: #252525;
|
| 13 |
+
--bg-hover: #2D2D2D;
|
| 14 |
+
|
| 15 |
+
--text-primary: #FFFFFF;
|
| 16 |
+
--text-secondary: #B0B0B0;
|
| 17 |
+
--text-muted: #707070;
|
| 18 |
+
|
| 19 |
+
--accent: #10B981;
|
| 20 |
+
--accent-hover: #34D399;
|
| 21 |
+
--accent-muted: rgba(16, 185, 129, 0.15);
|
| 22 |
+
|
| 23 |
+
--border: #333333;
|
| 24 |
+
--border-light: #2A2A2A;
|
| 25 |
+
|
| 26 |
+
--success: #10B981;
|
| 27 |
+
--warning: #F59E0B;
|
| 28 |
+
--error: #EF4444;
|
| 29 |
+
--info: #3B82F6;
|
| 30 |
+
|
| 31 |
+
--severity-low: #10B981;
|
| 32 |
+
--severity-medium: #F59E0B;
|
| 33 |
+
--severity-high: #F97316;
|
| 34 |
+
--severity-very-high: #EF4444;
|
| 35 |
+
|
| 36 |
+
--radius-sm: 6px;
|
| 37 |
+
--radius-md: 10px;
|
| 38 |
+
--radius-lg: 14px;
|
| 39 |
+
--radius-xl: 20px;
|
| 40 |
+
|
| 41 |
+
--shadow: 0 4px 20px rgba(0,0,0,0.3);
|
| 42 |
+
--transition: 200ms ease;
|
| 43 |
+
}
|
| 44 |
+
|
| 45 |
+
/* ==========================================================================
|
| 46 |
+
RESET & BASE
|
| 47 |
+
========================================================================== */
|
| 48 |
+
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
| 49 |
+
|
| 50 |
+
html {
|
| 51 |
+
font-size: 16px;
|
| 52 |
+
-webkit-font-smoothing: antialiased;
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
body {
|
| 56 |
+
font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
|
| 57 |
+
background: var(--bg-dark);
|
| 58 |
+
color: var(--text-primary);
|
| 59 |
+
line-height: 1.5;
|
| 60 |
+
min-height: 100vh;
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
button { font-family: inherit; cursor: pointer; border: none; background: none; }
|
| 64 |
+
input, textarea { font-family: inherit; }
|
| 65 |
+
|
| 66 |
+
/* ==========================================================================
|
| 67 |
+
LAYOUT
|
| 68 |
+
========================================================================== */
|
| 69 |
+
.app-container { width: 100%; min-height: 100vh; }
|
| 70 |
+
.page { display: none; min-height: 100vh; }
|
| 71 |
+
.page.active { display: flex; flex-direction: column; }
|
| 72 |
+
.hidden { display: none !important; }
|
| 73 |
+
|
| 74 |
+
/* ==========================================================================
   PAGE 1: LANGUAGE SELECTOR
   ========================================================================== */
.language-page {
    min-height: 100vh;
    display: flex;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    padding: 24px;
    text-align: center;
    background: linear-gradient(180deg, #0D0D0D 0%, #1A1A1A 100%);
}

.language-content { max-width: 500px; }

.logo-large {
    font-size: 72px;
    margin-bottom: 16px;
    animation: float 3s ease-in-out infinite;
}

/* Gentle up/down bob for the logo emoji. */
@keyframes float {
    0%, 100% { transform: translateY(0); }
    50% { transform: translateY(-8px); }
}

.app-title {
    font-size: 42px;
    font-weight: 700;
    color: var(--accent);
    margin-bottom: 8px;
}

.app-tagline {
    font-size: 16px;
    color: var(--text-secondary);
    margin-bottom: 40px;
}

.language-selection { margin-bottom: 20px; }

.selection-title {
    font-size: 22px;
    font-weight: 600;
    margin-bottom: 8px;
}

.selection-subtitle {
    font-size: 14px;
    color: var(--text-muted);
    margin-bottom: 24px;
}

/* Two-column grid of language choices. */
.language-grid {
    display: grid;
    grid-template-columns: repeat(2, 1fr);
    gap: 12px;
    margin-bottom: 28px;
}

.language-btn {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: 8px;
    padding: 20px 16px;
    background: var(--bg-card);
    border: 2px solid var(--border);
    border-radius: var(--radius-lg);
    transition: all var(--transition);
}

.language-btn:hover {
    border-color: var(--accent);
    background: var(--accent-muted);
}

/* .selected is applied by JS when the user picks a language. */
.language-btn.selected {
    border-color: var(--accent);
    background: var(--accent-muted);
    box-shadow: 0 0 20px rgba(16, 185, 129, 0.2);
}

.lang-flag { font-size: 28px; }
.lang-name { font-size: 15px; font-weight: 600; color: var(--text-primary); }

.btn-continue {
    width: 100%;
    max-width: 280px;
    padding: 14px 24px;
    font-size: 16px;
    font-weight: 600;
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 8px;
}

.page-footer {
    position: absolute;
    bottom: 20px;
    left: 0;
    right: 0;
    text-align: center;
}

.page-footer p {
    font-size: 12px;
    color: var(--text-muted);
}
/* ==========================================================================
   PAGE 2: DIAGNOSIS (Modern/Classy)
   ========================================================================== */
.diagnosis-page {
    min-height: 100vh;
    display: flex;
    flex-direction: column;
    background: var(--bg-dark);
}

/* Header - STICKY */
.main-header {
    display: flex;
    align-items: center;
    justify-content: space-between;
    padding: 16px 20px;
    background: var(--bg-card);
    border-bottom: 1px solid var(--border);
    position: sticky;
    top: 0;
    z-index: 100;
}

.header-brand {
    display: flex;
    align-items: center;
    gap: 12px;
}

.header-logo { font-size: 36px; }

.header-title {
    font-size: 28px;
    font-weight: 800;
    color: var(--accent);
    margin: 0;
    letter-spacing: -0.5px;
}

.header-subtitle {
    font-size: 14px;
    color: var(--text-secondary);
    margin: 4px 0 0 0;
}

/* Anchor for the absolutely-positioned language dropdown. */
.header-actions { position: relative; }

.btn-language {
    display: flex;
    align-items: center;
    gap: 6px;
    padding: 12px 18px;
    background: var(--accent);
    border-radius: var(--radius-md);
    color: #FFFFFF;
    font-size: 15px;
    font-weight: 700;
    border: 2px solid var(--accent);
    transition: var(--transition);
}

.btn-language:hover {
    background: var(--accent-hover);
    border-color: var(--accent-hover);
    transform: scale(1.02);
}

.dropdown-menu {
    position: absolute;
    top: 100%;
    right: 0;
    margin-top: 4px;
    background: var(--bg-elevated);
    border: 1px solid var(--border);
    border-radius: var(--radius-md);
    overflow: hidden;
    min-width: 120px;
    z-index: 100;
}

.dropdown-item {
    display: block;
    width: 100%;
    padding: 10px 14px;
    text-align: left;
    color: var(--text-primary);
    font-size: 13px;
}

.dropdown-item:hover { background: var(--bg-hover); }
.dropdown-item.active { color: var(--accent); }

/* Main Content */
.diagnosis-main {
    flex: 1;
    padding: 20px;
    max-width: 900px;
    margin: 0 auto;
    width: 100%;
}

/* Upload Section */
.upload-section { margin-bottom: 20px; }

.upload-card {
    background: var(--bg-card);
    border-radius: var(--radius-lg);
    padding: 24px;
    text-align: center;
}

.upload-header {
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 10px;
    margin-bottom: 8px;
}

.upload-icon-small { font-size: 24px; }

.upload-header h2 {
    font-size: 20px;
    font-weight: 600;
}

.upload-desc {
    font-size: 14px;
    color: var(--text-secondary);
    margin-bottom: 20px;
}

/* Drag-and-drop target; .dragover is toggled by JS during a drag. */
.upload-zone {
    border: 2px dashed var(--border);
    border-radius: var(--radius-md);
    padding: 32px 20px;
    cursor: pointer;
    transition: all var(--transition);
    margin-bottom: 16px;
}

.upload-zone:hover, .upload-zone.dragover {
    border-color: var(--accent);
    background: var(--accent-muted);
}

.upload-icon { margin-bottom: 12px; color: var(--text-muted); }

.upload-text {
    font-size: 15px;
    font-weight: 500;
    color: var(--text-primary);
    margin-bottom: 4px;
}

.upload-formats {
    font-size: 12px;
    color: var(--text-muted);
}

.image-preview-container {
    position: relative;
    margin-bottom: 16px;
    border-radius: var(--radius-md);
    overflow: hidden;
    background: var(--bg-elevated);
}

.image-preview {
    width: 100%;
    max-height: 250px;
    object-fit: contain;
}

.btn-remove-image {
    position: absolute;
    top: 8px;
    right: 8px;
    width: 28px;
    height: 28px;
    background: rgba(0,0,0,0.7);
    color: #fff;
    border-radius: 50%;
    font-size: 14px;
    display: flex;
    align-items: center;
    justify-content: center;
}

.btn-remove-image:hover { background: var(--error); }

.btn-analyze {
    width: 100%;
    padding: 14px;
    font-size: 15px;
    font-weight: 600;
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 8px;
    margin-bottom: 16px;
}

.analyzing-loader {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: 12px;
    padding: 20px;
}

.analyzing-loader p {
    font-size: 14px;
    color: var(--text-secondary);
}

.supported-crops {
    display: flex;
    justify-content: center;
    gap: 12px;
    flex-wrap: wrap;
}

.crop-tag {
    font-size: 13px;
    color: var(--text-secondary);
    background: var(--bg-elevated);
    padding: 6px 12px;
    border-radius: 20px;
}

/* Results Section */
.results-section { animation: fadeIn 0.3s ease; }

@keyframes fadeIn { from { opacity: 0; } to { opacity: 1; } }

.results-header {
    display: flex;
    align-items: center;
    justify-content: space-between;
    margin-bottom: 16px;
}

.results-header h2 {
    font-size: 18px;
    font-weight: 600;
}

.btn-text {
    font-size: 13px;
    color: var(--accent);
    font-weight: 500;
}

.btn-text:hover { text-decoration: underline; }

/* Disease Card */
.disease-card {
    background: var(--bg-card);
    border-radius: var(--radius-lg);
    padding: 16px;
    margin-bottom: 16px;
}

.disease-top {
    display: flex;
    align-items: center;
    gap: 12px;
    margin-bottom: 14px;
}

.disease-icon {
    font-size: 36px;
    width: 50px;
    height: 50px;
    display: flex;
    align-items: center;
    justify-content: center;
    background: var(--bg-elevated);
    border-radius: var(--radius-md);
}

.disease-info { flex: 1; }

.disease-info h3 {
    font-size: 17px;
    font-weight: 600;
    margin-bottom: 2px;
}

.crop-label {
    font-size: 13px;
    color: var(--text-secondary);
    text-transform: capitalize;
}

.severity-badge {
    padding: 5px 12px;
    border-radius: 20px;
    font-size: 12px;
    font-weight: 600;
    text-transform: capitalize;
}

/* Both hyphen and underscore variants are kept — the API may emit either. */
.severity-badge.low { background: rgba(16,185,129,0.15); color: var(--severity-low); }
.severity-badge.medium { background: rgba(245,158,11,0.15); color: var(--severity-medium); }
.severity-badge.high { background: rgba(249,115,22,0.15); color: var(--severity-high); }
.severity-badge.very-high, .severity-badge.very_high { background: rgba(239,68,68,0.15); color: var(--severity-very-high); }

.disease-confidence {
    display: flex;
    align-items: center;
    gap: 10px;
}

.conf-label {
    font-size: 13px;
    color: var(--text-secondary);
}

.conf-bar-wrap {
    flex: 1;
    height: 8px;
    background: var(--bg-elevated);
    border-radius: 4px;
    overflow: hidden;
}

/* Width is set inline by JS to the model's confidence percentage. */
.conf-bar {
    height: 100%;
    background: linear-gradient(90deg, var(--accent), var(--accent-hover));
    border-radius: 4px;
    transition: width 0.5s ease;
}

.conf-value {
    font-size: 14px;
    font-weight: 600;
    color: var(--accent);
    min-width: 40px;
    text-align: right;
}

/* Info Card / Tabs */
.info-card {
    background: var(--bg-card);
    border-radius: var(--radius-lg);
    overflow: hidden;
    margin-bottom: 16px;
}

.tabs {
    display: flex;
    background: var(--bg-elevated);
    padding: 4px;
    gap: 4px;
}

.tab-btn {
    flex: 1;
    padding: 10px;
    font-size: 13px;
    font-weight: 500;
    color: var(--text-secondary);
    border-radius: var(--radius-sm);
    transition: all var(--transition);
}

.tab-btn:hover { color: var(--text-primary); }
.tab-btn.active { background: var(--bg-card); color: var(--text-primary); }

.tab-content { padding: 16px; }

.info-list {
    list-style: none;
    margin: 0 0 16px 0;
}

.info-list li {
    position: relative;
    padding-left: 16px;
    margin-bottom: 10px;
    font-size: 14px;
    color: var(--text-secondary);
    line-height: 1.5;
}

/* Custom accent-colored bullet (list-style is disabled above). */
.info-list li::before {
    content: "";
    position: absolute;
    left: 0;
    top: 7px;
    width: 6px;
    height: 6px;
    background: var(--accent);
    border-radius: 50%;
}

.info-block {
    margin-bottom: 16px;
}

.info-block h4 {
    font-size: 14px;
    font-weight: 600;
    margin-bottom: 8px;
    color: var(--text-primary);
}

.info-block p {
    font-size: 14px;
    color: var(--text-secondary);
}

.recovery-block {
    background: var(--bg-elevated);
    padding: 12px;
    border-radius: var(--radius-md);
}

.recovery-bar-wrap {
    height: 10px;
    background: var(--bg-dark);
    border-radius: 5px;
    overflow: hidden;
    margin-bottom: 6px;
}

.recovery-bar {
    height: 100%;
    background: var(--success);
    border-radius: 5px;
    transition: width 0.5s ease;
}

.recovery-block span {
    font-size: 13px;
    color: var(--success);
}

.treatment-grid {
    display: flex;
    flex-direction: column;
    gap: 8px;
}

.treatment-item {
    background: var(--bg-elevated);
    padding: 10px 12px;
    border-radius: var(--radius-sm);
}

.treatment-item strong {
    font-size: 13px;
    display: block;
    margin-bottom: 2px;
}

.treatment-item span {
    font-size: 12px;
    color: var(--text-secondary);
}

.cost-block {
    display: flex;
    align-items: center;
    justify-content: space-between;
    background: var(--accent-muted);
    padding: 12px 16px;
    border-radius: var(--radius-md);
    margin-top: 12px;
}

.cost-label {
    font-size: 14px;
    color: var(--text-secondary);
}

.cost-value {
    font-size: 18px;
    font-weight: 700;
    color: var(--accent);
}

/* Chat Button (Simple but visible) */
.btn-chat {
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 8px;
    width: 100%;
    padding: 14px;
    background: var(--accent);
    color: #fff;
    font-size: 15px;
    font-weight: 600;
    border-radius: var(--radius-md);
    transition: all var(--transition);
}

.btn-chat:hover {
    background: var(--accent-hover);
    transform: translateY(-1px);
}

/* Footer */
.main-footer {
    padding: 16px;
    text-align: center;
    border-top: 1px solid var(--border-light);
}

.main-footer p {
    font-size: 12px;
    color: var(--text-muted);
}
/* ==========================================================================
   PAGE 3: CHAT (ChatGPT-Inspired)
   ========================================================================== */
/* Fixed-height page: only .chat-messages scrolls, header/input stay put. */
.chat-page {
    height: 100vh;
    display: flex;
    flex-direction: column;
    background: #0D0D0D;
}

.chat-header {
    display: flex;
    align-items: center;
    justify-content: space-between;
    padding: 12px 16px;
    background: #1A1A1A;
    border-bottom: 1px solid #2D2D2D;
}

.btn-back {
    display: flex;
    align-items: center;
    gap: 6px;
    color: var(--text-secondary);
    font-size: 14px;
    padding: 6px 10px;
    border-radius: var(--radius-sm);
}

.btn-back:hover { background: #2D2D2D; color: #fff; }

.chat-title {
    font-size: 15px;
    font-weight: 600;
    display: flex;
    align-items: center;
    gap: 6px;
}

.chat-lang {
    font-size: 12px;
    color: var(--text-muted);
    background: #2D2D2D;
    padding: 4px 10px;
    border-radius: 12px;
}

/* Banner showing the diagnosed disease the chat is grounded in. */
.chat-context {
    display: flex;
    align-items: center;
    gap: 8px;
    padding: 10px 16px;
    background: rgba(16,185,129,0.08);
    font-size: 13px;
    color: var(--text-secondary);
    border-bottom: 1px solid #2D2D2D;
    flex-wrap: wrap;
}

.chat-context strong { color: var(--accent); }

.chat-messages {
    flex: 1;
    overflow-y: auto;
    padding: 16px;
}

.chat-welcome {
    display: flex;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    height: 100%;
    text-align: center;
    padding: 40px 20px;
}

.welcome-icon {
    font-size: 48px;
    margin-bottom: 16px;
    opacity: 0.6;
}

.chat-welcome h3 {
    font-size: 18px;
    margin-bottom: 8px;
}

.chat-welcome p {
    font-size: 14px;
    color: var(--text-muted);
    max-width: 300px;
}

/* Chat Messages */
.message {
    display: flex;
    gap: 12px;
    margin-bottom: 16px;
    animation: slideUp 0.3s ease;
}

@keyframes slideUp {
    from { opacity: 0; transform: translateY(10px); }
    to { opacity: 1; transform: translateY(0); }
}

/* User messages sit on the right by reversing the flex row. */
.message.user { flex-direction: row-reverse; }

.message-avatar {
    width: 32px;
    height: 32px;
    border-radius: 50%;
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: 16px;
    flex-shrink: 0;
}

.message.assistant .message-avatar { background: var(--accent); }
.message.user .message-avatar { background: #4B5563; }

.message-content {
    max-width: 80%;
    padding: 12px 16px;
    border-radius: 16px;
    font-size: 14px;
    line-height: 1.5;
}

/* Message content wrapper for Listen button */
.message-content-wrapper {
    display: flex;
    flex-direction: column;
    align-items: flex-start;
    max-width: 80%;
}

.message.user .message-content-wrapper {
    align-items: flex-end;
}

.message-content-wrapper .message-content {
    max-width: 100%;
}

.message.assistant .message-content {
    background: #2D2D2D;
    border-bottom-left-radius: 4px;
}

.message.user .message-content {
    background: var(--accent);
    color: #fff;
    border-bottom-right-radius: 4px;
}

.typing-indicator {
    display: flex;
    gap: 4px;
    padding: 8px 0;
}

.typing-dot {
    width: 8px;
    height: 8px;
    background: var(--text-muted);
    border-radius: 50%;
    animation: bounce 1.4s infinite;
}

/* Stagger the three dots for the wave effect. */
.typing-dot:nth-child(2) { animation-delay: 0.2s; }
.typing-dot:nth-child(3) { animation-delay: 0.4s; }

@keyframes bounce {
    0%, 60%, 100% { transform: translateY(0); }
    30% { transform: translateY(-6px); }
}

/* Chat Input */
.chat-input-wrap {
    padding: 12px 16px;
    background: #1A1A1A;
    border-top: 1px solid #2D2D2D;
}

.chat-input-box {
    display: flex;
    align-items: flex-end;
    gap: 8px;
    background: #2D2D2D;
    border-radius: 12px;
    padding: 8px 12px;
}

.chat-input-box textarea {
    flex: 1;
    background: transparent;
    border: none;
    color: #fff;
    font-size: 14px;
    resize: none;
    min-height: 24px;
    max-height: 120px;
    padding: 4px 0;
}

.chat-input-box textarea:focus { outline: none; }
.chat-input-box textarea::placeholder { color: #6B6B6B; }

.btn-icon {
    width: 36px;
    height: 36px;
    display: flex;
    align-items: center;
    justify-content: center;
    border-radius: 8px;
    color: var(--text-secondary);
    transition: all var(--transition);
}

.btn-icon:hover { background: #3D3D3D; color: #fff; }

.btn-voice.recording {
    background: var(--error);
    color: #fff;
    animation: pulse 1.5s infinite;
}

@keyframes pulse {
    0%, 100% { opacity: 1; }
    50% { opacity: 0.6; }
}

.btn-send {
    background: var(--accent);
    color: #fff;
}

.btn-send:disabled {
    background: #3D3D3D;
    color: #6B6B6B;
    cursor: not-allowed;
}

.btn-send:not(:disabled):hover { background: var(--accent-hover); }

.chat-note {
    font-size: 11px;
    color: var(--text-muted);
    text-align: center;
    margin-top: 8px;
}

/* Voice Overlay - LEGACY (hidden, replaced by inline indicator) */
.voice-overlay {
    position: fixed;
    inset: 0;
    background: rgba(0,0,0,0.9);
    display: flex;
    align-items: center;
    justify-content: center;
    z-index: 200;
}

.voice-modal {
    text-align: center;
    padding: 40px;
}

.voice-anim {
    display: flex;
    justify-content: center;
    gap: 8px;
    margin-bottom: 20px;
}

.voice-anim span {
    width: 12px;
    height: 12px;
    background: var(--accent);
    border-radius: 50%;
    animation: voicePulse 1.2s infinite;
}

.voice-anim span:nth-child(2) { animation-delay: 0.2s; }
.voice-anim span:nth-child(3) { animation-delay: 0.4s; }

@keyframes voicePulse {
    0%, 100% { transform: scale(1); opacity: 0.5; }
    50% { transform: scale(1.4); opacity: 1; }
}

.voice-modal p {
    font-size: 18px;
    margin-bottom: 20px;
}
/* ==========================================================================
   INLINE LISTENING INDICATOR (New - replaces full-screen overlay)
   ========================================================================== */
.listening-indicator {
    display: flex;
    align-items: center;
    gap: 12px;
    flex: 1;
    padding: 8px 12px;
    background: rgba(239, 68, 68, 0.15);
    border: 1px solid var(--error);
    border-radius: 8px;
    animation: listenFadeIn 0.3s ease;
}

@keyframes listenFadeIn {
    from { opacity: 0; transform: scale(0.95); }
    to { opacity: 1; transform: scale(1); }
}

/* Pulsing red dot */
.listening-pulse {
    width: 12px;
    height: 12px;
    background: var(--error);
    border-radius: 50%;
    animation: listenPulse 1.5s ease-in-out infinite;
    flex-shrink: 0;
}

/* Expanding box-shadow ring fades out as it grows (sonar effect). */
@keyframes listenPulse {
    0%, 100% {
        transform: scale(1);
        opacity: 1;
        box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.7);
    }
    50% {
        transform: scale(1.1);
        opacity: 0.8;
        box-shadow: 0 0 0 8px rgba(239, 68, 68, 0);
    }
}

/* Listening text */
.listening-text {
    font-size: 15px;
    font-weight: 600;
    color: var(--error);
    flex: 1;
}

/* Timer display */
.listening-timer {
    font-size: 14px;
    font-weight: 600;
    color: var(--text-primary);
    font-family: 'SF Mono', 'Monaco', 'Consolas', monospace;
    background: rgba(255, 255, 255, 0.1);
    padding: 4px 10px;
    border-radius: 6px;
    min-width: 50px;
    text-align: center;
}

/* Inline stop button */
.btn-stop-inline {
    width: 36px;
    height: 36px;
    display: flex;
    align-items: center;
    justify-content: center;
    background: var(--error);
    color: #fff;
    border-radius: 8px;
    transition: all var(--transition);
    flex-shrink: 0;
}

.btn-stop-inline:hover {
    background: #DC2626;
    transform: scale(1.05);
}

/* Hide elements when recording */
.chat-input-box textarea.hidden,
.chat-input-box .btn-send.hidden {
    display: none;
}

/* Voice button recording state */
.btn-voice.recording {
    background: var(--error);
    color: #fff;
    animation: none; /* Remove pulse, we have the indicator now */
}
/* ==========================================================================
   COMMON COMPONENTS
   ========================================================================== */

/* ==========================================================================
   TTS - LISTEN BUTTON & AUDIO PLAYER
   ========================================================================== */

/* Listen button on each assistant message */
.btn-listen {
    display: inline-flex;
    align-items: center;
    gap: 6px;
    margin-top: 10px;
    padding: 8px 14px;
    background: var(--accent);
    color: #fff;
    font-size: 13px;
    font-weight: 600;
    border-radius: 20px;
    cursor: pointer;
    transition: all var(--transition);
    border: none;
}

.btn-listen:hover {
    background: var(--accent-hover);
    transform: scale(1.02);
}

/* Shown while audio is being fetched/synthesized. */
.btn-listen.loading {
    background: var(--bg-elevated);
    color: var(--text-secondary);
    cursor: wait;
}

/* Small spinner appended while loading; relies on a `spin` keyframe
   defined elsewhere in the stylesheet. */
.btn-listen.loading::after {
    content: '';
    width: 12px;
    height: 12px;
    border: 2px solid var(--text-muted);
    border-top-color: var(--accent);
    border-radius: 50%;
    animation: spin 0.8s linear infinite;
    margin-left: 6px;
}

.btn-listen.playing {
    background: var(--error);
}

.btn-listen-icon {
    font-size: 14px;
}

/* Floating TTS Player */
/* Hidden off-screen below; .active slides it up into view. */
.tts-player {
    position: fixed;
    bottom: 100px;
    left: 50%;
    transform: translateX(-50%) translateY(100px);
    width: 90%;
    max-width: 400px;
    background: var(--bg-elevated);
    border: 1px solid var(--border);
    border-radius: var(--radius-lg);
    padding: 16px;
    box-shadow: 0 8px 32px rgba(0, 0, 0, 0.4);
    z-index: 150;
    opacity: 0;
    visibility: hidden;
    transition: all 0.3s ease;
}

.tts-player.active {
    transform: translateX(-50%) translateY(0);
    opacity: 1;
    visibility: visible;
}

/* Player header */
.tts-player-header {
    display: flex;
    align-items: center;
    justify-content: space-between;
    margin-bottom: 12px;
}

.tts-player-title {
    display: flex;
    align-items: center;
    gap: 8px;
    font-size: 14px;
    font-weight: 600;
    color: var(--text-primary);
}

.tts-player-title-icon {
    font-size: 16px;
}

.btn-tts-close {
    width: 28px;
    height: 28px;
    display: flex;
    align-items: center;
    justify-content: center;
    background: var(--bg-hover);
    color: var(--text-secondary);
    border-radius: 6px;
    font-size: 18px;
    cursor: pointer;
    transition: all var(--transition);
}

.btn-tts-close:hover {
    background: var(--error);
    color: #fff;
}

/* Progress bar */
.tts-progress-container {
    height: 6px;
    background: var(--bg-hover);
    border-radius: 3px;
    overflow: hidden;
    margin-bottom: 12px;
    cursor: pointer;
}

/* Width is driven by JS as playback progresses. */
.tts-progress-bar {
    height: 100%;
    background: var(--accent);
    border-radius: 3px;
    width: 0%;
    transition: width 0.1s linear;
}

/* Controls row */
.tts-controls {
    display: flex;
    align-items: center;
    justify-content: space-between;
    gap: 12px;
}

/* Playback controls */
.tts-playback-controls {
    display: flex;
    align-items: center;
    gap: 8px;
}
| 1251 |
+
.btn-tts-control {
|
| 1252 |
+
width: 40px;
|
| 1253 |
+
height: 40px;
|
| 1254 |
+
display: flex;
|
| 1255 |
+
align-items: center;
|
| 1256 |
+
justify-content: center;
|
| 1257 |
+
background: var(--accent);
|
| 1258 |
+
color: #fff;
|
| 1259 |
+
border-radius: 50%;
|
| 1260 |
+
cursor: pointer;
|
| 1261 |
+
transition: all var(--transition);
|
| 1262 |
+
}
|
| 1263 |
+
|
| 1264 |
+
.btn-tts-control:hover {
|
| 1265 |
+
background: var(--accent-hover);
|
| 1266 |
+
transform: scale(1.05);
|
| 1267 |
+
}
|
| 1268 |
+
|
| 1269 |
+
.btn-tts-control.stop {
|
| 1270 |
+
background: var(--bg-hover);
|
| 1271 |
+
color: var(--text-secondary);
|
| 1272 |
+
width: 36px;
|
| 1273 |
+
height: 36px;
|
| 1274 |
+
}
|
| 1275 |
+
|
| 1276 |
+
.btn-tts-control.stop:hover {
|
| 1277 |
+
background: var(--error);
|
| 1278 |
+
color: #fff;
|
| 1279 |
+
}
|
| 1280 |
+
|
| 1281 |
+
/* Time display */
|
| 1282 |
+
.tts-time {
|
| 1283 |
+
font-size: 12px;
|
| 1284 |
+
color: var(--text-muted);
|
| 1285 |
+
font-family: 'SF Mono', 'Monaco', 'Consolas', monospace;
|
| 1286 |
+
min-width: 70px;
|
| 1287 |
+
text-align: center;
|
| 1288 |
+
}
|
| 1289 |
+
|
| 1290 |
+
/* Speed controls */
|
| 1291 |
+
.tts-speed-controls {
|
| 1292 |
+
display: flex;
|
| 1293 |
+
align-items: center;
|
| 1294 |
+
gap: 4px;
|
| 1295 |
+
}
|
| 1296 |
+
|
| 1297 |
+
.tts-speed-label {
|
| 1298 |
+
font-size: 11px;
|
| 1299 |
+
color: var(--text-muted);
|
| 1300 |
+
margin-right: 4px;
|
| 1301 |
+
}
|
| 1302 |
+
|
| 1303 |
+
.tts-speed-btn {
|
| 1304 |
+
padding: 4px 8px;
|
| 1305 |
+
font-size: 11px;
|
| 1306 |
+
font-weight: 600;
|
| 1307 |
+
color: var(--text-muted);
|
| 1308 |
+
background: var(--bg-hover);
|
| 1309 |
+
border-radius: 4px;
|
| 1310 |
+
cursor: pointer;
|
| 1311 |
+
transition: all var(--transition);
|
| 1312 |
+
}
|
| 1313 |
+
|
| 1314 |
+
.tts-speed-btn:hover {
|
| 1315 |
+
color: var(--text-primary);
|
| 1316 |
+
background: var(--bg-card);
|
| 1317 |
+
}
|
| 1318 |
+
|
| 1319 |
+
.tts-speed-btn.active {
|
| 1320 |
+
color: #fff;
|
| 1321 |
+
background: var(--accent);
|
| 1322 |
+
}
|
| 1323 |
+
|
| 1324 |
+
/* Responsive adjustments */
|
| 1325 |
+
@media (max-width: 480px) {
|
| 1326 |
+
.tts-player {
|
| 1327 |
+
bottom: 80px;
|
| 1328 |
+
width: 95%;
|
| 1329 |
+
padding: 12px;
|
| 1330 |
+
}
|
| 1331 |
+
|
| 1332 |
+
.tts-speed-label {
|
| 1333 |
+
display: none;
|
| 1334 |
+
}
|
| 1335 |
+
|
| 1336 |
+
.tts-time {
|
| 1337 |
+
font-size: 11px;
|
| 1338 |
+
min-width: 60px;
|
| 1339 |
+
}
|
| 1340 |
+
}
|
| 1341 |
+
|
| 1342 |
+
/* Buttons */
|
| 1343 |
+
.btn-primary {
|
| 1344 |
+
background: var(--accent);
|
| 1345 |
+
color: #fff;
|
| 1346 |
+
border-radius: var(--radius-md);
|
| 1347 |
+
font-weight: 600;
|
| 1348 |
+
transition: all var(--transition);
|
| 1349 |
+
}
|
| 1350 |
+
|
| 1351 |
+
.btn-primary:hover:not(:disabled) {
|
| 1352 |
+
background: var(--accent-hover);
|
| 1353 |
+
}
|
| 1354 |
+
|
| 1355 |
+
.btn-primary:disabled {
|
| 1356 |
+
opacity: 0.5;
|
| 1357 |
+
cursor: not-allowed;
|
| 1358 |
+
}
|
| 1359 |
+
|
| 1360 |
+
.btn-secondary {
|
| 1361 |
+
background: #2D2D2D;
|
| 1362 |
+
color: #fff;
|
| 1363 |
+
padding: 12px 24px;
|
| 1364 |
+
border-radius: var(--radius-md);
|
| 1365 |
+
font-weight: 500;
|
| 1366 |
+
}
|
| 1367 |
+
|
| 1368 |
+
.btn-secondary:hover { background: #3D3D3D; }
|
| 1369 |
+
|
| 1370 |
+
/* Loader */
|
| 1371 |
+
.loader-spinner {
|
| 1372 |
+
width: 24px;
|
| 1373 |
+
height: 24px;
|
| 1374 |
+
border: 3px solid #3D3D3D;
|
| 1375 |
+
border-top-color: var(--accent);
|
| 1376 |
+
border-radius: 50%;
|
| 1377 |
+
animation: spin 0.8s linear infinite;
|
| 1378 |
+
}
|
| 1379 |
+
|
| 1380 |
+
.loader-spinner.large { width: 40px; height: 40px; }
|
| 1381 |
+
|
| 1382 |
+
@keyframes spin { to { transform: rotate(360deg); } }
|
| 1383 |
+
|
| 1384 |
+
/* Loading Overlay */
|
| 1385 |
+
.loading-overlay {
|
| 1386 |
+
position: fixed;
|
| 1387 |
+
inset: 0;
|
| 1388 |
+
background: rgba(13,13,13,0.95);
|
| 1389 |
+
display: flex;
|
| 1390 |
+
flex-direction: column;
|
| 1391 |
+
align-items: center;
|
| 1392 |
+
justify-content: center;
|
| 1393 |
+
gap: 16px;
|
| 1394 |
+
z-index: 300;
|
| 1395 |
+
}
|
| 1396 |
+
|
| 1397 |
+
.loading-overlay p {
|
| 1398 |
+
color: var(--text-secondary);
|
| 1399 |
+
font-size: 14px;
|
| 1400 |
+
}
|
| 1401 |
+
|
| 1402 |
+
/* Toast */
|
| 1403 |
+
.toast-container {
|
| 1404 |
+
position: fixed;
|
| 1405 |
+
bottom: 20px;
|
| 1406 |
+
left: 50%;
|
| 1407 |
+
transform: translateX(-50%);
|
| 1408 |
+
z-index: 400;
|
| 1409 |
+
width: 90%;
|
| 1410 |
+
max-width: 360px;
|
| 1411 |
+
}
|
| 1412 |
+
|
| 1413 |
+
.toast {
|
| 1414 |
+
display: flex;
|
| 1415 |
+
align-items: center;
|
| 1416 |
+
gap: 12px;
|
| 1417 |
+
padding: 14px 16px;
|
| 1418 |
+
background: #2D2D2D;
|
| 1419 |
+
border-radius: var(--radius-md);
|
| 1420 |
+
margin-bottom: 8px;
|
| 1421 |
+
animation: slideUp 0.3s ease;
|
| 1422 |
+
}
|
| 1423 |
+
|
| 1424 |
+
.toast.success { border-left: 4px solid var(--success); }
|
| 1425 |
+
.toast.error { border-left: 4px solid var(--error); }
|
| 1426 |
+
.toast.warning { border-left: 4px solid var(--warning); }
|
| 1427 |
+
|
| 1428 |
+
.toast-message { flex: 1; font-size: 14px; }
|
| 1429 |
+
|
| 1430 |
+
.toast-close {
|
| 1431 |
+
color: var(--text-muted);
|
| 1432 |
+
font-size: 18px;
|
| 1433 |
+
cursor: pointer;
|
| 1434 |
+
}
|
| 1435 |
+
|
| 1436 |
+
/* ==========================================================================
|
| 1437 |
+
RESPONSIVE
|
| 1438 |
+
========================================================================== */
|
| 1439 |
+
@media (max-width: 480px) {
|
| 1440 |
+
.header-subtitle { display: none; }
|
| 1441 |
+
.header-title { font-size: 18px; }
|
| 1442 |
+
.app-title { font-size: 36px; }
|
| 1443 |
+
.language-grid { gap: 10px; }
|
| 1444 |
+
.language-btn { padding: 16px 12px; }
|
| 1445 |
+
}
|
| 1446 |
+
|
| 1447 |
+
@media (min-width: 768px) {
|
| 1448 |
+
.diagnosis-main { max-width: 900px; padding: 30px; }
|
| 1449 |
+
.upload-card { padding: 32px; }
|
| 1450 |
+
.language-grid { grid-template-columns: repeat(4, 1fr); }
|
| 1451 |
+
}
|
frontend/index.html
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
<head>
|
| 4 |
+
<meta charset="UTF-8">
|
| 5 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
| 6 |
+
<meta name="description" content="FarmEyes - AI-Powered Crop Disease Detection for African Farmers">
|
| 7 |
+
<meta name="theme-color" content="#0D0D0D">
|
| 8 |
+
|
| 9 |
+
<link rel="icon" type="image/svg+xml" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>🌱</text></svg>">
|
| 10 |
+
|
| 11 |
+
<title>FarmEyes - Crop Disease Detection</title>
|
| 12 |
+
|
| 13 |
+
<link rel="stylesheet" href="/static/css/main.css">
|
| 14 |
+
<link rel="preconnect" href="https://fonts.googleapis.com">
|
| 15 |
+
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
| 16 |
+
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap" rel="stylesheet">
|
| 17 |
+
</head>
|
| 18 |
+
<body>
|
| 19 |
+
<div id="app" class="app-container">
|
| 20 |
+
|
| 21 |
+
<!-- ============================================================== -->
|
| 22 |
+
<!-- PAGE 1: LANGUAGE SELECTOR (Shows First on Startup) -->
|
| 23 |
+
<!-- ============================================================== -->
|
| 24 |
+
<div id="page-language" class="page active">
|
| 25 |
+
<div class="language-page">
|
| 26 |
+
<div class="language-content">
|
| 27 |
+
<div class="logo-large">🌱</div>
|
| 28 |
+
<h1 class="app-title">FarmEyes</h1>
|
| 29 |
+
<p class="app-tagline">AI-Powered Crop Disease Detection for African Farmers</p>
|
| 30 |
+
|
| 31 |
+
<div class="language-selection">
|
| 32 |
+
<h2 class="selection-title">Select Your Language</h2>
|
| 33 |
+
<p class="selection-subtitle">Choose your preferred language to continue</p>
|
| 34 |
+
|
| 35 |
+
<div class="language-grid">
|
| 36 |
+
<!-- English: intentionally displayed without a country flag -->
|
| 37 |
+
<button class="language-btn" data-lang="en">
|
| 38 |
+
<span class="lang-name">English</span>
|
| 39 |
+
</button>
|
| 40 |
+
<button class="language-btn" data-lang="ha">
|
| 41 |
+
<span class="lang-flag">🇳🇬</span>
|
| 42 |
+
<span class="lang-name">Hausa</span>
|
| 43 |
+
</button>
|
| 44 |
+
<button class="language-btn" data-lang="yo">
|
| 45 |
+
<span class="lang-flag">🇳🇬</span>
|
| 46 |
+
<span class="lang-name">Yorùbá</span>
|
| 47 |
+
</button>
|
| 48 |
+
<button class="language-btn" data-lang="ig">
|
| 49 |
+
<span class="lang-flag">🇳🇬</span>
|
| 50 |
+
<span class="lang-name">Igbo</span>
|
| 51 |
+
</button>
|
| 52 |
+
</div>
|
| 53 |
+
|
| 54 |
+
<button id="btn-continue-language" class="btn-primary btn-continue" disabled>
|
| 55 |
+
<span data-i18n="buttons.continue">Continue</span>
|
| 56 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
|
| 57 |
+
<path d="M5 12h14M12 5l7 7-7 7"/>
|
| 58 |
+
</svg>
|
| 59 |
+
</button>
|
| 60 |
+
</div>
|
| 61 |
+
</div>
|
| 62 |
+
|
| 63 |
+
<footer class="page-footer">
|
| 64 |
+
<p>Designed with AI-Powered N-ATLaS Language Platform</p>
|
| 65 |
+
</footer>
|
| 66 |
+
</div>
|
| 67 |
+
</div>
|
| 68 |
+
|
| 69 |
+
<!-- ============================================================== -->
|
| 70 |
+
<!-- PAGE 2: DIAGNOSIS (Modern/Classy Design) -->
|
| 71 |
+
<!-- ============================================================== -->
|
| 72 |
+
<div id="page-diagnosis" class="page">
|
| 73 |
+
<div class="diagnosis-page">
|
| 74 |
+
<!-- Header with branding -->
|
| 75 |
+
<header class="main-header">
|
| 76 |
+
<div class="header-brand">
|
| 77 |
+
<span class="header-logo">🌱</span>
|
| 78 |
+
<div class="header-text">
|
| 79 |
+
<h1 class="header-title">FarmEyes</h1>
|
| 80 |
+
<p class="header-subtitle">AI-Powered Crop Disease Detection for African Farmers</p>
|
| 81 |
+
</div>
|
| 82 |
+
</div>
|
| 83 |
+
<div class="header-actions">
|
| 84 |
+
<button id="btn-language-toggle" class="btn-language">
|
| 85 |
+
<span id="current-lang-display">EN</span>
|
| 86 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="14" height="14">
|
| 87 |
+
<path d="M6 9l6 6 6-6"/>
|
| 88 |
+
</svg>
|
| 89 |
+
</button>
|
| 90 |
+
<div id="language-menu" class="dropdown-menu hidden">
|
| 91 |
+
<button class="dropdown-item" data-lang="en">English</button>
|
| 92 |
+
<button class="dropdown-item" data-lang="ha">Hausa</button>
|
| 93 |
+
<button class="dropdown-item" data-lang="yo">Yorùbá</button>
|
| 94 |
+
<button class="dropdown-item" data-lang="ig">Igbo</button>
|
| 95 |
+
</div>
|
| 96 |
+
</div>
|
| 97 |
+
</header>
|
| 98 |
+
|
| 99 |
+
<!-- Main Content Area -->
|
| 100 |
+
<main class="diagnosis-main">
|
| 101 |
+
<!-- Upload Section -->
|
| 102 |
+
<section id="upload-section" class="upload-section">
|
| 103 |
+
<div class="upload-card">
|
| 104 |
+
<div class="upload-header">
|
| 105 |
+
<span class="upload-icon-small">📸</span>
|
| 106 |
+
<h2 data-i18n="diagnosis.upload_title">Upload Crop Image</h2>
|
| 107 |
+
</div>
|
| 108 |
+
<p class="upload-desc" data-i18n="diagnosis.upload_desc">Take a clear photo of the affected leaf or plant</p>
|
| 109 |
+
|
| 110 |
+
<div id="upload-zone" class="upload-zone">
|
| 111 |
+
<input type="file" id="file-input" accept="image/*" capture="environment" hidden>
|
| 112 |
+
<div class="upload-icon">
|
| 113 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1.5" width="48" height="48">
|
| 114 |
+
<path d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z"/>
|
| 115 |
+
</svg>
|
| 116 |
+
</div>
|
| 117 |
+
<p class="upload-text" data-i18n="diagnosis.click_or_drag">Click or drag image here</p>
|
| 118 |
+
<span class="upload-formats">JPG, PNG, WEBP (max 10MB)</span>
|
| 119 |
+
</div>
|
| 120 |
+
|
| 121 |
+
<div id="image-preview-container" class="image-preview-container hidden">
|
| 122 |
+
<img id="image-preview" class="image-preview" alt="Preview">
|
| 123 |
+
<button id="btn-remove-image" class="btn-remove-image">✕</button>
|
| 124 |
+
</div>
|
| 125 |
+
|
| 126 |
+
<button id="btn-analyze" class="btn-primary btn-analyze" disabled>
|
| 127 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
|
| 128 |
+
<circle cx="11" cy="11" r="8"/><path d="M21 21l-4.35-4.35"/>
|
| 129 |
+
</svg>
|
| 130 |
+
<span data-i18n="buttons.analyze">Analyze Crop</span>
|
| 131 |
+
</button>
|
| 132 |
+
|
| 133 |
+
<div id="analyzing-loader" class="analyzing-loader hidden">
|
| 134 |
+
<div class="loader-spinner"></div>
|
| 135 |
+
<p data-i18n="diagnosis.analyzing">Analyzing your crop...</p>
|
| 136 |
+
</div>
|
| 137 |
+
|
| 138 |
+
<div class="supported-crops">
|
| 139 |
+
<span class="crop-tag">🌿 Cassava</span>
|
| 140 |
+
<span class="crop-tag">🍫 Cocoa</span>
|
| 141 |
+
<span class="crop-tag">🍅 Tomato</span>
|
| 142 |
+
</div>
|
| 143 |
+
</div>
|
| 144 |
+
</section>
|
| 145 |
+
|
| 146 |
+
<!-- Results Section -->
|
| 147 |
+
<section id="results-section" class="results-section hidden">
|
| 148 |
+
<div class="results-header">
|
| 149 |
+
<h2>📋 <span data-i18n="results.title">Diagnosis Results</span></h2>
|
| 150 |
+
<button id="btn-new-scan" class="btn-text" data-i18n="buttons.new_scan">+ New Scan</button>
|
| 151 |
+
</div>
|
| 152 |
+
|
| 153 |
+
<!-- Disease Card -->
|
| 154 |
+
<div class="disease-card">
|
| 155 |
+
<div class="disease-top">
|
| 156 |
+
<div class="disease-icon" id="disease-icon">🦠</div>
|
| 157 |
+
<div class="disease-info">
|
| 158 |
+
<h3 id="disease-name">Disease Name</h3>
|
| 159 |
+
<span id="crop-type" class="crop-label">Crop</span>
|
| 160 |
+
</div>
|
| 161 |
+
<span id="severity-badge" class="severity-badge">--</span>
|
| 162 |
+
</div>
|
| 163 |
+
<div class="disease-confidence">
|
| 164 |
+
<span class="conf-label" data-i18n="results.confidence">Confidence:</span>
|
| 165 |
+
<div class="conf-bar-wrap">
|
| 166 |
+
<div id="confidence-bar" class="conf-bar"></div>
|
| 167 |
+
</div>
|
| 168 |
+
<span id="confidence-value" class="conf-value">0%</span>
|
| 169 |
+
</div>
|
| 170 |
+
</div>
|
| 171 |
+
|
| 172 |
+
<!-- Info Tabs -->
|
| 173 |
+
<div class="info-card">
|
| 174 |
+
<div class="tabs">
|
| 175 |
+
<button class="tab-btn active" data-tab="symptoms" data-i18n="tabs.symptoms">Symptoms</button>
|
| 176 |
+
<button class="tab-btn" data-tab="treatment" data-i18n="tabs.treatment">Treatment</button>
|
| 177 |
+
<button class="tab-btn" data-tab="prevention" data-i18n="tabs.prevention">Prevention</button>
|
| 178 |
+
</div>
|
| 179 |
+
|
| 180 |
+
<div id="tab-symptoms" class="tab-content active">
|
| 181 |
+
<ul id="symptoms-list" class="info-list"></ul>
|
| 182 |
+
<div class="info-block">
|
| 183 |
+
<h4 data-i18n="results.transmission">How It Spreads</h4>
|
| 184 |
+
<ul id="transmission-list" class="info-list"></ul>
|
| 185 |
+
</div>
|
| 186 |
+
<div class="info-block">
|
| 187 |
+
<h4 data-i18n="results.yield_impact">Yield Impact</h4>
|
| 188 |
+
<p id="yield-impact-text"></p>
|
| 189 |
+
</div>
|
| 190 |
+
<div class="info-block recovery-block">
|
| 191 |
+
<h4 data-i18n="results.recovery">Recovery Chance</h4>
|
| 192 |
+
<div class="recovery-bar-wrap">
|
| 193 |
+
<div id="recovery-bar" class="recovery-bar"></div>
|
| 194 |
+
</div>
|
| 195 |
+
<span id="recovery-text"></span>
|
| 196 |
+
</div>
|
| 197 |
+
</div>
|
| 198 |
+
|
| 199 |
+
<div id="tab-treatment" class="tab-content hidden">
|
| 200 |
+
<div class="info-block">
|
| 201 |
+
<h4 data-i18n="treatment.immediate">Immediate Actions</h4>
|
| 202 |
+
<ul id="immediate-actions-list" class="info-list"></ul>
|
| 203 |
+
</div>
|
| 204 |
+
<div class="info-block">
|
| 205 |
+
<h4 data-i18n="treatment.chemical">Chemical Treatment</h4>
|
| 206 |
+
<div id="chemical-treatments" class="treatment-grid"></div>
|
| 207 |
+
</div>
|
| 208 |
+
<div class="cost-block">
|
| 209 |
+
<span class="cost-label" data-i18n="treatment.cost">Estimated Cost:</span>
|
| 210 |
+
<span id="cost-estimate" class="cost-value"></span>
|
| 211 |
+
</div>
|
| 212 |
+
</div>
|
| 213 |
+
|
| 214 |
+
<div id="tab-prevention" class="tab-content hidden">
|
| 215 |
+
<ul id="prevention-list" class="info-list"></ul>
|
| 216 |
+
</div>
|
| 217 |
+
</div>
|
| 218 |
+
|
| 219 |
+
<!-- Chat Button (Simple but visible) -->
|
| 220 |
+
<button id="btn-open-chat" class="btn-chat">
|
| 221 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
|
| 222 |
+
<path d="M21 15a2 2 0 01-2 2H7l-4 4V5a2 2 0 012-2h14a2 2 0 012 2z"/>
|
| 223 |
+
</svg>
|
| 224 |
+
<span data-i18n="buttons.chat">Chat with Assistant</span>
|
| 225 |
+
</button>
|
| 226 |
+
</section>
|
| 227 |
+
</main>
|
| 228 |
+
|
| 229 |
+
<!-- Footer -->
|
| 230 |
+
<footer class="main-footer">
|
| 231 |
+
<p>Designed with AI-Powered N-ATLaS Language Platform</p>
|
| 232 |
+
</footer>
|
| 233 |
+
</div>
|
| 234 |
+
</div>
|
| 235 |
+
|
| 236 |
+
<!-- ============================================================== -->
|
| 237 |
+
<!-- PAGE 3: CHAT (ChatGPT-Inspired Dark Design) -->
|
| 238 |
+
<!-- ============================================================== -->
|
| 239 |
+
<div id="page-chat" class="page">
|
| 240 |
+
<div class="chat-page">
|
| 241 |
+
<header class="chat-header">
|
| 242 |
+
<button id="btn-back-diagnosis" class="btn-back">
|
| 243 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="18" height="18">
|
| 244 |
+
<path d="M19 12H5M12 19l-7-7 7-7"/>
|
| 245 |
+
</svg>
|
| 246 |
+
<span data-i18n="buttons.back">Back</span>
|
| 247 |
+
</button>
|
| 248 |
+
<div class="chat-title">
|
| 249 |
+
<span>🌱</span> FarmEyes Assistant
|
| 250 |
+
</div>
|
| 251 |
+
<span id="chat-lang-display" class="chat-lang">EN</span>
|
| 252 |
+
</header>
|
| 253 |
+
|
| 254 |
+
<div id="chat-context-banner" class="chat-context">
|
| 255 |
+
<span data-i18n="chat.discussing">Discussing:</span>
|
| 256 |
+
<strong id="context-disease-name">Disease</strong>
|
| 257 |
+
<span>•</span>
|
| 258 |
+
<span id="context-confidence">0%</span>
|
| 259 |
+
<span>•</span>
|
| 260 |
+
<span id="context-severity">Severity</span>
|
| 261 |
+
</div>
|
| 262 |
+
|
| 263 |
+
<div id="chat-messages" class="chat-messages">
|
| 264 |
+
<div id="chat-welcome" class="chat-welcome">
|
| 265 |
+
<div class="welcome-icon">🌱</div>
|
| 266 |
+
<h3>FarmEyes Assistant</h3>
|
| 267 |
+
<p data-i18n="chat.welcome">Ask me anything about your diagnosis, treatments, or prevention tips.</p>
|
| 268 |
+
</div>
|
| 269 |
+
</div>
|
| 270 |
+
|
| 271 |
+
<div class="chat-input-wrap">
|
| 272 |
+
<div class="chat-input-box">
|
| 273 |
+
<textarea id="chat-input" data-i18n-placeholder="chat.placeholder" placeholder="Ask about your diagnosis..." rows="1" maxlength="2000"></textarea>
|
| 274 |
+
<button id="btn-voice-input" class="btn-icon btn-voice" title="Voice">
|
| 275 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="20" height="20">
|
| 276 |
+
<path d="M12 1a3 3 0 00-3 3v8a3 3 0 006 0V4a3 3 0 00-3-3z"/>
|
| 277 |
+
<path d="M19 10v2a7 7 0 01-14 0v-2"/>
|
| 278 |
+
<line x1="12" y1="19" x2="12" y2="23"/>
|
| 279 |
+
</svg>
|
| 280 |
+
</button>
|
| 281 |
+
<button id="btn-send-message" class="btn-icon btn-send" title="Send" disabled>
|
| 282 |
+
<svg viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" width="20" height="20">
|
| 283 |
+
<line x1="22" y1="2" x2="11" y2="13"/>
|
| 284 |
+
<polygon points="22,2 15,22 11,13 2,9"/>
|
| 285 |
+
</svg>
|
| 286 |
+
</button>
|
| 287 |
+
</div>
|
| 288 |
+
<p class="chat-note" data-i18n="chat.disclaimer">FarmEyes provides guidance only. Consult experts for serious cases.</p>
|
| 289 |
+
</div>
|
| 290 |
+
|
| 291 |
+
<div id="voice-overlay" class="voice-overlay hidden">
|
| 292 |
+
<div class="voice-modal">
|
| 293 |
+
<div class="voice-anim">
|
| 294 |
+
<span></span><span></span><span></span>
|
| 295 |
+
</div>
|
| 296 |
+
<p data-i18n="voice.listening">Listening...</p>
|
| 297 |
+
<button id="btn-stop-voice" class="btn-secondary" data-i18n="buttons.stop">Stop</button>
|
| 298 |
+
</div>
|
| 299 |
+
</div>
|
| 300 |
+
</div>
|
| 301 |
+
</div>
|
| 302 |
+
|
| 303 |
+
<!-- Loading Overlay -->
|
| 304 |
+
<div id="loading-overlay" class="loading-overlay hidden">
|
| 305 |
+
<div class="loader-spinner large"></div>
|
| 306 |
+
<p id="loading-text">Loading...</p>
|
| 307 |
+
</div>
|
| 308 |
+
|
| 309 |
+
<!-- Toast -->
|
| 310 |
+
<div id="toast-container" class="toast-container"></div>
|
| 311 |
+
</div>
|
| 312 |
+
|
| 313 |
+
<script src="/static/js/api.js"></script>
|
| 314 |
+
<script src="/static/js/i18n.js"></script>
|
| 315 |
+
<script src="/static/js/voice.js"></script>
|
| 316 |
+
<script src="/static/js/tts.js"></script>
|
| 317 |
+
<script src="/static/js/diagnosis.js"></script>
|
| 318 |
+
<script src="/static/js/chat.js"></script>
|
| 319 |
+
<script src="/static/js/app.js"></script>
|
| 320 |
+
</body>
|
| 321 |
+
</html>
|
frontend/js/api.js
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes API Client
|
| 3 |
+
* ===================
|
| 4 |
+
* Handles all communication with the FastAPI backend.
|
| 5 |
+
* Provides clean async methods for detection, chat, and transcription.
|
| 6 |
+
*/
|
| 7 |
+
|
| 8 |
+
const FarmEyesAPI = {
|
| 9 |
+
// Base URL - auto-detect based on environment
|
| 10 |
+
baseUrl: window.location.origin,
|
| 11 |
+
|
| 12 |
+
// Current session ID
|
| 13 |
+
sessionId: null,
|
| 14 |
+
|
| 15 |
+
// Current language
|
| 16 |
+
language: 'en',
|
| 17 |
+
|
| 18 |
+
/**
|
| 19 |
+
* Initialize API client
|
| 20 |
+
*/
|
| 21 |
+
async init() {
|
| 22 |
+
// Try to get existing session from storage
|
| 23 |
+
this.sessionId = localStorage.getItem('farmeyes_session');
|
| 24 |
+
this.language = localStorage.getItem('farmeyes_language') || 'en';
|
| 25 |
+
|
| 26 |
+
// Create new session if none exists
|
| 27 |
+
if (!this.sessionId) {
|
| 28 |
+
await this.createSession(this.language);
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
console.log('[API] Initialized with session:', this.sessionId?.substring(0, 8));
|
| 32 |
+
return this;
|
| 33 |
+
},
|
| 34 |
+
|
| 35 |
+
/**
|
| 36 |
+
* Make an API request
|
| 37 |
+
* @param {string} endpoint - API endpoint
|
| 38 |
+
* @param {object} options - Fetch options
|
| 39 |
+
* @returns {Promise<object>} Response data
|
| 40 |
+
*/
|
| 41 |
+
async request(endpoint, options = {}) {
|
| 42 |
+
const url = `${this.baseUrl}${endpoint}`;
|
| 43 |
+
|
| 44 |
+
const defaultOptions = {
|
| 45 |
+
headers: {
|
| 46 |
+
'Accept': 'application/json',
|
| 47 |
+
},
|
| 48 |
+
};
|
| 49 |
+
|
| 50 |
+
// Merge options
|
| 51 |
+
const fetchOptions = { ...defaultOptions, ...options };
|
| 52 |
+
|
| 53 |
+
// Add Content-Type for JSON body
|
| 54 |
+
if (options.body && !(options.body instanceof FormData)) {
|
| 55 |
+
fetchOptions.headers['Content-Type'] = 'application/json';
|
| 56 |
+
fetchOptions.body = JSON.stringify(options.body);
|
| 57 |
+
}
|
| 58 |
+
|
| 59 |
+
try {
|
| 60 |
+
const response = await fetch(url, fetchOptions);
|
| 61 |
+
|
| 62 |
+
// Handle non-JSON responses
|
| 63 |
+
const contentType = response.headers.get('content-type');
|
| 64 |
+
if (!contentType || !contentType.includes('application/json')) {
|
| 65 |
+
if (!response.ok) {
|
| 66 |
+
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
| 67 |
+
}
|
| 68 |
+
return { success: true };
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
const data = await response.json();
|
| 72 |
+
|
| 73 |
+
if (!response.ok) {
|
| 74 |
+
throw new Error(data.detail || data.error || `HTTP ${response.status}`);
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
+
return data;
|
| 78 |
+
} catch (error) {
|
| 79 |
+
console.error('[API] Request failed:', endpoint, error);
|
| 80 |
+
throw error;
|
| 81 |
+
}
|
| 82 |
+
},
|
| 83 |
+
|
| 84 |
+
// =========================================================================
|
| 85 |
+
// SESSION MANAGEMENT
|
| 86 |
+
// =========================================================================
|
| 87 |
+
|
| 88 |
+
/**
|
| 89 |
+
* Create a new session
|
| 90 |
+
* @param {string} language - Language code
|
| 91 |
+
* @returns {Promise<object>} Session data
|
| 92 |
+
*/
|
| 93 |
+
async createSession(language = 'en') {
|
| 94 |
+
const data = await this.request(`/api/session?language=${language}`);
|
| 95 |
+
|
| 96 |
+
if (data.success && data.session_id) {
|
| 97 |
+
this.sessionId = data.session_id;
|
| 98 |
+
this.language = language;
|
| 99 |
+
localStorage.setItem('farmeyes_session', this.sessionId);
|
| 100 |
+
localStorage.setItem('farmeyes_language', language);
|
| 101 |
+
console.log('[API] Session created:', this.sessionId.substring(0, 8));
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
return data;
|
| 105 |
+
},
|
| 106 |
+
|
| 107 |
+
/**
|
| 108 |
+
* Get session info
|
| 109 |
+
* @returns {Promise<object>} Session info
|
| 110 |
+
*/
|
| 111 |
+
async getSession() {
|
| 112 |
+
if (!this.sessionId) {
|
| 113 |
+
return { success: false, error: 'No session' };
|
| 114 |
+
}
|
| 115 |
+
return this.request(`/api/session/${this.sessionId}`);
|
| 116 |
+
},
|
| 117 |
+
|
| 118 |
+
/**
|
| 119 |
+
* Update session language
|
| 120 |
+
* @param {string} language - New language code
|
| 121 |
+
* @returns {Promise<object>} Updated session
|
| 122 |
+
*/
|
| 123 |
+
async setLanguage(language) {
|
| 124 |
+
if (!this.sessionId) {
|
| 125 |
+
await this.createSession(language);
|
| 126 |
+
return { success: true };
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
const data = await this.request(`/api/session/${this.sessionId}/language?language=${language}`, {
|
| 130 |
+
method: 'PUT'
|
| 131 |
+
});
|
| 132 |
+
|
| 133 |
+
if (data.success) {
|
| 134 |
+
this.language = language;
|
| 135 |
+
localStorage.setItem('farmeyes_language', language);
|
| 136 |
+
}
|
| 137 |
+
|
| 138 |
+
return data;
|
| 139 |
+
},
|
| 140 |
+
|
| 141 |
+
/**
|
| 142 |
+
* Clear current session and create new one
|
| 143 |
+
* @returns {Promise<object>} New session data
|
| 144 |
+
*/
|
| 145 |
+
async resetSession() {
|
| 146 |
+
if (this.sessionId) {
|
| 147 |
+
try {
|
| 148 |
+
await this.request(`/api/session/${this.sessionId}`, { method: 'DELETE' });
|
| 149 |
+
} catch (e) {
|
| 150 |
+
// Ignore errors on delete
|
| 151 |
+
}
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
localStorage.removeItem('farmeyes_session');
|
| 155 |
+
this.sessionId = null;
|
| 156 |
+
|
| 157 |
+
return this.createSession(this.language);
|
| 158 |
+
},
|
| 159 |
+
|
| 160 |
+
// =========================================================================
|
| 161 |
+
// DISEASE DETECTION
|
| 162 |
+
// =========================================================================
|
| 163 |
+
|
| 164 |
+
/**
|
| 165 |
+
* Analyze crop image for disease detection
|
| 166 |
+
* @param {File} imageFile - Image file to analyze
|
| 167 |
+
* @param {string} language - Language for results
|
| 168 |
+
* @returns {Promise<object>} Detection results
|
| 169 |
+
*/
|
| 170 |
+
async detectDisease(imageFile, language = null) {
|
| 171 |
+
const formData = new FormData();
|
| 172 |
+
formData.append('file', imageFile);
|
| 173 |
+
formData.append('language', language || this.language);
|
| 174 |
+
formData.append('session_id', this.sessionId || '');
|
| 175 |
+
|
| 176 |
+
const data = await this.request('/api/detect/', {
|
| 177 |
+
method: 'POST',
|
| 178 |
+
body: formData
|
| 179 |
+
});
|
| 180 |
+
|
| 181 |
+
// Update session ID if returned
|
| 182 |
+
if (data.session_id) {
|
| 183 |
+
this.sessionId = data.session_id;
|
| 184 |
+
localStorage.setItem('farmeyes_session', this.sessionId);
|
| 185 |
+
}
|
| 186 |
+
|
| 187 |
+
return data;
|
| 188 |
+
},
|
| 189 |
+
|
| 190 |
+
/**
|
| 191 |
+
* Analyze base64 encoded image
|
| 192 |
+
* @param {string} base64Image - Base64 encoded image
|
| 193 |
+
* @param {string} language - Language for results
|
| 194 |
+
* @returns {Promise<object>} Detection results
|
| 195 |
+
*/
|
| 196 |
+
async detectDiseaseBase64(base64Image, language = null) {
|
| 197 |
+
const data = await this.request('/api/detect/base64', {
|
| 198 |
+
method: 'POST',
|
| 199 |
+
body: {
|
| 200 |
+
image_base64: base64Image,
|
| 201 |
+
language: language || this.language,
|
| 202 |
+
session_id: this.sessionId
|
| 203 |
+
}
|
| 204 |
+
});
|
| 205 |
+
|
| 206 |
+
if (data.session_id) {
|
| 207 |
+
this.sessionId = data.session_id;
|
| 208 |
+
localStorage.setItem('farmeyes_session', this.sessionId);
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
return data;
|
| 212 |
+
},
|
| 213 |
+
|
| 214 |
+
/**
|
| 215 |
+
* Get detection service status
|
| 216 |
+
* @returns {Promise<object>} Service status
|
| 217 |
+
*/
|
| 218 |
+
async getDetectionStatus() {
|
| 219 |
+
return this.request('/api/detect/status');
|
| 220 |
+
},
|
| 221 |
+
|
| 222 |
+
/**
|
| 223 |
+
* Get supported disease classes
|
| 224 |
+
* @returns {Promise<object>} Classes info
|
| 225 |
+
*/
|
| 226 |
+
async getClasses() {
|
| 227 |
+
return this.request('/api/detect/classes');
|
| 228 |
+
},
|
| 229 |
+
|
| 230 |
+
/**
|
| 231 |
+
* Clear current diagnosis
|
| 232 |
+
* @returns {Promise<object>} Result
|
| 233 |
+
*/
|
| 234 |
+
async clearDiagnosis() {
|
| 235 |
+
if (!this.sessionId) {
|
| 236 |
+
return { success: false, error: 'No session' };
|
| 237 |
+
}
|
| 238 |
+
return this.request(`/api/detect/session/${this.sessionId}`, {
|
| 239 |
+
method: 'DELETE'
|
| 240 |
+
});
|
| 241 |
+
},
|
| 242 |
+
|
| 243 |
+
// =========================================================================
|
| 244 |
+
// CHAT
|
| 245 |
+
// =========================================================================
|
| 246 |
+
|
| 247 |
+
/**
|
| 248 |
+
* Send chat message
|
| 249 |
+
* @param {string} message - User message
|
| 250 |
+
* @param {string} language - Response language
|
| 251 |
+
* @returns {Promise<object>} Chat response
|
| 252 |
+
*/
|
| 253 |
+
async sendChatMessage(message, language = null) {
|
| 254 |
+
if (!this.sessionId) {
|
| 255 |
+
await this.createSession(language || this.language);
|
| 256 |
+
}
|
| 257 |
+
|
| 258 |
+
return this.request('/api/chat/', {
|
| 259 |
+
method: 'POST',
|
| 260 |
+
body: {
|
| 261 |
+
session_id: this.sessionId,
|
| 262 |
+
message: message,
|
| 263 |
+
language: language || this.language
|
| 264 |
+
}
|
| 265 |
+
});
|
| 266 |
+
},
|
| 267 |
+
|
| 268 |
+
/**
|
| 269 |
+
* Get welcome message for chat
|
| 270 |
+
* @param {string} language - Language code
|
| 271 |
+
* @returns {Promise<object>} Welcome message
|
| 272 |
+
*/
|
| 273 |
+
async getChatWelcome(language = null) {
|
| 274 |
+
if (!this.sessionId) {
|
| 275 |
+
return { success: false, error: 'No session' };
|
| 276 |
+
}
|
| 277 |
+
|
| 278 |
+
const lang = language || this.language;
|
| 279 |
+
return this.request(`/api/chat/welcome?session_id=${this.sessionId}&language=${lang}`);
|
| 280 |
+
},
|
| 281 |
+
|
| 282 |
+
/**
|
| 283 |
+
* Get chat history
|
| 284 |
+
* @param {number} limit - Max messages to return
|
| 285 |
+
* @returns {Promise<object>} Chat history
|
| 286 |
+
*/
|
| 287 |
+
async getChatHistory(limit = 50) {
|
| 288 |
+
if (!this.sessionId) {
|
| 289 |
+
return { success: false, messages: [] };
|
| 290 |
+
}
|
| 291 |
+
|
| 292 |
+
return this.request(`/api/chat/history?session_id=${this.sessionId}&limit=${limit}`);
|
| 293 |
+
},
|
| 294 |
+
|
| 295 |
+
/**
|
| 296 |
+
* Clear chat history
|
| 297 |
+
* @returns {Promise<object>} Result
|
| 298 |
+
*/
|
| 299 |
+
async clearChatHistory() {
|
| 300 |
+
if (!this.sessionId) {
|
| 301 |
+
return { success: false };
|
| 302 |
+
}
|
| 303 |
+
|
| 304 |
+
return this.request(`/api/chat/history?session_id=${this.sessionId}`, {
|
| 305 |
+
method: 'DELETE'
|
| 306 |
+
});
|
| 307 |
+
},
|
| 308 |
+
|
| 309 |
+
/**
|
| 310 |
+
* Get current diagnosis context
|
| 311 |
+
* @returns {Promise<object>} Diagnosis context
|
| 312 |
+
*/
|
| 313 |
+
async getChatContext() {
|
| 314 |
+
if (!this.sessionId) {
|
| 315 |
+
return { success: false };
|
| 316 |
+
}
|
| 317 |
+
|
| 318 |
+
return this.request(`/api/chat/context?session_id=${this.sessionId}`);
|
| 319 |
+
},
|
| 320 |
+
|
| 321 |
+
// =========================================================================
|
| 322 |
+
// VOICE TRANSCRIPTION
|
| 323 |
+
// =========================================================================
|
| 324 |
+
|
| 325 |
+
/**
|
| 326 |
+
* Transcribe audio file
|
| 327 |
+
* @param {File|Blob} audioFile - Audio file to transcribe
|
| 328 |
+
* @param {string} languageHint - Language hint
|
| 329 |
+
* @returns {Promise<object>} Transcription result
|
| 330 |
+
*/
|
| 331 |
+
async transcribeAudio(audioFile, languageHint = null) {
|
| 332 |
+
const formData = new FormData();
|
| 333 |
+
formData.append('file', audioFile, audioFile.name || 'audio.wav');
|
| 334 |
+
|
| 335 |
+
if (languageHint) {
|
| 336 |
+
formData.append('language_hint', languageHint);
|
| 337 |
+
}
|
| 338 |
+
|
| 339 |
+
return this.request('/api/transcribe/', {
|
| 340 |
+
method: 'POST',
|
| 341 |
+
body: formData
|
| 342 |
+
});
|
| 343 |
+
},
|
| 344 |
+
|
| 345 |
+
/**
|
| 346 |
+
* Transcribe base64 audio
|
| 347 |
+
* @param {string} base64Audio - Base64 encoded audio
|
| 348 |
+
* @param {string} filename - Original filename
|
| 349 |
+
* @param {string} languageHint - Language hint
|
| 350 |
+
* @returns {Promise<object>} Transcription result
|
| 351 |
+
*/
|
| 352 |
+
async transcribeBase64(base64Audio, filename = 'audio.wav', languageHint = null) {
|
| 353 |
+
return this.request('/api/transcribe/base64', {
|
| 354 |
+
method: 'POST',
|
| 355 |
+
body: {
|
| 356 |
+
audio_base64: base64Audio,
|
| 357 |
+
filename: filename,
|
| 358 |
+
language_hint: languageHint
|
| 359 |
+
}
|
| 360 |
+
});
|
| 361 |
+
},
|
| 362 |
+
|
| 363 |
+
/**
|
| 364 |
+
* Get transcription service status
|
| 365 |
+
* @returns {Promise<object>} Service status
|
| 366 |
+
*/
|
| 367 |
+
async getTranscriptionStatus() {
|
| 368 |
+
return this.request('/api/transcribe/status');
|
| 369 |
+
},
|
| 370 |
+
|
| 371 |
+
/**
|
| 372 |
+
* Pre-load Whisper model
|
| 373 |
+
* @returns {Promise<object>} Result
|
| 374 |
+
*/
|
| 375 |
+
async loadWhisperModel() {
|
| 376 |
+
return this.request('/api/transcribe/load-model', {
|
| 377 |
+
method: 'POST'
|
| 378 |
+
});
|
| 379 |
+
},
|
| 380 |
+
|
| 381 |
+
// =========================================================================
|
| 382 |
+
// UI TRANSLATIONS
|
| 383 |
+
// =========================================================================
|
| 384 |
+
|
| 385 |
+
/**
|
| 386 |
+
* Get UI translations
|
| 387 |
+
* @param {string} language - Language code
|
| 388 |
+
* @returns {Promise<object>} Translations
|
| 389 |
+
*/
|
| 390 |
+
async getTranslations(language = null) {
|
| 391 |
+
const lang = language || this.language;
|
| 392 |
+
return this.request(`/api/translations?language=${lang}`);
|
| 393 |
+
},
|
| 394 |
+
|
| 395 |
+
// =========================================================================
|
| 396 |
+
// HEALTH CHECK
|
| 397 |
+
// =========================================================================
|
| 398 |
+
|
| 399 |
+
/**
|
| 400 |
+
* Check API health
|
| 401 |
+
* @returns {Promise<object>} Health status
|
| 402 |
+
*/
|
| 403 |
+
async healthCheck() {
|
| 404 |
+
return this.request('/health');
|
| 405 |
+
},
|
| 406 |
+
|
| 407 |
+
/**
|
| 408 |
+
* Get API info
|
| 409 |
+
* @returns {Promise<object>} API information
|
| 410 |
+
*/
|
| 411 |
+
async getApiInfo() {
|
| 412 |
+
return this.request('/api');
|
| 413 |
+
}
|
| 414 |
+
};
|
| 415 |
+
|
| 416 |
+
// Export for use in other modules
|
| 417 |
+
window.FarmEyesAPI = FarmEyesAPI;
|
frontend/js/app.js
ADDED
|
@@ -0,0 +1,274 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
 * FarmEyes Main Application
 * =========================
 * Main controller - navigation and language handling.
 */

const App = {
  // Active page key: 'language' | 'diagnosis' | 'chat'
  currentPage: 'language',
  // Language picked on the language page (null until the user chooses)
  selectedLanguage: null,
  isInitialized: false,
  // Cached DOM references, populated by cacheElements()
  elements: {},

  /**
   * Initialize the app: cache DOM, boot API client and i18n, wire events,
   * initialize sub-modules, and show the language page.
   */
  async init() {
    console.log('[App] Initializing FarmEyes...');

    this.cacheElements();

    await FarmEyesAPI.init();
    await I18n.init('en');

    this.bindEvents();

    Diagnosis.init();
    Chat.init();

    // Always start with language page
    this.navigateToPage('language');

    this.isInitialized = true;
    console.log('[App] Ready!');
  },

  /**
   * Cache DOM elements used throughout the controller.
   */
  cacheElements() {
    this.elements = {
      pageLanguage: document.getElementById('page-language'),
      pageDiagnosis: document.getElementById('page-diagnosis'),
      pageChat: document.getElementById('page-chat'),

      languageButtons: document.querySelectorAll('.language-btn'),
      btnContinue: document.getElementById('btn-continue-language'),

      btnLanguageToggle: document.getElementById('btn-language-toggle'),
      currentLangDisplay: document.getElementById('current-lang-display'),
      languageMenu: document.getElementById('language-menu'),
      languageDropdownItems: document.querySelectorAll('#language-menu .dropdown-item'),

      loadingOverlay: document.getElementById('loading-overlay'),
      loadingText: document.getElementById('loading-text'),
      toastContainer: document.getElementById('toast-container')
    };
  },

  /**
   * Bind UI event handlers.
   */
  bindEvents() {
    // Language selector buttons
    this.elements.languageButtons.forEach(btn => {
      btn.addEventListener('click', () => this.selectLanguage(btn.dataset.lang));
    });

    // Continue button
    this.elements.btnContinue?.addEventListener('click', () => this.onLanguageContinue());

    // Language dropdown toggle; stopPropagation keeps the document-level
    // click handler below from immediately closing the menu.
    this.elements.btnLanguageToggle?.addEventListener('click', (e) => {
      e.stopPropagation();
      this.toggleLanguageMenu();
    });

    this.elements.languageDropdownItems.forEach(item => {
      item.addEventListener('click', () => this.changeLanguage(item.dataset.lang));
    });

    // Any outside click closes the language menu
    document.addEventListener('click', () => this.closeLanguageMenu());

    // Back button in chat
    document.getElementById('btn-back-diagnosis')?.addEventListener('click', () => {
      this.navigateToDiagnosis();
    });

    window.addEventListener('languageChanged', (e) => {
      this.onLanguageChanged(e.detail.language);
    });
  },

  /**
   * Select a language on the language page and enable Continue.
   * @param {string} lang - Language code from the clicked button
   */
  selectLanguage(lang) {
    this.selectedLanguage = lang;

    this.elements.languageButtons.forEach(btn => {
      btn.classList.toggle('selected', btn.dataset.lang === lang);
    });

    if (this.elements.btnContinue) {
      this.elements.btnContinue.disabled = false;
    }

    console.log('[App] Language selected:', lang);
  },

  /**
   * Apply the selected language and move on to the diagnosis page.
   */
  async onLanguageContinue() {
    if (!this.selectedLanguage) return;

    this.showLoading('Setting up...');

    try {
      await I18n.setLanguage(this.selectedLanguage);
      this.navigateToDiagnosis();
    } catch (error) {
      console.error('[App] Language setup failed:', error);
      this.showToast('Failed to set language', 'error');
    } finally {
      this.hideLoading();
    }
  },

  /**
   * Change language from the header dropdown.
   * @param {string} lang - Language code from the clicked dropdown item
   */
  async changeLanguage(lang) {
    if (lang === I18n.getLanguage()) {
      this.closeLanguageMenu();
      return;
    }

    this.closeLanguageMenu();
    this.showLoading('Changing language...');

    try {
      await I18n.setLanguage(lang);
    } catch (error) {
      console.error('[App] Language change failed:', error);
      this.showToast('Failed to change language', 'error');
    } finally {
      this.hideLoading();
    }
  },

  /**
   * React to a 'languageChanged' event: sync header, chat display
   * and dropdown active state.
   * @param {string} language - New language code
   */
  onLanguageChanged(language) {
    // Update header display
    if (this.elements.currentLangDisplay) {
      this.elements.currentLangDisplay.textContent = language.toUpperCase();
    }

    // Update chat display
    const chatLangDisplay = document.getElementById('chat-lang-display');
    if (chatLangDisplay) {
      chatLangDisplay.textContent = language.toUpperCase();
    }

    // Update dropdown active state
    this.elements.languageDropdownItems.forEach(item => {
      item.classList.toggle('active', item.dataset.lang === language);
    });

    console.log('[App] Language updated to:', language);
  },

  toggleLanguageMenu() {
    this.elements.languageMenu?.classList.toggle('hidden');
  },

  closeLanguageMenu() {
    this.elements.languageMenu?.classList.add('hidden');
  },

  /**
   * Switch the visible page and run page lifecycle hooks.
   * @param {string} pageName - 'language' | 'diagnosis' | 'chat'
   */
  navigateToPage(pageName) {
    const pages = ['language', 'diagnosis', 'chat'];
    if (!pages.includes(pageName)) return;

    // Hide all
    this.elements.pageLanguage?.classList.remove('active');
    this.elements.pageDiagnosis?.classList.remove('active');
    this.elements.pageChat?.classList.remove('active');

    // Show target
    const target = document.getElementById(`page-${pageName}`);
    target?.classList.add('active');

    // Lifecycle: notify the chat module when entering/leaving its page
    if (this.currentPage === 'chat' && pageName !== 'chat') {
      Chat.onPageLeave?.();
    }
    if (pageName === 'chat') {
      Chat.onPageEnter?.();
    }

    this.currentPage = pageName;
    console.log('[App] Page:', pageName);
  },

  navigateToDiagnosis() {
    this.navigateToPage('diagnosis');
  },

  navigateToChat() {
    // Chat needs a diagnosis context; block navigation until one exists.
    if (!Diagnosis.hasDiagnosis()) {
      this.showToast('Please analyze an image first', 'warning');
      return;
    }
    this.navigateToPage('chat');
  },

  /**
   * Show the full-screen loading overlay with a message.
   * @param {string} [message='Loading...']
   */
  showLoading(message = 'Loading...') {
    if (this.elements.loadingText) {
      this.elements.loadingText.textContent = message;
    }
    this.elements.loadingOverlay?.classList.remove('hidden');
  },

  hideLoading() {
    this.elements.loadingOverlay?.classList.add('hidden');
  },

  /**
   * Show a toast notification that auto-dismisses.
   * Built with textContent (not innerHTML) so messages — which may include
   * backend-provided error text — cannot inject markup (XSS).
   * @param {string} message - Text to display
   * @param {string} [type='info'] - 'info' | 'warning' | 'error'
   * @param {number} [duration=4000] - Milliseconds before auto-dismiss
   */
  showToast(message, type = 'info', duration = 4000) {
    const container = this.elements.toastContainer;
    if (!container) return;

    const toast = document.createElement('div');
    toast.className = `toast ${type}`;

    const messageEl = document.createElement('span');
    messageEl.className = 'toast-message';
    messageEl.textContent = message;

    const closeEl = document.createElement('span');
    closeEl.className = 'toast-close';
    closeEl.textContent = '✕';
    closeEl.addEventListener('click', () => toast.remove());

    toast.appendChild(messageEl);
    toast.appendChild(closeEl);
    container.appendChild(toast);

    // Fade out, then remove after the CSS transition.
    setTimeout(() => {
      toast.style.opacity = '0';
      setTimeout(() => toast.remove(), 300);
    }, duration);
  },

  getCurrentPage() {
    return this.currentPage;
  },

  isReady() {
    return this.isInitialized;
  }
};

// Initialize on DOM ready
document.addEventListener('DOMContentLoaded', () => {
  App.init().catch(error => {
    console.error('[App] Init failed:', error);
  });
});

window.App = App;
|
frontend/js/chat.js
ADDED
|
@@ -0,0 +1,766 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes Chat Module
|
| 3 |
+
* ====================
|
| 4 |
+
* Handles the chat interface, message sending, and voice input.
|
| 5 |
+
*
|
| 6 |
+
* Updated: Inline "Listening..." indicator with timer (no full-screen overlay)
|
| 7 |
+
* Updated: TTS Listen button on assistant messages
|
| 8 |
+
*/
|
| 9 |
+
|
| 10 |
+
const Chat = {
|
| 11 |
+
// State
|
| 12 |
+
isLoading: false,
|
| 13 |
+
messages: [],
|
| 14 |
+
|
| 15 |
+
// Voice recording state
|
| 16 |
+
recordingTimer: null,
|
| 17 |
+
recordingSeconds: 0,
|
| 18 |
+
|
| 19 |
+
// Message ID counter for TTS
|
| 20 |
+
messageIdCounter: 0,
|
| 21 |
+
|
| 22 |
+
// DOM Elements
|
| 23 |
+
elements: {},
|
| 24 |
+
|
| 25 |
+
/**
|
| 26 |
+
* Initialize chat module
|
| 27 |
+
*/
|
| 28 |
+
init() {
|
| 29 |
+
this.cacheElements();
|
| 30 |
+
this.bindEvents();
|
| 31 |
+
this.initVoiceInput();
|
| 32 |
+
this.initTTS();
|
| 33 |
+
this.createTTSPlayer();
|
| 34 |
+
console.log('[Chat] Initialized');
|
| 35 |
+
},
|
| 36 |
+
|
| 37 |
+
/**
|
| 38 |
+
* Cache DOM elements
|
| 39 |
+
*/
|
| 40 |
+
cacheElements() {
|
| 41 |
+
this.elements = {
|
| 42 |
+
// Header
|
| 43 |
+
btnBack: document.getElementById('btn-back-diagnosis'),
|
| 44 |
+
btnLanguage: document.getElementById('btn-chat-language'),
|
| 45 |
+
chatLangDisplay: document.getElementById('chat-lang-display'),
|
| 46 |
+
|
| 47 |
+
// Context banner
|
| 48 |
+
contextBanner: document.getElementById('chat-context-banner'),
|
| 49 |
+
contextDiseaseName: document.getElementById('context-disease-name'),
|
| 50 |
+
contextConfidence: document.getElementById('context-confidence'),
|
| 51 |
+
contextSeverity: document.getElementById('context-severity'),
|
| 52 |
+
|
| 53 |
+
// Messages
|
| 54 |
+
messagesContainer: document.getElementById('chat-messages'),
|
| 55 |
+
chatWelcome: document.getElementById('chat-welcome'),
|
| 56 |
+
|
| 57 |
+
// Input
|
| 58 |
+
chatInput: document.getElementById('chat-input'),
|
| 59 |
+
btnVoice: document.getElementById('btn-voice-input'),
|
| 60 |
+
btnSend: document.getElementById('btn-send-message'),
|
| 61 |
+
chatInputBox: document.querySelector('.chat-input-box'),
|
| 62 |
+
|
| 63 |
+
// Voice overlay (keep reference but won't use full-screen)
|
| 64 |
+
voiceOverlay: document.getElementById('voice-overlay'),
|
| 65 |
+
btnStopVoice: document.getElementById('btn-stop-voice')
|
| 66 |
+
};
|
| 67 |
+
},
|
| 68 |
+
|
| 69 |
+
/**
|
| 70 |
+
* Bind event handlers
|
| 71 |
+
*/
|
| 72 |
+
bindEvents() {
|
| 73 |
+
const { btnBack, chatInput, btnVoice, btnSend, btnStopVoice } = this.elements;
|
| 74 |
+
|
| 75 |
+
// Back button
|
| 76 |
+
btnBack?.addEventListener('click', () => App.navigateToDiagnosis());
|
| 77 |
+
|
| 78 |
+
// Input events
|
| 79 |
+
chatInput?.addEventListener('input', () => this.handleInputChange());
|
| 80 |
+
chatInput?.addEventListener('keydown', (e) => this.handleKeyDown(e));
|
| 81 |
+
|
| 82 |
+
// Send button
|
| 83 |
+
btnSend?.addEventListener('click', () => this.sendMessage());
|
| 84 |
+
|
| 85 |
+
// Voice buttons
|
| 86 |
+
btnVoice?.addEventListener('click', () => this.toggleVoiceRecording());
|
| 87 |
+
btnStopVoice?.addEventListener('click', () => this.stopVoiceRecording());
|
| 88 |
+
|
| 89 |
+
// Auto-resize input
|
| 90 |
+
chatInput?.addEventListener('input', () => this.autoResizeInput());
|
| 91 |
+
},
|
| 92 |
+
|
| 93 |
+
/**
|
| 94 |
+
* Initialize voice input
|
| 95 |
+
*/
|
| 96 |
+
initVoiceInput() {
|
| 97 |
+
VoiceInput.init({
|
| 98 |
+
onTranscription: (text, result) => {
|
| 99 |
+
this.handleVoiceTranscription(text, result);
|
| 100 |
+
},
|
| 101 |
+
onError: (error) => {
|
| 102 |
+
App.showToast(error, 'error');
|
| 103 |
+
this.hideListeningIndicator();
|
| 104 |
+
},
|
| 105 |
+
onRecordingStart: () => {
|
| 106 |
+
this.showListeningIndicator();
|
| 107 |
+
},
|
| 108 |
+
onRecordingStop: () => {
|
| 109 |
+
this.hideListeningIndicator();
|
| 110 |
+
}
|
| 111 |
+
});
|
| 112 |
+
},
|
| 113 |
+
|
| 114 |
+
/**
|
| 115 |
+
* Initialize TTS (Text-to-Speech)
|
| 116 |
+
*/
|
| 117 |
+
initTTS() {
|
| 118 |
+
TTS.init({
|
| 119 |
+
onPlayStart: () => {
|
| 120 |
+
console.log('[Chat] TTS playback started');
|
| 121 |
+
},
|
| 122 |
+
onPlayEnd: () => {
|
| 123 |
+
console.log('[Chat] TTS playback ended');
|
| 124 |
+
this.updateListenButtons();
|
| 125 |
+
},
|
| 126 |
+
onError: (error) => {
|
| 127 |
+
App.showToast(error, 'error');
|
| 128 |
+
this.updateListenButtons();
|
| 129 |
+
}
|
| 130 |
+
});
|
| 131 |
+
},
|
| 132 |
+
|
| 133 |
+
/**
|
| 134 |
+
* Create floating TTS player element
|
| 135 |
+
*/
|
| 136 |
+
createTTSPlayer() {
|
| 137 |
+
// Check if player already exists
|
| 138 |
+
if (document.getElementById('tts-player')) return;
|
| 139 |
+
|
| 140 |
+
const player = document.createElement('div');
|
| 141 |
+
player.id = 'tts-player';
|
| 142 |
+
player.className = 'tts-player';
|
| 143 |
+
player.innerHTML = `
|
| 144 |
+
<div class="tts-player-header">
|
| 145 |
+
<div class="tts-player-title">
|
| 146 |
+
<span class="tts-player-title-icon">🔊</span>
|
| 147 |
+
<span>Now Playing</span>
|
| 148 |
+
</div>
|
| 149 |
+
<button class="btn-tts-close" id="tts-close" title="Close">×</button>
|
| 150 |
+
</div>
|
| 151 |
+
<div class="tts-progress-container" id="tts-progress-container">
|
| 152 |
+
<div class="tts-progress-bar" id="tts-progress"></div>
|
| 153 |
+
</div>
|
| 154 |
+
<div class="tts-controls">
|
| 155 |
+
<div class="tts-playback-controls">
|
| 156 |
+
<button class="btn-tts-control" id="tts-play-pause" title="Play/Pause">
|
| 157 |
+
<svg viewBox="0 0 24 24" fill="currentColor" width="20" height="20">
|
| 158 |
+
<polygon points="5,3 19,12 5,21"/>
|
| 159 |
+
</svg>
|
| 160 |
+
</button>
|
| 161 |
+
<button class="btn-tts-control stop" id="tts-stop" title="Stop">
|
| 162 |
+
<svg viewBox="0 0 24 24" fill="currentColor" width="16" height="16">
|
| 163 |
+
<rect x="6" y="6" width="12" height="12" rx="2"/>
|
| 164 |
+
</svg>
|
| 165 |
+
</button>
|
| 166 |
+
<span class="tts-time" id="tts-time">0:00</span>
|
| 167 |
+
</div>
|
| 168 |
+
<div class="tts-speed-controls">
|
| 169 |
+
<span class="tts-speed-label">Speed:</span>
|
| 170 |
+
<button class="tts-speed-btn" data-rate="0.75">0.75x</button>
|
| 171 |
+
<button class="tts-speed-btn active" data-rate="1">1x</button>
|
| 172 |
+
<button class="tts-speed-btn" data-rate="1.25">1.25x</button>
|
| 173 |
+
<button class="tts-speed-btn" data-rate="1.5">1.5x</button>
|
| 174 |
+
</div>
|
| 175 |
+
</div>
|
| 176 |
+
`;
|
| 177 |
+
|
| 178 |
+
document.body.appendChild(player);
|
| 179 |
+
|
| 180 |
+
// Bind player events
|
| 181 |
+
document.getElementById('tts-close')?.addEventListener('click', () => {
|
| 182 |
+
TTS.stop();
|
| 183 |
+
});
|
| 184 |
+
|
| 185 |
+
document.getElementById('tts-play-pause')?.addEventListener('click', () => {
|
| 186 |
+
TTS.togglePlayPause();
|
| 187 |
+
});
|
| 188 |
+
|
| 189 |
+
document.getElementById('tts-stop')?.addEventListener('click', () => {
|
| 190 |
+
TTS.stop();
|
| 191 |
+
});
|
| 192 |
+
|
| 193 |
+
// Speed buttons
|
| 194 |
+
document.querySelectorAll('.tts-speed-btn').forEach(btn => {
|
| 195 |
+
btn.addEventListener('click', () => {
|
| 196 |
+
const rate = parseFloat(btn.dataset.rate);
|
| 197 |
+
TTS.setPlaybackRate(rate);
|
| 198 |
+
});
|
| 199 |
+
});
|
| 200 |
+
|
| 201 |
+
console.log('[Chat] TTS player created');
|
| 202 |
+
},
|
| 203 |
+
|
| 204 |
+
// =========================================================================
|
| 205 |
+
// CHAT PAGE LIFECYCLE
|
| 206 |
+
// =========================================================================
|
| 207 |
+
|
| 208 |
+
/**
|
| 209 |
+
* Called when chat page becomes active
|
| 210 |
+
*/
|
| 211 |
+
async onPageEnter() {
|
| 212 |
+
console.log('[Chat] Page entered');
|
| 213 |
+
|
| 214 |
+
// Update context banner
|
| 215 |
+
this.updateContextBanner();
|
| 216 |
+
|
| 217 |
+
// Update language display
|
| 218 |
+
this.updateLanguageDisplay();
|
| 219 |
+
|
| 220 |
+
// Load chat history or get welcome
|
| 221 |
+
await this.loadChat();
|
| 222 |
+
|
| 223 |
+
// Focus input
|
| 224 |
+
this.elements.chatInput?.focus();
|
| 225 |
+
},
|
| 226 |
+
|
| 227 |
+
/**
|
| 228 |
+
* Called when leaving chat page
|
| 229 |
+
*/
|
| 230 |
+
onPageLeave() {
|
| 231 |
+
console.log('[Chat] Page left');
|
| 232 |
+
// Stop any ongoing recording
|
| 233 |
+
if (VoiceInput.getIsRecording()) {
|
| 234 |
+
VoiceInput.cancelRecording();
|
| 235 |
+
this.hideListeningIndicator();
|
| 236 |
+
}
|
| 237 |
+
},
|
| 238 |
+
|
| 239 |
+
/**
|
| 240 |
+
* Update the context banner with diagnosis info
|
| 241 |
+
*/
|
| 242 |
+
updateContextBanner() {
|
| 243 |
+
const diagnosis = Diagnosis.getDiagnosis();
|
| 244 |
+
|
| 245 |
+
if (!diagnosis) {
|
| 246 |
+
this.elements.contextBanner?.classList.add('hidden');
|
| 247 |
+
return;
|
| 248 |
+
}
|
| 249 |
+
|
| 250 |
+
this.elements.contextBanner?.classList.remove('hidden');
|
| 251 |
+
|
| 252 |
+
const { detection } = diagnosis;
|
| 253 |
+
this.elements.contextDiseaseName.textContent = detection.disease_name || 'Unknown';
|
| 254 |
+
this.elements.contextConfidence.textContent = `${Math.round(detection.confidence_percent || 0)}%`;
|
| 255 |
+
this.elements.contextSeverity.textContent = I18n.getSeverity(detection.severity_level || 'unknown');
|
| 256 |
+
},
|
| 257 |
+
|
| 258 |
+
/**
|
| 259 |
+
* Update language display
|
| 260 |
+
*/
|
| 261 |
+
updateLanguageDisplay() {
|
| 262 |
+
const lang = I18n.getLanguage();
|
| 263 |
+
if (this.elements.chatLangDisplay) {
|
| 264 |
+
this.elements.chatLangDisplay.textContent = lang.toUpperCase();
|
| 265 |
+
}
|
| 266 |
+
},
|
| 267 |
+
|
| 268 |
+
/**
|
| 269 |
+
* Load chat history or welcome message
|
| 270 |
+
*/
|
| 271 |
+
async loadChat() {
|
| 272 |
+
// Clear existing messages
|
| 273 |
+
this.clearMessages();
|
| 274 |
+
|
| 275 |
+
try {
|
| 276 |
+
// Try to get existing history
|
| 277 |
+
const history = await FarmEyesAPI.getChatHistory();
|
| 278 |
+
|
| 279 |
+
if (history.success && history.messages && history.messages.length > 0) {
|
| 280 |
+
// Display existing messages
|
| 281 |
+
this.messages = history.messages;
|
| 282 |
+
this.displayMessages(history.messages);
|
| 283 |
+
} else {
|
| 284 |
+
// Get welcome message
|
| 285 |
+
const welcome = await FarmEyesAPI.getChatWelcome(I18n.getLanguage());
|
| 286 |
+
|
| 287 |
+
if (welcome.success && welcome.response) {
|
| 288 |
+
this.addMessage('assistant', welcome.response);
|
| 289 |
+
} else {
|
| 290 |
+
// Show default welcome
|
| 291 |
+
this.showWelcome();
|
| 292 |
+
}
|
| 293 |
+
}
|
| 294 |
+
} catch (error) {
|
| 295 |
+
console.error('[Chat] Load failed:', error);
|
| 296 |
+
this.showWelcome();
|
| 297 |
+
}
|
| 298 |
+
},
|
| 299 |
+
|
| 300 |
+
// =========================================================================
|
| 301 |
+
// MESSAGE HANDLING
|
| 302 |
+
// =========================================================================
|
| 303 |
+
|
| 304 |
+
/**
|
| 305 |
+
* Handle input change
|
| 306 |
+
*/
|
| 307 |
+
handleInputChange() {
|
| 308 |
+
const text = this.elements.chatInput?.value?.trim();
|
| 309 |
+
this.elements.btnSend.disabled = !text || this.isLoading;
|
| 310 |
+
},
|
| 311 |
+
|
| 312 |
+
/**
|
| 313 |
+
* Handle keyboard input
|
| 314 |
+
* @param {KeyboardEvent} event
|
| 315 |
+
*/
|
| 316 |
+
handleKeyDown(event) {
|
| 317 |
+
// Send on Enter (without Shift)
|
| 318 |
+
if (event.key === 'Enter' && !event.shiftKey) {
|
| 319 |
+
event.preventDefault();
|
| 320 |
+
this.sendMessage();
|
| 321 |
+
}
|
| 322 |
+
},
|
| 323 |
+
|
| 324 |
+
/**
|
| 325 |
+
* Auto-resize textarea
|
| 326 |
+
*/
|
| 327 |
+
autoResizeInput() {
|
| 328 |
+
const input = this.elements.chatInput;
|
| 329 |
+
if (!input) return;
|
| 330 |
+
|
| 331 |
+
input.style.height = 'auto';
|
| 332 |
+
const newHeight = Math.min(input.scrollHeight, 150);
|
| 333 |
+
input.style.height = `${newHeight}px`;
|
| 334 |
+
},
|
| 335 |
+
|
| 336 |
+
/**
|
| 337 |
+
* Send a chat message
|
| 338 |
+
*/
|
| 339 |
+
async sendMessage() {
|
| 340 |
+
const input = this.elements.chatInput;
|
| 341 |
+
const message = input?.value?.trim();
|
| 342 |
+
|
| 343 |
+
if (!message || this.isLoading) return;
|
| 344 |
+
|
| 345 |
+
// Clear input
|
| 346 |
+
input.value = '';
|
| 347 |
+
this.autoResizeInput();
|
| 348 |
+
this.handleInputChange();
|
| 349 |
+
|
| 350 |
+
// Add user message to UI
|
| 351 |
+
this.addMessage('user', message);
|
| 352 |
+
|
| 353 |
+
// Send to API
|
| 354 |
+
this.isLoading = true;
|
| 355 |
+
this.showTypingIndicator();
|
| 356 |
+
|
| 357 |
+
try {
|
| 358 |
+
const response = await FarmEyesAPI.sendChatMessage(message, I18n.getLanguage());
|
| 359 |
+
|
| 360 |
+
if (response.success) {
|
| 361 |
+
this.addMessage('assistant', response.response);
|
| 362 |
+
} else {
|
| 363 |
+
throw new Error(response.error || 'Failed to get response');
|
| 364 |
+
}
|
| 365 |
+
} catch (error) {
|
| 366 |
+
console.error('[Chat] Send failed:', error);
|
| 367 |
+
this.addMessage('assistant', 'Sorry, I encountered an error. Please try again.');
|
| 368 |
+
App.showToast(error.message, 'error');
|
| 369 |
+
} finally {
|
| 370 |
+
this.isLoading = false;
|
| 371 |
+
this.hideTypingIndicator();
|
| 372 |
+
this.handleInputChange();
|
| 373 |
+
}
|
| 374 |
+
},
|
| 375 |
+
|
| 376 |
+
/**
|
| 377 |
+
* Add a message to the chat
|
| 378 |
+
* @param {string} role - 'user' or 'assistant'
|
| 379 |
+
* @param {string} content - Message content
|
| 380 |
+
*/
|
| 381 |
+
addMessage(role, content) {
|
| 382 |
+
// Hide welcome if visible
|
| 383 |
+
this.elements.chatWelcome?.classList.add('hidden');
|
| 384 |
+
|
| 385 |
+
// Create message element
|
| 386 |
+
const messageEl = this.createMessageElement(role, content);
|
| 387 |
+
|
| 388 |
+
// Add to container
|
| 389 |
+
this.elements.messagesContainer?.appendChild(messageEl);
|
| 390 |
+
|
| 391 |
+
// Store in array
|
| 392 |
+
this.messages.push({ role, content, timestamp: new Date().toISOString() });
|
| 393 |
+
|
| 394 |
+
// Scroll to bottom
|
| 395 |
+
this.scrollToBottom();
|
| 396 |
+
},
|
| 397 |
+
|
| 398 |
+
/**
|
| 399 |
+
* Create message DOM element
|
| 400 |
+
* @param {string} role
|
| 401 |
+
* @param {string} content
|
| 402 |
+
* @returns {HTMLElement}
|
| 403 |
+
*/
|
| 404 |
+
createMessageElement(role, content) {
|
| 405 |
+
const div = document.createElement('div');
|
| 406 |
+
div.className = `message ${role}`;
|
| 407 |
+
|
| 408 |
+
// Generate unique message ID for TTS caching
|
| 409 |
+
const messageId = `msg_${++this.messageIdCounter}_${Date.now()}`;
|
| 410 |
+
div.dataset.messageId = messageId;
|
| 411 |
+
|
| 412 |
+
const avatar = document.createElement('div');
|
| 413 |
+
avatar.className = 'message-avatar';
|
| 414 |
+
avatar.textContent = role === 'user' ? '👤' : '🌱';
|
| 415 |
+
|
| 416 |
+
const contentWrapper = document.createElement('div');
|
| 417 |
+
contentWrapper.className = 'message-content-wrapper';
|
| 418 |
+
|
| 419 |
+
const contentDiv = document.createElement('div');
|
| 420 |
+
contentDiv.className = 'message-content';
|
| 421 |
+
contentDiv.textContent = content;
|
| 422 |
+
|
| 423 |
+
contentWrapper.appendChild(contentDiv);
|
| 424 |
+
|
| 425 |
+
// Add Listen button for assistant messages
|
| 426 |
+
if (role === 'assistant') {
|
| 427 |
+
const listenBtn = document.createElement('button');
|
| 428 |
+
listenBtn.className = 'btn-listen';
|
| 429 |
+
listenBtn.dataset.messageId = messageId;
|
| 430 |
+
listenBtn.dataset.text = content;
|
| 431 |
+
listenBtn.innerHTML = `
|
| 432 |
+
<span class="btn-listen-icon">🔊</span>
|
| 433 |
+
<span class="btn-listen-text">Listen</span>
|
| 434 |
+
`;
|
| 435 |
+
listenBtn.title = 'Listen to this message';
|
| 436 |
+
listenBtn.addEventListener('click', () => this.handleListenClick(listenBtn, content, messageId));
|
| 437 |
+
|
| 438 |
+
contentWrapper.appendChild(listenBtn);
|
| 439 |
+
}
|
| 440 |
+
|
| 441 |
+
div.appendChild(avatar);
|
| 442 |
+
div.appendChild(contentWrapper);
|
| 443 |
+
|
| 444 |
+
return div;
|
| 445 |
+
},
|
| 446 |
+
|
| 447 |
+
/**
|
| 448 |
+
* Handle Listen button click
|
| 449 |
+
* @param {HTMLElement} button - The listen button
|
| 450 |
+
* @param {string} text - Message text
|
| 451 |
+
* @param {string} messageId - Unique message ID
|
| 452 |
+
*/
|
| 453 |
+
async handleListenClick(button, text, messageId) {
|
| 454 |
+
// If already playing this message, toggle pause
|
| 455 |
+
if (TTS.currentMessageId === messageId) {
|
| 456 |
+
if (TTS.getIsPlaying()) {
|
| 457 |
+
TTS.pause();
|
| 458 |
+
button.innerHTML = `<span class="btn-listen-icon">▶️</span><span class="btn-listen-text">Resume</span>`;
|
| 459 |
+
} else if (TTS.getIsPaused()) {
|
| 460 |
+
TTS.resume();
|
| 461 |
+
button.innerHTML = `<span class="btn-listen-icon">⏸️</span><span class="btn-listen-text">Pause</span>`;
|
| 462 |
+
}
|
| 463 |
+
return;
|
| 464 |
+
}
|
| 465 |
+
|
| 466 |
+
// Reset all other listen buttons
|
| 467 |
+
this.updateListenButtons();
|
| 468 |
+
|
| 469 |
+
// Show loading state
|
| 470 |
+
button.classList.add('loading');
|
| 471 |
+
button.innerHTML = `<span class="btn-listen-icon">🔊</span><span class="btn-listen-text">Loading...</span>`;
|
| 472 |
+
|
| 473 |
+
// Get current language
|
| 474 |
+
const language = I18n.getLanguage();
|
| 475 |
+
|
| 476 |
+
// Start TTS
|
| 477 |
+
const success = await TTS.speak(text, language, messageId);
|
| 478 |
+
|
| 479 |
+
// Update button state
|
| 480 |
+
button.classList.remove('loading');
|
| 481 |
+
|
| 482 |
+
if (success) {
|
| 483 |
+
button.classList.add('playing');
|
| 484 |
+
button.innerHTML = `<span class="btn-listen-icon">⏸️</span><span class="btn-listen-text">Pause</span>`;
|
| 485 |
+
} else {
|
| 486 |
+
button.innerHTML = `<span class="btn-listen-icon">🔊</span><span class="btn-listen-text">Listen</span>`;
|
| 487 |
+
}
|
| 488 |
+
},
|
| 489 |
+
|
| 490 |
+
/**
|
| 491 |
+
* Update all listen buttons to default state
|
| 492 |
+
*/
|
| 493 |
+
updateListenButtons() {
|
| 494 |
+
document.querySelectorAll('.btn-listen').forEach(btn => {
|
| 495 |
+
btn.classList.remove('loading', 'playing');
|
| 496 |
+
btn.innerHTML = `<span class="btn-listen-icon">🔊</span><span class="btn-listen-text">Listen</span>`;
|
| 497 |
+
});
|
| 498 |
+
},
|
| 499 |
+
|
| 500 |
+
/**
|
| 501 |
+
* Display multiple messages
|
| 502 |
+
* @param {Array} messages
|
| 503 |
+
*/
|
| 504 |
+
displayMessages(messages) {
|
| 505 |
+
this.elements.chatWelcome?.classList.add('hidden');
|
| 506 |
+
|
| 507 |
+
messages.forEach(msg => {
|
| 508 |
+
const messageEl = this.createMessageElement(msg.role, msg.content);
|
| 509 |
+
this.elements.messagesContainer?.appendChild(messageEl);
|
| 510 |
+
});
|
| 511 |
+
|
| 512 |
+
this.scrollToBottom();
|
| 513 |
+
},
|
| 514 |
+
|
| 515 |
+
/**
|
| 516 |
+
* Clear all messages
|
| 517 |
+
*/
|
| 518 |
+
clearMessages() {
|
| 519 |
+
if (this.elements.messagesContainer) {
|
| 520 |
+
this.elements.messagesContainer.innerHTML = '';
|
| 521 |
+
// Re-add welcome
|
| 522 |
+
const welcome = document.createElement('div');
|
| 523 |
+
welcome.id = 'chat-welcome';
|
| 524 |
+
welcome.className = 'chat-welcome';
|
| 525 |
+
welcome.innerHTML = `
|
| 526 |
+
<div class="welcome-icon">🌱</div>
|
| 527 |
+
<p class="welcome-text">Start a conversation about your diagnosis</p>
|
| 528 |
+
`;
|
| 529 |
+
this.elements.messagesContainer.appendChild(welcome);
|
| 530 |
+
this.elements.chatWelcome = welcome;
|
| 531 |
+
}
|
| 532 |
+
this.messages = [];
|
| 533 |
+
},
|
| 534 |
+
|
| 535 |
+
/**
|
| 536 |
+
* Show welcome screen
|
| 537 |
+
*/
|
| 538 |
+
showWelcome() {
|
| 539 |
+
this.elements.chatWelcome?.classList.remove('hidden');
|
| 540 |
+
},
|
| 541 |
+
|
| 542 |
+
/**
|
| 543 |
+
* Show typing indicator
|
| 544 |
+
*/
|
| 545 |
+
showTypingIndicator() {
|
| 546 |
+
// Remove existing indicator
|
| 547 |
+
this.hideTypingIndicator();
|
| 548 |
+
|
| 549 |
+
const indicator = document.createElement('div');
|
| 550 |
+
indicator.className = 'message assistant typing-message';
|
| 551 |
+
indicator.innerHTML = `
|
| 552 |
+
<div class="message-avatar">🌱</div>
|
| 553 |
+
<div class="message-content">
|
| 554 |
+
<div class="typing-indicator">
|
| 555 |
+
<div class="typing-dot"></div>
|
| 556 |
+
<div class="typing-dot"></div>
|
| 557 |
+
<div class="typing-dot"></div>
|
| 558 |
+
</div>
|
| 559 |
+
</div>
|
| 560 |
+
`;
|
| 561 |
+
|
| 562 |
+
this.elements.messagesContainer?.appendChild(indicator);
|
| 563 |
+
this.scrollToBottom();
|
| 564 |
+
},
|
| 565 |
+
|
| 566 |
+
/**
|
| 567 |
+
* Hide typing indicator
|
| 568 |
+
*/
|
| 569 |
+
hideTypingIndicator() {
|
| 570 |
+
const indicator = this.elements.messagesContainer?.querySelector('.typing-message');
|
| 571 |
+
indicator?.remove();
|
| 572 |
+
},
|
| 573 |
+
|
| 574 |
+
/**
|
| 575 |
+
* Scroll chat to bottom
|
| 576 |
+
*/
|
| 577 |
+
scrollToBottom() {
|
| 578 |
+
const container = this.elements.messagesContainer;
|
| 579 |
+
if (container) {
|
| 580 |
+
container.scrollTop = container.scrollHeight;
|
| 581 |
+
}
|
| 582 |
+
},
|
| 583 |
+
|
| 584 |
+
// =========================================================================
|
| 585 |
+
// VOICE INPUT - INLINE LISTENING INDICATOR
|
| 586 |
+
// =========================================================================
|
| 587 |
+
|
| 588 |
+
/**
|
| 589 |
+
* Toggle voice recording
|
| 590 |
+
*/
|
| 591 |
+
async toggleVoiceRecording() {
|
| 592 |
+
if (!VoiceInput.isSupported()) {
|
| 593 |
+
App.showToast('Voice input is not supported in this browser', 'error');
|
| 594 |
+
return;
|
| 595 |
+
}
|
| 596 |
+
|
| 597 |
+
if (VoiceInput.getIsRecording()) {
|
| 598 |
+
this.stopVoiceRecording();
|
| 599 |
+
} else {
|
| 600 |
+
await this.startVoiceRecording();
|
| 601 |
+
}
|
| 602 |
+
},
|
| 603 |
+
|
| 604 |
+
/**
|
| 605 |
+
* Start voice recording
|
| 606 |
+
*/
|
| 607 |
+
async startVoiceRecording() {
|
| 608 |
+
const started = await VoiceInput.startRecording();
|
| 609 |
+
|
| 610 |
+
if (!started) {
|
| 611 |
+
// Error handled by VoiceInput callback
|
| 612 |
+
return;
|
| 613 |
+
}
|
| 614 |
+
},
|
| 615 |
+
|
| 616 |
+
/**
|
| 617 |
+
* Stop voice recording
|
| 618 |
+
*/
|
| 619 |
+
stopVoiceRecording() {
|
| 620 |
+
VoiceInput.stopRecording();
|
| 621 |
+
},
|
| 622 |
+
|
| 623 |
+
/**
|
| 624 |
+
* Handle voice transcription result
|
| 625 |
+
* @param {string} text - Transcribed text
|
| 626 |
+
* @param {object} result - Full result object
|
| 627 |
+
*/
|
| 628 |
+
handleVoiceTranscription(text, result) {
|
| 629 |
+
if (!text) {
|
| 630 |
+
App.showToast('Could not understand audio. Please try again.', 'warning');
|
| 631 |
+
return;
|
| 632 |
+
}
|
| 633 |
+
|
| 634 |
+
// Put text in input
|
| 635 |
+
if (this.elements.chatInput) {
|
| 636 |
+
this.elements.chatInput.value = text;
|
| 637 |
+
this.autoResizeInput();
|
| 638 |
+
this.handleInputChange();
|
| 639 |
+
|
| 640 |
+
// Optionally auto-send
|
| 641 |
+
// this.sendMessage();
|
| 642 |
+
}
|
| 643 |
+
|
| 644 |
+
// Show language detected
|
| 645 |
+
if (result.language) {
|
| 646 |
+
console.log('[Chat] Detected language:', result.language);
|
| 647 |
+
}
|
| 648 |
+
},
|
| 649 |
+
|
| 650 |
+
/**
|
| 651 |
+
* Show inline listening indicator in chat bar
|
| 652 |
+
* Replaces the textarea with a listening indicator + timer
|
| 653 |
+
*/
|
| 654 |
+
showListeningIndicator() {
|
| 655 |
+
const inputBox = this.elements.chatInputBox;
|
| 656 |
+
const textarea = this.elements.chatInput;
|
| 657 |
+
const btnVoice = this.elements.btnVoice;
|
| 658 |
+
const btnSend = this.elements.btnSend;
|
| 659 |
+
|
| 660 |
+
if (!inputBox) return;
|
| 661 |
+
|
| 662 |
+
// Hide textarea and send button
|
| 663 |
+
textarea?.classList.add('hidden');
|
| 664 |
+
btnSend?.classList.add('hidden');
|
| 665 |
+
|
| 666 |
+
// Update voice button to stop style
|
| 667 |
+
btnVoice?.classList.add('recording');
|
| 668 |
+
|
| 669 |
+
// Create listening indicator
|
| 670 |
+
const listeningIndicator = document.createElement('div');
|
| 671 |
+
listeningIndicator.id = 'listening-indicator';
|
| 672 |
+
listeningIndicator.className = 'listening-indicator';
|
| 673 |
+
listeningIndicator.innerHTML = `
|
| 674 |
+
<div class="listening-pulse"></div>
|
| 675 |
+
<span class="listening-text">Listening...</span>
|
| 676 |
+
<span class="listening-timer">0:00</span>
|
| 677 |
+
<button class="btn-stop-inline" title="Stop Recording">
|
| 678 |
+
<svg viewBox="0 0 24 24" fill="currentColor" width="18" height="18">
|
| 679 |
+
<rect x="6" y="6" width="12" height="12" rx="2"/>
|
| 680 |
+
</svg>
|
| 681 |
+
</button>
|
| 682 |
+
`;
|
| 683 |
+
|
| 684 |
+
// Insert before voice button
|
| 685 |
+
inputBox.insertBefore(listeningIndicator, btnVoice);
|
| 686 |
+
|
| 687 |
+
// Bind stop button
|
| 688 |
+
const btnStop = listeningIndicator.querySelector('.btn-stop-inline');
|
| 689 |
+
btnStop?.addEventListener('click', () => this.stopVoiceRecording());
|
| 690 |
+
|
| 691 |
+
// Start timer
|
| 692 |
+
this.recordingSeconds = 0;
|
| 693 |
+
this.updateRecordingTimer();
|
| 694 |
+
this.recordingTimer = setInterval(() => {
|
| 695 |
+
this.recordingSeconds++;
|
| 696 |
+
this.updateRecordingTimer();
|
| 697 |
+
}, 1000);
|
| 698 |
+
|
| 699 |
+
console.log('[Chat] Listening indicator shown');
|
| 700 |
+
},
|
| 701 |
+
|
| 702 |
+
/**
|
| 703 |
+
* Hide inline listening indicator
|
| 704 |
+
*/
|
| 705 |
+
hideListeningIndicator() {
|
| 706 |
+
const inputBox = this.elements.chatInputBox;
|
| 707 |
+
const textarea = this.elements.chatInput;
|
| 708 |
+
const btnVoice = this.elements.btnVoice;
|
| 709 |
+
const btnSend = this.elements.btnSend;
|
| 710 |
+
|
| 711 |
+
// Remove listening indicator
|
| 712 |
+
const indicator = document.getElementById('listening-indicator');
|
| 713 |
+
indicator?.remove();
|
| 714 |
+
|
| 715 |
+
// Show textarea and send button
|
| 716 |
+
textarea?.classList.remove('hidden');
|
| 717 |
+
btnSend?.classList.remove('hidden');
|
| 718 |
+
|
| 719 |
+
// Update voice button
|
| 720 |
+
btnVoice?.classList.remove('recording');
|
| 721 |
+
|
| 722 |
+
// Stop timer
|
| 723 |
+
if (this.recordingTimer) {
|
| 724 |
+
clearInterval(this.recordingTimer);
|
| 725 |
+
this.recordingTimer = null;
|
| 726 |
+
}
|
| 727 |
+
this.recordingSeconds = 0;
|
| 728 |
+
|
| 729 |
+
console.log('[Chat] Listening indicator hidden');
|
| 730 |
+
},
|
| 731 |
+
|
| 732 |
+
/**
|
| 733 |
+
* Update the recording timer display
|
| 734 |
+
*/
|
| 735 |
+
updateRecordingTimer() {
|
| 736 |
+
const timerEl = document.querySelector('.listening-timer');
|
| 737 |
+
if (timerEl) {
|
| 738 |
+
const minutes = Math.floor(this.recordingSeconds / 60);
|
| 739 |
+
const seconds = this.recordingSeconds % 60;
|
| 740 |
+
timerEl.textContent = `${minutes}:${seconds.toString().padStart(2, '0')}`;
|
| 741 |
+
}
|
| 742 |
+
},
|
| 743 |
+
|
| 744 |
+
// =========================================================================
|
| 745 |
+
// LEGACY OVERLAY METHODS (kept for compatibility but not used)
|
| 746 |
+
// =========================================================================
|
| 747 |
+
|
| 748 |
+
/**
|
| 749 |
+
* Show voice recording overlay (LEGACY - not used)
|
| 750 |
+
*/
|
| 751 |
+
showVoiceOverlay() {
|
| 752 |
+
// Replaced by showListeningIndicator()
|
| 753 |
+
this.showListeningIndicator();
|
| 754 |
+
},
|
| 755 |
+
|
| 756 |
+
/**
|
| 757 |
+
* Hide voice recording overlay (LEGACY - not used)
|
| 758 |
+
*/
|
| 759 |
+
hideVoiceOverlay() {
|
| 760 |
+
// Replaced by hideListeningIndicator()
|
| 761 |
+
this.hideListeningIndicator();
|
| 762 |
+
}
|
| 763 |
+
};
|
| 764 |
+
|
| 765 |
+
// Export for use in other modules
|
| 766 |
+
window.Chat = Chat;
|
frontend/js/diagnosis.js
ADDED
|
@@ -0,0 +1,515 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes Diagnosis Module
|
| 3 |
+
* =========================
|
| 4 |
+
* Handles image upload, disease detection, and results display.
|
| 5 |
+
* Fixed to correctly map API response structure to UI elements.
|
| 6 |
+
*/
|
| 7 |
+
|
| 8 |
+
const Diagnosis = {
|
| 9 |
+
// State
|
| 10 |
+
currentImage: null,
|
| 11 |
+
currentDiagnosis: null,
|
| 12 |
+
isAnalyzing: false,
|
| 13 |
+
|
| 14 |
+
// DOM Elements (cached)
|
| 15 |
+
elements: {},
|
| 16 |
+
|
| 17 |
+
/**
|
| 18 |
+
* Initialize diagnosis module
|
| 19 |
+
*/
|
| 20 |
+
init() {
|
| 21 |
+
this.cacheElements();
|
| 22 |
+
this.bindEvents();
|
| 23 |
+
console.log('[Diagnosis] Initialized');
|
| 24 |
+
},
|
| 25 |
+
|
| 26 |
+
/**
|
| 27 |
+
* Cache DOM elements for performance
|
| 28 |
+
*/
|
| 29 |
+
cacheElements() {
|
| 30 |
+
this.elements = {
|
| 31 |
+
// Upload
|
| 32 |
+
uploadZone: document.getElementById('upload-zone'),
|
| 33 |
+
fileInput: document.getElementById('file-input'),
|
| 34 |
+
imagePreviewContainer: document.getElementById('image-preview-container'),
|
| 35 |
+
imagePreview: document.getElementById('image-preview'),
|
| 36 |
+
btnRemoveImage: document.getElementById('btn-remove-image'),
|
| 37 |
+
btnAnalyze: document.getElementById('btn-analyze'),
|
| 38 |
+
analyzingLoader: document.getElementById('analyzing-loader'),
|
| 39 |
+
|
| 40 |
+
// Sections
|
| 41 |
+
uploadSection: document.getElementById('upload-section'),
|
| 42 |
+
resultsSection: document.getElementById('results-section'),
|
| 43 |
+
|
| 44 |
+
// Results - Disease Card
|
| 45 |
+
btnNewScan: document.getElementById('btn-new-scan'),
|
| 46 |
+
diseaseIcon: document.getElementById('disease-icon'),
|
| 47 |
+
diseaseName: document.getElementById('disease-name'),
|
| 48 |
+
cropType: document.getElementById('crop-type'),
|
| 49 |
+
confidenceBar: document.getElementById('confidence-bar'),
|
| 50 |
+
confidenceValue: document.getElementById('confidence-value'),
|
| 51 |
+
severityBadge: document.getElementById('severity-badge'),
|
| 52 |
+
|
| 53 |
+
// Tabs
|
| 54 |
+
tabButtons: document.querySelectorAll('.tab-btn'),
|
| 55 |
+
tabSymptoms: document.getElementById('tab-symptoms'),
|
| 56 |
+
tabTreatment: document.getElementById('tab-treatment'),
|
| 57 |
+
tabPrevention: document.getElementById('tab-prevention'),
|
| 58 |
+
|
| 59 |
+
// Symptoms tab content
|
| 60 |
+
symptomsList: document.getElementById('symptoms-list'),
|
| 61 |
+
transmissionList: document.getElementById('transmission-list'),
|
| 62 |
+
yieldImpactText: document.getElementById('yield-impact-text'),
|
| 63 |
+
recoveryBar: document.getElementById('recovery-bar'),
|
| 64 |
+
recoveryText: document.getElementById('recovery-text'),
|
| 65 |
+
|
| 66 |
+
// Treatment tab content
|
| 67 |
+
immediateActionsList: document.getElementById('immediate-actions-list'),
|
| 68 |
+
chemicalTreatments: document.getElementById('chemical-treatments'),
|
| 69 |
+
costEstimate: document.getElementById('cost-estimate'),
|
| 70 |
+
|
| 71 |
+
// Prevention tab content
|
| 72 |
+
preventionList: document.getElementById('prevention-list'),
|
| 73 |
+
|
| 74 |
+
// Chat button
|
| 75 |
+
btnOpenChat: document.getElementById('btn-open-chat')
|
| 76 |
+
};
|
| 77 |
+
},
|
| 78 |
+
|
| 79 |
+
/**
|
| 80 |
+
* Bind event handlers
|
| 81 |
+
*/
|
| 82 |
+
bindEvents() {
|
| 83 |
+
const { uploadZone, fileInput, btnRemoveImage, btnAnalyze,
|
| 84 |
+
btnNewScan, tabButtons, btnOpenChat } = this.elements;
|
| 85 |
+
|
| 86 |
+
// Upload zone click
|
| 87 |
+
uploadZone?.addEventListener('click', () => fileInput?.click());
|
| 88 |
+
|
| 89 |
+
// File input change
|
| 90 |
+
fileInput?.addEventListener('change', (e) => this.handleFileSelect(e));
|
| 91 |
+
|
| 92 |
+
// Drag and drop
|
| 93 |
+
uploadZone?.addEventListener('dragover', (e) => this.handleDragOver(e));
|
| 94 |
+
uploadZone?.addEventListener('dragleave', (e) => this.handleDragLeave(e));
|
| 95 |
+
uploadZone?.addEventListener('drop', (e) => this.handleDrop(e));
|
| 96 |
+
|
| 97 |
+
// Remove image
|
| 98 |
+
btnRemoveImage?.addEventListener('click', (e) => {
|
| 99 |
+
e.stopPropagation();
|
| 100 |
+
this.removeImage();
|
| 101 |
+
});
|
| 102 |
+
|
| 103 |
+
// Analyze button
|
| 104 |
+
btnAnalyze?.addEventListener('click', () => this.analyzeImage());
|
| 105 |
+
|
| 106 |
+
// New scan button
|
| 107 |
+
btnNewScan?.addEventListener('click', () => this.clearResults());
|
| 108 |
+
|
| 109 |
+
// Tab switching
|
| 110 |
+
tabButtons.forEach(btn => {
|
| 111 |
+
btn.addEventListener('click', () => this.switchTab(btn.dataset.tab));
|
| 112 |
+
});
|
| 113 |
+
|
| 114 |
+
// Open chat
|
| 115 |
+
btnOpenChat?.addEventListener('click', () => {
|
| 116 |
+
if (this.currentDiagnosis) {
|
| 117 |
+
App.navigateToChat();
|
| 118 |
+
}
|
| 119 |
+
});
|
| 120 |
+
},
|
| 121 |
+
|
| 122 |
+
// =========================================================================
|
| 123 |
+
// IMAGE HANDLING
|
| 124 |
+
// =========================================================================
|
| 125 |
+
|
| 126 |
+
handleFileSelect(event) {
|
| 127 |
+
const file = event.target.files?.[0];
|
| 128 |
+
if (file) {
|
| 129 |
+
this.loadImage(file);
|
| 130 |
+
}
|
| 131 |
+
},
|
| 132 |
+
|
| 133 |
+
handleDragOver(event) {
|
| 134 |
+
event.preventDefault();
|
| 135 |
+
event.stopPropagation();
|
| 136 |
+
this.elements.uploadZone?.classList.add('dragover');
|
| 137 |
+
},
|
| 138 |
+
|
| 139 |
+
handleDragLeave(event) {
|
| 140 |
+
event.preventDefault();
|
| 141 |
+
event.stopPropagation();
|
| 142 |
+
this.elements.uploadZone?.classList.remove('dragover');
|
| 143 |
+
},
|
| 144 |
+
|
| 145 |
+
handleDrop(event) {
|
| 146 |
+
event.preventDefault();
|
| 147 |
+
event.stopPropagation();
|
| 148 |
+
this.elements.uploadZone?.classList.remove('dragover');
|
| 149 |
+
|
| 150 |
+
const file = event.dataTransfer?.files?.[0];
|
| 151 |
+
if (file && file.type.startsWith('image/')) {
|
| 152 |
+
this.loadImage(file);
|
| 153 |
+
} else {
|
| 154 |
+
App.showToast('Please drop an image file', 'error');
|
| 155 |
+
}
|
| 156 |
+
},
|
| 157 |
+
|
| 158 |
+
loadImage(file) {
|
| 159 |
+
// Validate file type
|
| 160 |
+
const validTypes = ['image/jpeg', 'image/jpg', 'image/png', 'image/webp', 'image/bmp'];
|
| 161 |
+
if (!validTypes.includes(file.type)) {
|
| 162 |
+
App.showToast('Invalid image format. Use JPG, PNG, or WEBP.', 'error');
|
| 163 |
+
return;
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
// Validate file size (10MB max)
|
| 167 |
+
if (file.size > 10 * 1024 * 1024) {
|
| 168 |
+
App.showToast('Image too large. Maximum 10MB.', 'error');
|
| 169 |
+
return;
|
| 170 |
+
}
|
| 171 |
+
|
| 172 |
+
this.currentImage = file;
|
| 173 |
+
|
| 174 |
+
// Show preview
|
| 175 |
+
const reader = new FileReader();
|
| 176 |
+
reader.onload = (e) => {
|
| 177 |
+
this.elements.imagePreview.src = e.target.result;
|
| 178 |
+
this.elements.uploadZone?.classList.add('hidden');
|
| 179 |
+
this.elements.imagePreviewContainer?.classList.remove('hidden');
|
| 180 |
+
this.elements.btnAnalyze.disabled = false;
|
| 181 |
+
};
|
| 182 |
+
reader.readAsDataURL(file);
|
| 183 |
+
|
| 184 |
+
console.log('[Diagnosis] Image loaded:', file.name);
|
| 185 |
+
},
|
| 186 |
+
|
| 187 |
+
removeImage() {
|
| 188 |
+
this.currentImage = null;
|
| 189 |
+
this.elements.imagePreview.src = '';
|
| 190 |
+
this.elements.uploadZone?.classList.remove('hidden');
|
| 191 |
+
this.elements.imagePreviewContainer?.classList.add('hidden');
|
| 192 |
+
this.elements.btnAnalyze.disabled = true;
|
| 193 |
+
this.elements.fileInput.value = '';
|
| 194 |
+
|
| 195 |
+
console.log('[Diagnosis] Image removed');
|
| 196 |
+
},
|
| 197 |
+
|
| 198 |
+
// =========================================================================
|
| 199 |
+
// ANALYSIS
|
| 200 |
+
// =========================================================================
|
| 201 |
+
|
| 202 |
+
async analyzeImage() {
|
| 203 |
+
if (!this.currentImage || this.isAnalyzing) {
|
| 204 |
+
return;
|
| 205 |
+
}
|
| 206 |
+
|
| 207 |
+
this.isAnalyzing = true;
|
| 208 |
+
this.showAnalyzing(true);
|
| 209 |
+
|
| 210 |
+
try {
|
| 211 |
+
console.log('[Diagnosis] Starting analysis...');
|
| 212 |
+
|
| 213 |
+
const result = await FarmEyesAPI.detectDisease(
|
| 214 |
+
this.currentImage,
|
| 215 |
+
I18n.getLanguage()
|
| 216 |
+
);
|
| 217 |
+
|
| 218 |
+
console.log('[Diagnosis] API Response:', result);
|
| 219 |
+
|
| 220 |
+
if (result.success) {
|
| 221 |
+
this.currentDiagnosis = result;
|
| 222 |
+
this.displayResults(result);
|
| 223 |
+
console.log('[Diagnosis] Analysis complete:', result.detection?.disease_name);
|
| 224 |
+
} else {
|
| 225 |
+
throw new Error(result.error || 'Analysis failed');
|
| 226 |
+
}
|
| 227 |
+
|
| 228 |
+
} catch (error) {
|
| 229 |
+
console.error('[Diagnosis] Analysis failed:', error);
|
| 230 |
+
App.showToast(error.message || 'Analysis failed. Please try again.', 'error');
|
| 231 |
+
} finally {
|
| 232 |
+
this.isAnalyzing = false;
|
| 233 |
+
this.showAnalyzing(false);
|
| 234 |
+
}
|
| 235 |
+
},
|
| 236 |
+
|
| 237 |
+
showAnalyzing(show) {
|
| 238 |
+
const { btnAnalyze, analyzingLoader } = this.elements;
|
| 239 |
+
|
| 240 |
+
if (show) {
|
| 241 |
+
btnAnalyze?.classList.add('hidden');
|
| 242 |
+
analyzingLoader?.classList.remove('hidden');
|
| 243 |
+
} else {
|
| 244 |
+
btnAnalyze?.classList.remove('hidden');
|
| 245 |
+
analyzingLoader?.classList.add('hidden');
|
| 246 |
+
}
|
| 247 |
+
},
|
| 248 |
+
|
| 249 |
+
// =========================================================================
|
| 250 |
+
// RESULTS DISPLAY - FIXED MAPPING
|
| 251 |
+
// =========================================================================
|
| 252 |
+
|
| 253 |
+
displayResults(result) {
|
| 254 |
+
const { detection, diagnosis } = result;
|
| 255 |
+
|
| 256 |
+
console.log('[Diagnosis] Displaying results:', { detection, diagnosis });
|
| 257 |
+
|
| 258 |
+
// Show results section
|
| 259 |
+
this.elements.resultsSection?.classList.remove('hidden');
|
| 260 |
+
|
| 261 |
+
// Disease header
|
| 262 |
+
this.elements.diseaseIcon.textContent = this.getDiseaseIcon(detection.crop_type);
|
| 263 |
+
|
| 264 |
+
// Disease name - check multiple possible locations
|
| 265 |
+
const diseaseName = diagnosis?.disease?.name || detection?.disease_name || 'Unknown Disease';
|
| 266 |
+
this.elements.diseaseName.textContent = diseaseName;
|
| 267 |
+
|
| 268 |
+
// Crop type
|
| 269 |
+
this.elements.cropType.textContent = this.formatCropName(detection?.crop_type);
|
| 270 |
+
|
| 271 |
+
// Confidence
|
| 272 |
+
const confidencePercent = detection?.confidence_percent || (detection?.confidence * 100) || 0;
|
| 273 |
+
this.elements.confidenceBar.style.width = `${confidencePercent}%`;
|
| 274 |
+
this.elements.confidenceValue.textContent = `${Math.round(confidencePercent)}%`;
|
| 275 |
+
|
| 276 |
+
// Severity
|
| 277 |
+
const severity = diagnosis?.disease?.severity?.level || detection?.severity_level || 'unknown';
|
| 278 |
+
this.elements.severityBadge.textContent = this.formatSeverity(severity);
|
| 279 |
+
this.elements.severityBadge.className = `severity-badge ${severity.toLowerCase().replace(/\s+/g, '-')}`;
|
| 280 |
+
|
| 281 |
+
// === SYMPTOMS TAB ===
|
| 282 |
+
// Symptoms from diagnosis.symptoms array
|
| 283 |
+
const symptoms = diagnosis?.symptoms || [];
|
| 284 |
+
this.populateList(this.elements.symptomsList, symptoms);
|
| 285 |
+
|
| 286 |
+
// Transmission from diagnosis.transmission array
|
| 287 |
+
const transmission = diagnosis?.transmission || [];
|
| 288 |
+
this.populateList(this.elements.transmissionList, transmission);
|
| 289 |
+
|
| 290 |
+
// Yield impact
|
| 291 |
+
const yieldImpact = diagnosis?.yield_impact;
|
| 292 |
+
if (yieldImpact && this.elements.yieldImpactText) {
|
| 293 |
+
const minLoss = yieldImpact.min_percent || 0;
|
| 294 |
+
const maxLoss = yieldImpact.max_percent || 0;
|
| 295 |
+
this.elements.yieldImpactText.textContent = `${minLoss}% - ${maxLoss}% potential yield loss`;
|
| 296 |
+
}
|
| 297 |
+
|
| 298 |
+
// Recovery/Health projection
|
| 299 |
+
const projection = diagnosis?.current_projection || diagnosis?.health_projection;
|
| 300 |
+
if (projection && this.elements.recoveryBar) {
|
| 301 |
+
const recovery = projection.recovery_chance_percent || projection.recovery_chance || 0;
|
| 302 |
+
this.elements.recoveryBar.style.width = `${recovery}%`;
|
| 303 |
+
if (this.elements.recoveryText) {
|
| 304 |
+
this.elements.recoveryText.textContent = `${recovery}% recovery chance`;
|
| 305 |
+
}
|
| 306 |
+
}
|
| 307 |
+
|
| 308 |
+
// === TREATMENT TAB ===
|
| 309 |
+
// Immediate actions from diagnosis.treatments.immediate_actions
|
| 310 |
+
const treatments = diagnosis?.treatments || {};
|
| 311 |
+
const immediateActions = treatments.immediate_actions || [];
|
| 312 |
+
this.populateActionsList(this.elements.immediateActionsList, immediateActions);
|
| 313 |
+
|
| 314 |
+
// Chemical treatments
|
| 315 |
+
const chemicalTreatments = treatments.chemical || [];
|
| 316 |
+
this.populateChemicalTreatments(chemicalTreatments);
|
| 317 |
+
|
| 318 |
+
// Cost estimate
|
| 319 |
+
const costs = diagnosis?.costs;
|
| 320 |
+
if (costs && this.elements.costEstimate) {
|
| 321 |
+
const minCost = costs.min_ngn || 0;
|
| 322 |
+
const maxCost = costs.max_ngn || 0;
|
| 323 |
+
if (minCost && maxCost) {
|
| 324 |
+
this.elements.costEstimate.textContent = `₦${minCost.toLocaleString()} - ₦${maxCost.toLocaleString()}`;
|
| 325 |
+
} else {
|
| 326 |
+
this.elements.costEstimate.textContent = 'Contact local supplier';
|
| 327 |
+
}
|
| 328 |
+
}
|
| 329 |
+
|
| 330 |
+
// === PREVENTION TAB ===
|
| 331 |
+
// Prevention tips from diagnosis.prevention array
|
| 332 |
+
const prevention = diagnosis?.prevention || [];
|
| 333 |
+
this.populateList(this.elements.preventionList, prevention);
|
| 334 |
+
|
| 335 |
+
// Scroll to results
|
| 336 |
+
this.elements.resultsSection.scrollIntoView({ behavior: 'smooth' });
|
| 337 |
+
},
|
| 338 |
+
|
| 339 |
+
/**
|
| 340 |
+
* Populate a simple list with items
|
| 341 |
+
*/
|
| 342 |
+
populateList(listElement, items) {
|
| 343 |
+
if (!listElement) return;
|
| 344 |
+
|
| 345 |
+
listElement.innerHTML = '';
|
| 346 |
+
|
| 347 |
+
if (!items || items.length === 0) {
|
| 348 |
+
const li = document.createElement('li');
|
| 349 |
+
li.textContent = 'No information available';
|
| 350 |
+
li.style.fontStyle = 'italic';
|
| 351 |
+
li.style.color = 'var(--text-muted)';
|
| 352 |
+
listElement.appendChild(li);
|
| 353 |
+
return;
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
items.slice(0, 6).forEach(item => {
|
| 357 |
+
const li = document.createElement('li');
|
| 358 |
+
// Handle both string items and object items
|
| 359 |
+
if (typeof item === 'string') {
|
| 360 |
+
li.textContent = item;
|
| 361 |
+
} else if (typeof item === 'object') {
|
| 362 |
+
li.textContent = item.text || item.description || item.name || JSON.stringify(item);
|
| 363 |
+
}
|
| 364 |
+
listElement.appendChild(li);
|
| 365 |
+
});
|
| 366 |
+
},
|
| 367 |
+
|
| 368 |
+
/**
|
| 369 |
+
* Populate immediate actions list
|
| 370 |
+
*/
|
| 371 |
+
populateActionsList(listElement, actions) {
|
| 372 |
+
if (!listElement) return;
|
| 373 |
+
|
| 374 |
+
listElement.innerHTML = '';
|
| 375 |
+
|
| 376 |
+
if (!actions || actions.length === 0) {
|
| 377 |
+
const li = document.createElement('li');
|
| 378 |
+
li.textContent = 'Consult agricultural expert for guidance';
|
| 379 |
+
listElement.appendChild(li);
|
| 380 |
+
return;
|
| 381 |
+
}
|
| 382 |
+
|
| 383 |
+
actions.slice(0, 5).forEach(action => {
|
| 384 |
+
const li = document.createElement('li');
|
| 385 |
+
if (typeof action === 'string') {
|
| 386 |
+
li.textContent = action;
|
| 387 |
+
} else if (typeof action === 'object') {
|
| 388 |
+
li.textContent = action.action || action.description || action.text || '';
|
| 389 |
+
}
|
| 390 |
+
listElement.appendChild(li);
|
| 391 |
+
});
|
| 392 |
+
},
|
| 393 |
+
|
| 394 |
+
/**
|
| 395 |
+
* Populate chemical treatments
|
| 396 |
+
*/
|
| 397 |
+
populateChemicalTreatments(treatments) {
|
| 398 |
+
const container = this.elements.chemicalTreatments;
|
| 399 |
+
if (!container) return;
|
| 400 |
+
|
| 401 |
+
container.innerHTML = '';
|
| 402 |
+
|
| 403 |
+
if (!treatments || treatments.length === 0) {
|
| 404 |
+
const div = document.createElement('div');
|
| 405 |
+
div.className = 'treatment-item';
|
| 406 |
+
div.innerHTML = '<span>Consult local agricultural store</span>';
|
| 407 |
+
container.appendChild(div);
|
| 408 |
+
return;
|
| 409 |
+
}
|
| 410 |
+
|
| 411 |
+
treatments.slice(0, 4).forEach(treatment => {
|
| 412 |
+
const div = document.createElement('div');
|
| 413 |
+
div.className = 'treatment-item';
|
| 414 |
+
|
| 415 |
+
const name = treatment.product || treatment.product_name || treatment.name || 'Treatment';
|
| 416 |
+
const dosage = treatment.dosage || treatment.application || '';
|
| 417 |
+
const costMin = treatment.cost_min || treatment.cost_ngn_min || '';
|
| 418 |
+
const costMax = treatment.cost_max || treatment.cost_ngn_max || '';
|
| 419 |
+
|
| 420 |
+
let costText = '';
|
| 421 |
+
if (costMin && costMax) {
|
| 422 |
+
costText = ` - ₦${costMin.toLocaleString()} to ₦${costMax.toLocaleString()}`;
|
| 423 |
+
}
|
| 424 |
+
|
| 425 |
+
div.innerHTML = `
|
| 426 |
+
<strong>${name}</strong>
|
| 427 |
+
<span>${dosage}${costText}</span>
|
| 428 |
+
`;
|
| 429 |
+
|
| 430 |
+
container.appendChild(div);
|
| 431 |
+
});
|
| 432 |
+
},
|
| 433 |
+
|
| 434 |
+
/**
|
| 435 |
+
* Get disease icon based on crop type
|
| 436 |
+
*/
|
| 437 |
+
getDiseaseIcon(cropType) {
|
| 438 |
+
const icons = {
|
| 439 |
+
cassava: '🌿',
|
| 440 |
+
cocoa: '🍫',
|
| 441 |
+
tomato: '🍅'
|
| 442 |
+
};
|
| 443 |
+
return icons[cropType?.toLowerCase()] || '🌱';
|
| 444 |
+
},
|
| 445 |
+
|
| 446 |
+
/**
|
| 447 |
+
* Format crop name
|
| 448 |
+
*/
|
| 449 |
+
formatCropName(cropType) {
|
| 450 |
+
if (!cropType) return 'Unknown';
|
| 451 |
+
return cropType.charAt(0).toUpperCase() + cropType.slice(1).toLowerCase();
|
| 452 |
+
},
|
| 453 |
+
|
| 454 |
+
/**
|
| 455 |
+
* Format severity level
|
| 456 |
+
*/
|
| 457 |
+
formatSeverity(severity) {
|
| 458 |
+
if (!severity) return 'Unknown';
|
| 459 |
+
return severity.replace(/_/g, ' ').replace(/\b\w/g, l => l.toUpperCase());
|
| 460 |
+
},
|
| 461 |
+
|
| 462 |
+
/**
|
| 463 |
+
* Switch between tabs
|
| 464 |
+
*/
|
| 465 |
+
switchTab(tabName) {
|
| 466 |
+
// Update button states
|
| 467 |
+
this.elements.tabButtons.forEach(btn => {
|
| 468 |
+
btn.classList.toggle('active', btn.dataset.tab === tabName);
|
| 469 |
+
});
|
| 470 |
+
|
| 471 |
+
// Update content visibility
|
| 472 |
+
const tabs = ['symptoms', 'treatment', 'prevention'];
|
| 473 |
+
tabs.forEach(tab => {
|
| 474 |
+
const tabElement = this.elements[`tab${tab.charAt(0).toUpperCase() + tab.slice(1)}`];
|
| 475 |
+
if (tabElement) {
|
| 476 |
+
tabElement.classList.toggle('active', tab === tabName);
|
| 477 |
+
tabElement.classList.toggle('hidden', tab !== tabName);
|
| 478 |
+
}
|
| 479 |
+
});
|
| 480 |
+
},
|
| 481 |
+
|
| 482 |
+
/**
|
| 483 |
+
* Clear results and reset for new scan
|
| 484 |
+
*/
|
| 485 |
+
clearResults() {
|
| 486 |
+
this.currentDiagnosis = null;
|
| 487 |
+
this.elements.resultsSection?.classList.add('hidden');
|
| 488 |
+
this.removeImage();
|
| 489 |
+
|
| 490 |
+
// Clear API diagnosis
|
| 491 |
+
FarmEyesAPI.clearDiagnosis().catch(() => {});
|
| 492 |
+
|
| 493 |
+
// Scroll to top
|
| 494 |
+
window.scrollTo({ top: 0, behavior: 'smooth' });
|
| 495 |
+
|
| 496 |
+
console.log('[Diagnosis] Results cleared');
|
| 497 |
+
},
|
| 498 |
+
|
| 499 |
+
/**
|
| 500 |
+
* Get current diagnosis data
|
| 501 |
+
*/
|
| 502 |
+
getDiagnosis() {
|
| 503 |
+
return this.currentDiagnosis;
|
| 504 |
+
},
|
| 505 |
+
|
| 506 |
+
/**
|
| 507 |
+
* Check if there's a valid diagnosis
|
| 508 |
+
*/
|
| 509 |
+
hasDiagnosis() {
|
| 510 |
+
return this.currentDiagnosis !== null;
|
| 511 |
+
}
|
| 512 |
+
};
|
| 513 |
+
|
| 514 |
+
// Export
|
| 515 |
+
window.Diagnosis = Diagnosis;
|
frontend/js/i18n.js
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
 * FarmEyes Internationalization (i18n)
 * =====================================
 * Static translations for UI elements.
 * Always works - no API dependency.
 *
 * Supported languages: English (en), Hausa (ha), Yoruba (yo), Igbo (ig).
 * Translations are embedded as flat "section.key" maps; missing keys fall
 * back to English, then to the key itself.
 */

const I18n = {
    // Active language code; defaults to English until init()/setLanguage()
    currentLanguage: 'en',

    // Static translations - embedded for reliability.
    // Keys are flat "section.key" strings shared across all languages.
    translations: {
        en: {
            // Buttons
            "buttons.continue": "Continue",
            "buttons.analyze": "Analyze Crop",
            "buttons.new_scan": "+ New Scan",
            "buttons.back": "Back",
            "buttons.chat": "Chat with Assistant",
            "buttons.stop": "Stop",

            // Diagnosis page
            "diagnosis.upload_title": "Upload Crop Image",
            "diagnosis.upload_desc": "Take a clear photo of the affected leaf or plant",
            "diagnosis.click_or_drag": "Click or drag image here",
            "diagnosis.analyzing": "Analyzing your crop...",

            // Results
            "results.title": "Diagnosis Results",
            "results.confidence": "Confidence:",
            "results.transmission": "How It Spreads",
            "results.yield_impact": "Yield Impact",
            "results.recovery": "Recovery Chance",

            // Tabs
            "tabs.symptoms": "Symptoms",
            "tabs.treatment": "Treatment",
            "tabs.prevention": "Prevention",

            // Treatment
            "treatment.immediate": "Immediate Actions",
            "treatment.chemical": "Chemical Treatment",
            "treatment.cost": "Estimated Cost:",

            // Chat
            "chat.discussing": "Discussing:",
            "chat.welcome": "Ask me anything about your diagnosis, treatments, or prevention tips.",
            "chat.placeholder": "Ask about your diagnosis...",
            "chat.disclaimer": "FarmEyes provides guidance only. Consult experts for serious cases.",

            // Voice
            "voice.listening": "Listening...",

            // Severity
            "severity_levels.very_high": "Very High",
            "severity_levels.high": "High",
            "severity_levels.medium": "Medium",
            "severity_levels.low": "Low",

            // Crops
            "crops.cassava": "Cassava",
            "crops.cocoa": "Cocoa",
            "crops.tomato": "Tomato"
        },

        // Hausa
        ha: {
            // Buttons
            "buttons.continue": "Ci gaba",
            "buttons.analyze": "Bincika Amfanin Gona",
            "buttons.new_scan": "+ Sabon Duba",
            "buttons.back": "Koma",
            "buttons.chat": "Yi magana da Mataimaki",
            "buttons.stop": "Daina",

            // Diagnosis page
            "diagnosis.upload_title": "Ɗora Hoton Amfanin Gona",
            "diagnosis.upload_desc": "Ɗauki hoto mai kyau na ganyen da ya kamu",
            "diagnosis.click_or_drag": "Danna ko ja hoto nan",
            "diagnosis.analyzing": "Ana bincika amfanin gonar ku...",

            // Results
            "results.title": "Sakamakon Bincike",
            "results.confidence": "Tabbaci:",
            "results.transmission": "Yadda Yake Yaɗuwa",
            "results.yield_impact": "Tasirin Amfanin Gona",
            "results.recovery": "Damar Murmurewa",

            // Tabs
            "tabs.symptoms": "Alamomi",
            "tabs.treatment": "Magani",
            "tabs.prevention": "Rigakafi",

            // Treatment
            "treatment.immediate": "Matakai na Gaggawa",
            "treatment.chemical": "Maganin Sinadari",
            "treatment.cost": "Ƙiyasin Farashi:",

            // Chat
            "chat.discussing": "Muna tattaunawa:",
            "chat.welcome": "Tambaye ni komai game da binciken ku.",
            "chat.placeholder": "Tambaya game da binciken ku...",
            "chat.disclaimer": "FarmEyes yana ba da jagora kawai.",

            // Voice
            "voice.listening": "Ana saurara...",

            // Severity
            "severity_levels.very_high": "Mai Tsanani Sosai",
            "severity_levels.high": "Mai Tsanani",
            "severity_levels.medium": "Matsakaici",
            "severity_levels.low": "Ƙasa",

            // Crops
            "crops.cassava": "Rogo",
            "crops.cocoa": "Koko",
            "crops.tomato": "Tumatir"
        },

        // Yoruba
        yo: {
            // Buttons
            "buttons.continue": "Tẹ̀síwájú",
            "buttons.analyze": "Ṣe Àyẹ̀wò Ohun Ọ̀gbìn",
            "buttons.new_scan": "+ Àyẹ̀wò Tuntun",
            "buttons.back": "Padà",
            "buttons.chat": "Bá Olùrànlọ́wọ́ sọ̀rọ̀",
            "buttons.stop": "Dúró",

            // Diagnosis page
            "diagnosis.upload_title": "Gbé Àwòrán Ohun Ọ̀gbìn Sókè",
            "diagnosis.upload_desc": "Ya àwòrán tó ṣe kedere ti ewé tó ní àrùn",
            "diagnosis.click_or_drag": "Tẹ tàbí fà àwòrán síbí",
            "diagnosis.analyzing": "A ń ṣe àyẹ̀wò ohun ọ̀gbìn yín...",

            // Results
            "results.title": "Àbájáde Àyẹ̀wò",
            "results.confidence": "Ìgbẹ́kẹ̀lé:",
            "results.transmission": "Bí Ó Ṣe Ń Tàn Kálẹ̀",
            "results.yield_impact": "Ipa Lórí Èso",
            "results.recovery": "Àǹfààní Ìmúlàradà",

            // Tabs
            "tabs.symptoms": "Àmì Àrùn",
            "tabs.treatment": "Ìtọ́jú",
            "tabs.prevention": "Ìdènà",

            // Treatment
            "treatment.immediate": "Ìgbésẹ̀ Lẹ́sẹ̀kẹsẹ̀",
            "treatment.chemical": "Ìtọ́jú Kẹ́míkà",
            "treatment.cost": "Iye Owó Tí A Ṣe Àfojúsùn:",

            // Chat
            "chat.discussing": "A ń sọ̀rọ̀ nípa:",
            "chat.welcome": "Bi mi nípa àyẹ̀wò rẹ, ìtọ́jú, tàbí ìdènà.",
            "chat.placeholder": "Béèrè nípa àyẹ̀wò rẹ...",
            "chat.disclaimer": "FarmEyes pèsè ìtọ́sọ́nà nìkan.",

            // Voice
            "voice.listening": "A ń gbọ́...",

            // Severity
            "severity_levels.very_high": "Ga Jù",
            "severity_levels.high": "Ga",
            "severity_levels.medium": "Àárín",
            "severity_levels.low": "Kéré",

            // Crops
            "crops.cassava": "Ẹ̀gẹ́",
            "crops.cocoa": "Koko",
            "crops.tomato": "Tòmátì"
        },

        // Igbo
        ig: {
            // Buttons
            "buttons.continue": "Gaa n'ihu",
            "buttons.analyze": "Nyochaa Ihe Ọkụkụ",
            "buttons.new_scan": "+ Nyocha Ọhụụ",
            "buttons.back": "Laghachi",
            "buttons.chat": "Soro Onye enyemaka",
            "buttons.stop": "Kwụsị",

            // Diagnosis page
            "diagnosis.upload_title": "Bulite Foto Ihe Ọkụkụ",
            "diagnosis.upload_desc": "See foto doro anya nke akwụkwọ nke nwere nsogbu",
            "diagnosis.click_or_drag": "Pịa ma ọ bụ dọrọ foto ebe a",
            "diagnosis.analyzing": "Anyị na-enyocha ihe ọkụkụ gị...",

            // Results
            "results.title": "Nsonaazụ Nyocha",
            "results.confidence": "Ntụkwasị Obi:",
            "results.transmission": "Otu Ọ Si Agbasa",
            "results.yield_impact": "Mmetụta Ọnụ Ego",
            "results.recovery": "Ohere Ịlaghachi",

            // Tabs
            "tabs.symptoms": "Ihe Ngosi",
            "tabs.treatment": "Ọgwụgwọ",
            "tabs.prevention": "Mgbochi",

            // Treatment
            "treatment.immediate": "Ihe Ọsịịsọ",
            "treatment.chemical": "Ọgwụgwọ Kemịkalụ",
            "treatment.cost": "Ego A Tụrụ Anya:",

            // Chat
            "chat.discussing": "Anyị na-atụ:",
            "chat.welcome": "Jụọ m ihe ọ bụla gbasara nyocha gị.",
            "chat.placeholder": "Jụọ maka nyocha gị...",
            "chat.disclaimer": "FarmEyes na-enye nduzi nọọ.",

            // Voice
            "voice.listening": "Anyị na-ege...",

            // Severity
            "severity_levels.very_high": "Dị Elu Nnọọ",
            "severity_levels.high": "Dị Elu",
            "severity_levels.medium": "Etiti",
            "severity_levels.low": "Dị Ala",

            // Crops
            "crops.cassava": "Akpụ",
            "crops.cocoa": "Koko",
            "crops.tomato": "Tomato"
        }
    },

    /**
     * Initialize with a starting language and translate the current DOM.
     * Note: the language is applied as-given here (no whitelist check,
     * unlike setLanguage) — caller is expected to pass a supported code.
     *
     * @param {string} [language='en'] - Initial language code.
     */
    async init(language = 'en') {
        this.currentLanguage = language;
        this.applyTranslations();
        console.log('[I18n] Initialized:', language);
    },

    /**
     * Switch the UI language: persists to localStorage, notifies the API
     * (best-effort), re-translates the DOM, and fires a `languageChanged`
     * CustomEvent on window for other modules.
     *
     * @param {string} language - One of en/ha/yo/ig; anything else falls back to 'en'.
     */
    async setLanguage(language) {
        if (!['en', 'ha', 'yo', 'ig'].includes(language)) {
            language = 'en';
        }

        this.currentLanguage = language;
        localStorage.setItem('farmeyes_language', language);

        // Update API — failure here must not block the UI language change
        try {
            await FarmEyesAPI.setLanguage(language);
        } catch (e) {
            console.warn('[I18n] API update failed:', e);
        }

        this.applyTranslations();

        window.dispatchEvent(new CustomEvent('languageChanged', { detail: { language } }));

        console.log('[I18n] Language changed:', language);
    },

    /**
     * Look up a translation with `{name}` placeholder interpolation.
     * Fallback chain: current language -> English -> the key itself
     * (so callers can detect a miss by comparing the result to the key).
     *
     * @param {string} key - Flat "section.key" translation key.
     * @param {object} [params={}] - Values substituted for `{name}` placeholders.
     * @returns {string} Translated (and interpolated) string, or the key on miss.
     */
    t(key, params = {}) {
        const langData = this.translations[this.currentLanguage] || this.translations.en;
        let value = langData[key] || this.translations.en[key] || key;

        // Interpolate {placeholders}; unknown names are left as-is
        if (typeof value === 'string' && Object.keys(params).length > 0) {
            value = value.replace(/\{(\w+)\}/g, (match, k) => params[k] !== undefined ? params[k] : match);
        }

        return value;
    },

    /**
     * Translate the live DOM: elements with [data-i18n] get textContent,
     * elements with [data-i18n-placeholder] get their placeholder set.
     * Untranslated keys (t() returned the key) leave the element unchanged.
     * Also updates <html lang="..."> for accessibility.
     */
    applyTranslations() {
        // Text content
        document.querySelectorAll('[data-i18n]').forEach(el => {
            const key = el.getAttribute('data-i18n');
            const translation = this.t(key);
            if (translation !== key) {
                el.textContent = translation;
            }
        });

        // Placeholders
        document.querySelectorAll('[data-i18n-placeholder]').forEach(el => {
            const key = el.getAttribute('data-i18n-placeholder');
            const translation = this.t(key);
            if (translation !== key) {
                el.placeholder = translation;
            }
        });

        document.documentElement.lang = this.currentLanguage;
    },

    // Returns the active language code (en/ha/yo/ig)
    getLanguage() {
        return this.currentLanguage;
    },

    /**
     * Format an amount as Nigerian Naira (₦, no decimals) using the
     * en-NG locale regardless of UI language.
     *
     * @param {number|null|undefined} amount - Value in NGN.
     * @returns {string} Formatted currency, or '' when amount is null/undefined.
     */
    formatCurrency(amount) {
        if (amount == null) return '';
        return new Intl.NumberFormat('en-NG', {
            style: 'currency',
            currency: 'NGN',
            minimumFractionDigits: 0
        }).format(amount);
    },

    /**
     * Translate a severity level ("Very High" -> localized label);
     * returns the raw level when no translation exists.
     *
     * @param {string|undefined} level - Severity level from the API.
     * @returns {string} Localized label, raw level, or 'Unknown'.
     */
    getSeverity(level) {
        if (!level) return 'Unknown';
        const key = `severity_levels.${level.toLowerCase().replace(/\s+/g, '_')}`;
        const t = this.t(key);
        return t !== key ? t : level;
    },

    /**
     * Translate a crop name ("cassava" -> localized name);
     * returns the raw crop string when no translation exists.
     *
     * @param {string|undefined} crop - Crop identifier from the API.
     * @returns {string} Localized crop name, raw crop, or ''.
     */
    getCropName(crop) {
        if (!crop) return '';
        const key = `crops.${crop.toLowerCase()}`;
        const t = this.t(key);
        return t !== key ? t : crop;
    }
};

// Export as a global for the other script-tag modules
window.I18n = I18n;
|
frontend/js/tts.js
ADDED
|
@@ -0,0 +1,526 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes TTS Module
|
| 3 |
+
* ===================
|
| 4 |
+
* Text-to-Speech functionality using Meta MMS-TTS via HuggingFace API.
|
| 5 |
+
*
|
| 6 |
+
* Features:
|
| 7 |
+
* - Play/Pause/Stop controls
|
| 8 |
+
* - Speed control (0.75x, 1x, 1.25x, 1.5x)
|
| 9 |
+
* - Audio caching (browser session)
|
| 10 |
+
* - Web Speech API fallback for English
|
| 11 |
+
* - Floating player for last message
|
| 12 |
+
*/
|
| 13 |
+
|
| 14 |
+
const TTS = {
|
| 15 |
+
// ==========================================================================
|
| 16 |
+
// STATE
|
| 17 |
+
// ==========================================================================
|
| 18 |
+
|
| 19 |
+
// Current audio state
|
| 20 |
+
isPlaying: false,
|
| 21 |
+
isPaused: false,
|
| 22 |
+
currentAudio: null,
|
| 23 |
+
currentMessageId: null,
|
| 24 |
+
|
| 25 |
+
// Playback settings
|
| 26 |
+
playbackRate: 1.0,
|
| 27 |
+
|
| 28 |
+
// Cache for generated audio (session-based)
|
| 29 |
+
audioCache: new Map(),
|
| 30 |
+
|
| 31 |
+
// Web Speech API fallback
|
| 32 |
+
speechSynthesis: window.speechSynthesis || null,
|
| 33 |
+
|
| 34 |
+
// Callbacks
|
| 35 |
+
onPlayStart: null,
|
| 36 |
+
onPlayEnd: null,
|
| 37 |
+
onError: null,
|
| 38 |
+
|
| 39 |
+
// ==========================================================================
|
| 40 |
+
// INITIALIZATION
|
| 41 |
+
// ==========================================================================
|
| 42 |
+
|
| 43 |
+
/**
|
| 44 |
+
* Initialize TTS module
|
| 45 |
+
* @param {object} callbacks - Optional callback functions
|
| 46 |
+
*/
|
| 47 |
+
init(callbacks = {}) {
|
| 48 |
+
this.onPlayStart = callbacks.onPlayStart || (() => {});
|
| 49 |
+
this.onPlayEnd = callbacks.onPlayEnd || (() => {});
|
| 50 |
+
this.onError = callbacks.onError || ((err) => console.error('[TTS]', err));
|
| 51 |
+
|
| 52 |
+
// Check Web Speech API availability
|
| 53 |
+
if (this.speechSynthesis) {
|
| 54 |
+
console.log('[TTS] Web Speech API available for fallback');
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
console.log('[TTS] Initialized');
|
| 58 |
+
},
|
| 59 |
+
|
| 60 |
+
// ==========================================================================
|
| 61 |
+
// PUBLIC API
|
| 62 |
+
// ==========================================================================
|
| 63 |
+
|
| 64 |
+
/**
|
| 65 |
+
* Speak text using TTS
|
| 66 |
+
* @param {string} text - Text to speak
|
| 67 |
+
* @param {string} language - Language code (en, ha, yo, ig)
|
| 68 |
+
* @param {string} messageId - Unique message identifier for caching
|
| 69 |
+
* @returns {Promise<boolean>} Success status
|
| 70 |
+
*/
|
| 71 |
+
async speak(text, language = 'en', messageId = null) {
|
| 72 |
+
// Stop any current playback
|
| 73 |
+
this.stop();
|
| 74 |
+
|
| 75 |
+
// Generate cache key
|
| 76 |
+
const cacheKey = messageId || this.generateCacheKey(text, language);
|
| 77 |
+
|
| 78 |
+
// Check cache first
|
| 79 |
+
if (this.audioCache.has(cacheKey)) {
|
| 80 |
+
console.log('[TTS] Using cached audio');
|
| 81 |
+
return this.playFromCache(cacheKey);
|
| 82 |
+
}
|
| 83 |
+
|
| 84 |
+
// Try MMS-TTS API
|
| 85 |
+
try {
|
| 86 |
+
console.log(`[TTS] Synthesizing: lang=${language}, length=${text.length}`);
|
| 87 |
+
|
| 88 |
+
const result = await this.synthesizeAPI(text, language);
|
| 89 |
+
|
| 90 |
+
if (result.success && result.audio_base64) {
|
| 91 |
+
// Cache the audio
|
| 92 |
+
this.audioCache.set(cacheKey, {
|
| 93 |
+
audio_base64: result.audio_base64,
|
| 94 |
+
content_type: result.content_type,
|
| 95 |
+
language: language
|
| 96 |
+
});
|
| 97 |
+
|
| 98 |
+
// Play it
|
| 99 |
+
return this.playAudio(result.audio_base64, result.content_type, cacheKey);
|
| 100 |
+
} else {
|
| 101 |
+
throw new Error(result.error || 'TTS synthesis failed');
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
} catch (error) {
|
| 105 |
+
console.error('[TTS] API failed:', error);
|
| 106 |
+
|
| 107 |
+
// Try fallback for English
|
| 108 |
+
if (language === 'en' && this.speechSynthesis) {
|
| 109 |
+
console.log('[TTS] Falling back to Web Speech API');
|
| 110 |
+
return this.speakWithWebSpeech(text);
|
| 111 |
+
}
|
| 112 |
+
|
| 113 |
+
// No fallback available
|
| 114 |
+
this.onError(`Voice playback failed: ${error.message}`);
|
| 115 |
+
return false;
|
| 116 |
+
}
|
| 117 |
+
},
|
| 118 |
+
|
| 119 |
+
/**
|
| 120 |
+
* Play audio from cache
|
| 121 |
+
* @param {string} cacheKey - Cache key
|
| 122 |
+
* @returns {Promise<boolean>} Success status
|
| 123 |
+
*/
|
| 124 |
+
async playFromCache(cacheKey) {
|
| 125 |
+
const cached = this.audioCache.get(cacheKey);
|
| 126 |
+
if (!cached) return false;
|
| 127 |
+
|
| 128 |
+
return this.playAudio(cached.audio_base64, cached.content_type, cacheKey);
|
| 129 |
+
},
|
| 130 |
+
|
| 131 |
+
/**
|
| 132 |
+
* Pause current playback
|
| 133 |
+
*/
|
| 134 |
+
pause() {
|
| 135 |
+
if (this.currentAudio && this.isPlaying) {
|
| 136 |
+
this.currentAudio.pause();
|
| 137 |
+
this.isPlaying = false;
|
| 138 |
+
this.isPaused = true;
|
| 139 |
+
console.log('[TTS] Paused');
|
| 140 |
+
this.updatePlayerUI('paused');
|
| 141 |
+
}
|
| 142 |
+
},
|
| 143 |
+
|
| 144 |
+
/**
|
| 145 |
+
* Resume paused playback
|
| 146 |
+
*/
|
| 147 |
+
resume() {
|
| 148 |
+
if (this.currentAudio && this.isPaused) {
|
| 149 |
+
this.currentAudio.play();
|
| 150 |
+
this.isPlaying = true;
|
| 151 |
+
this.isPaused = false;
|
| 152 |
+
console.log('[TTS] Resumed');
|
| 153 |
+
this.updatePlayerUI('playing');
|
| 154 |
+
}
|
| 155 |
+
},
|
| 156 |
+
|
| 157 |
+
/**
|
| 158 |
+
* Toggle play/pause
|
| 159 |
+
*/
|
| 160 |
+
togglePlayPause() {
|
| 161 |
+
if (this.isPlaying) {
|
| 162 |
+
this.pause();
|
| 163 |
+
} else if (this.isPaused) {
|
| 164 |
+
this.resume();
|
| 165 |
+
}
|
| 166 |
+
},
|
| 167 |
+
|
| 168 |
+
/**
|
| 169 |
+
* Stop playback completely
|
| 170 |
+
*/
|
| 171 |
+
stop() {
|
| 172 |
+
// Stop HTML5 Audio
|
| 173 |
+
if (this.currentAudio) {
|
| 174 |
+
this.currentAudio.pause();
|
| 175 |
+
this.currentAudio.currentTime = 0;
|
| 176 |
+
this.currentAudio = null;
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
// Stop Web Speech API
|
| 180 |
+
if (this.speechSynthesis) {
|
| 181 |
+
this.speechSynthesis.cancel();
|
| 182 |
+
}
|
| 183 |
+
|
| 184 |
+
this.isPlaying = false;
|
| 185 |
+
this.isPaused = false;
|
| 186 |
+
this.currentMessageId = null;
|
| 187 |
+
|
| 188 |
+
console.log('[TTS] Stopped');
|
| 189 |
+
this.updatePlayerUI('stopped');
|
| 190 |
+
this.onPlayEnd();
|
| 191 |
+
},
|
| 192 |
+
|
| 193 |
+
/**
|
| 194 |
+
* Set playback speed
|
| 195 |
+
* @param {number} rate - Playback rate (0.5 - 2.0)
|
| 196 |
+
*/
|
| 197 |
+
setPlaybackRate(rate) {
|
| 198 |
+
this.playbackRate = Math.max(0.5, Math.min(2.0, rate));
|
| 199 |
+
|
| 200 |
+
if (this.currentAudio) {
|
| 201 |
+
this.currentAudio.playbackRate = this.playbackRate;
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
console.log(`[TTS] Playback rate: ${this.playbackRate}x`);
|
| 205 |
+
this.updateSpeedButtonsUI();
|
| 206 |
+
},
|
| 207 |
+
|
| 208 |
+
/**
|
| 209 |
+
* Check if currently playing
|
| 210 |
+
* @returns {boolean}
|
| 211 |
+
*/
|
| 212 |
+
getIsPlaying() {
|
| 213 |
+
return this.isPlaying;
|
| 214 |
+
},
|
| 215 |
+
|
| 216 |
+
/**
|
| 217 |
+
* Check if paused
|
| 218 |
+
* @returns {boolean}
|
| 219 |
+
*/
|
| 220 |
+
getIsPaused() {
|
| 221 |
+
return this.isPaused;
|
| 222 |
+
},
|
| 223 |
+
|
| 224 |
+
/**
|
| 225 |
+
* Get current playback rate
|
| 226 |
+
* @returns {number}
|
| 227 |
+
*/
|
| 228 |
+
getPlaybackRate() {
|
| 229 |
+
return this.playbackRate;
|
| 230 |
+
},
|
| 231 |
+
|
| 232 |
+
// ==========================================================================
|
| 233 |
+
// API COMMUNICATION
|
| 234 |
+
// ==========================================================================
|
| 235 |
+
|
| 236 |
+
/**
|
| 237 |
+
* Call TTS API to synthesize speech
|
| 238 |
+
* @param {string} text - Text to synthesize
|
| 239 |
+
* @param {string} language - Language code
|
| 240 |
+
* @returns {Promise<object>} API response
|
| 241 |
+
*/
|
| 242 |
+
async synthesizeAPI(text, language) {
|
| 243 |
+
const response = await fetch('/api/tts', {
|
| 244 |
+
method: 'POST',
|
| 245 |
+
headers: {
|
| 246 |
+
'Content-Type': 'application/json'
|
| 247 |
+
},
|
| 248 |
+
body: JSON.stringify({
|
| 249 |
+
text: text,
|
| 250 |
+
language: language
|
| 251 |
+
})
|
| 252 |
+
});
|
| 253 |
+
|
| 254 |
+
if (!response.ok) {
|
| 255 |
+
const error = await response.json().catch(() => ({}));
|
| 256 |
+
throw new Error(error.detail || `HTTP ${response.status}`);
|
| 257 |
+
}
|
| 258 |
+
|
| 259 |
+
return await response.json();
|
| 260 |
+
},
|
| 261 |
+
|
| 262 |
+
// ==========================================================================
|
| 263 |
+
// AUDIO PLAYBACK
|
| 264 |
+
// ==========================================================================
|
| 265 |
+
|
| 266 |
+
/**
|
| 267 |
+
* Play audio from base64 data
|
| 268 |
+
* @param {string} audioBase64 - Base64 encoded audio
|
| 269 |
+
* @param {string} contentType - MIME type
|
| 270 |
+
* @param {string} messageId - Message identifier
|
| 271 |
+
* @returns {Promise<boolean>} Success status
|
| 272 |
+
*/
|
| 273 |
+
async playAudio(audioBase64, contentType, messageId) {
|
| 274 |
+
try {
|
| 275 |
+
// Create blob from base64
|
| 276 |
+
const binaryString = atob(audioBase64);
|
| 277 |
+
const bytes = new Uint8Array(binaryString.length);
|
| 278 |
+
for (let i = 0; i < binaryString.length; i++) {
|
| 279 |
+
bytes[i] = binaryString.charCodeAt(i);
|
| 280 |
+
}
|
| 281 |
+
const blob = new Blob([bytes], { type: contentType });
|
| 282 |
+
const audioUrl = URL.createObjectURL(blob);
|
| 283 |
+
|
| 284 |
+
// Create audio element
|
| 285 |
+
this.currentAudio = new Audio(audioUrl);
|
| 286 |
+
this.currentAudio.playbackRate = this.playbackRate;
|
| 287 |
+
this.currentMessageId = messageId;
|
| 288 |
+
|
| 289 |
+
// Set up event handlers
|
| 290 |
+
this.currentAudio.onplay = () => {
|
| 291 |
+
this.isPlaying = true;
|
| 292 |
+
this.isPaused = false;
|
| 293 |
+
this.onPlayStart();
|
| 294 |
+
this.updatePlayerUI('playing');
|
| 295 |
+
};
|
| 296 |
+
|
| 297 |
+
this.currentAudio.onpause = () => {
|
| 298 |
+
if (!this.currentAudio.ended) {
|
| 299 |
+
this.updatePlayerUI('paused');
|
| 300 |
+
}
|
| 301 |
+
};
|
| 302 |
+
|
| 303 |
+
this.currentAudio.onended = () => {
|
| 304 |
+
this.isPlaying = false;
|
| 305 |
+
this.isPaused = false;
|
| 306 |
+
this.currentMessageId = null;
|
| 307 |
+
URL.revokeObjectURL(audioUrl);
|
| 308 |
+
this.onPlayEnd();
|
| 309 |
+
this.updatePlayerUI('stopped');
|
| 310 |
+
console.log('[TTS] Playback ended');
|
| 311 |
+
};
|
| 312 |
+
|
| 313 |
+
this.currentAudio.onerror = (e) => {
|
| 314 |
+
console.error('[TTS] Audio error:', e);
|
| 315 |
+
this.stop();
|
| 316 |
+
this.onError('Audio playback failed');
|
| 317 |
+
};
|
| 318 |
+
|
| 319 |
+
// Update UI for time tracking
|
| 320 |
+
this.currentAudio.ontimeupdate = () => {
|
| 321 |
+
this.updateProgressUI();
|
| 322 |
+
};
|
| 323 |
+
|
| 324 |
+
// Start playback
|
| 325 |
+
await this.currentAudio.play();
|
| 326 |
+
console.log('[TTS] Playing audio');
|
| 327 |
+
|
| 328 |
+
return true;
|
| 329 |
+
|
| 330 |
+
} catch (error) {
|
| 331 |
+
console.error('[TTS] Play error:', error);
|
| 332 |
+
this.onError('Failed to play audio');
|
| 333 |
+
return false;
|
| 334 |
+
}
|
| 335 |
+
},
|
| 336 |
+
|
| 337 |
+
// ==========================================================================
|
| 338 |
+
// WEB SPEECH API FALLBACK (English only)
|
| 339 |
+
// ==========================================================================
|
| 340 |
+
|
| 341 |
+
/**
|
| 342 |
+
* Speak using Web Speech API (fallback for English)
|
| 343 |
+
* @param {string} text - Text to speak
|
| 344 |
+
* @returns {Promise<boolean>} Success status
|
| 345 |
+
*/
|
| 346 |
+
async speakWithWebSpeech(text) {
|
| 347 |
+
return new Promise((resolve) => {
|
| 348 |
+
if (!this.speechSynthesis) {
|
| 349 |
+
this.onError('Web Speech API not available');
|
| 350 |
+
resolve(false);
|
| 351 |
+
return;
|
| 352 |
+
}
|
| 353 |
+
|
| 354 |
+
// Cancel any ongoing speech
|
| 355 |
+
this.speechSynthesis.cancel();
|
| 356 |
+
|
| 357 |
+
// Create utterance
|
| 358 |
+
const utterance = new SpeechSynthesisUtterance(text);
|
| 359 |
+
utterance.lang = 'en-US';
|
| 360 |
+
utterance.rate = this.playbackRate;
|
| 361 |
+
utterance.pitch = 1.0;
|
| 362 |
+
|
| 363 |
+
// Get English voice
|
| 364 |
+
const voices = this.speechSynthesis.getVoices();
|
| 365 |
+
const englishVoice = voices.find(v => v.lang.startsWith('en'));
|
| 366 |
+
if (englishVoice) {
|
| 367 |
+
utterance.voice = englishVoice;
|
| 368 |
+
}
|
| 369 |
+
|
| 370 |
+
// Event handlers
|
| 371 |
+
utterance.onstart = () => {
|
| 372 |
+
this.isPlaying = true;
|
| 373 |
+
this.isPaused = false;
|
| 374 |
+
this.onPlayStart();
|
| 375 |
+
this.updatePlayerUI('playing');
|
| 376 |
+
};
|
| 377 |
+
|
| 378 |
+
utterance.onend = () => {
|
| 379 |
+
this.isPlaying = false;
|
| 380 |
+
this.isPaused = false;
|
| 381 |
+
this.onPlayEnd();
|
| 382 |
+
this.updatePlayerUI('stopped');
|
| 383 |
+
resolve(true);
|
| 384 |
+
};
|
| 385 |
+
|
| 386 |
+
utterance.onerror = (e) => {
|
| 387 |
+
console.error('[TTS] Web Speech error:', e);
|
| 388 |
+
this.onError('Speech synthesis failed');
|
| 389 |
+
resolve(false);
|
| 390 |
+
};
|
| 391 |
+
|
| 392 |
+
// Speak
|
| 393 |
+
this.speechSynthesis.speak(utterance);
|
| 394 |
+
console.log('[TTS] Using Web Speech API');
|
| 395 |
+
});
|
| 396 |
+
},
|
| 397 |
+
|
| 398 |
+
// ==========================================================================
|
| 399 |
+
// UI HELPERS
|
| 400 |
+
// ==========================================================================
|
| 401 |
+
|
| 402 |
+
/**
|
| 403 |
+
* Generate cache key from text and language
|
| 404 |
+
* @param {string} text - Text content
|
| 405 |
+
* @param {string} language - Language code
|
| 406 |
+
* @returns {string} Cache key
|
| 407 |
+
*/
|
| 408 |
+
generateCacheKey(text, language) {
|
| 409 |
+
// Simple hash-like key
|
| 410 |
+
const hash = text.slice(0, 50).replace(/\s+/g, '_');
|
| 411 |
+
return `${language}_${hash}_${text.length}`;
|
| 412 |
+
},
|
| 413 |
+
|
| 414 |
+
/**
|
| 415 |
+
* Update player UI state
|
| 416 |
+
* @param {string} state - 'playing', 'paused', 'stopped'
|
| 417 |
+
*/
|
| 418 |
+
updatePlayerUI(state) {
|
| 419 |
+
const player = document.getElementById('tts-player');
|
| 420 |
+
const btnPlayPause = document.getElementById('tts-play-pause');
|
| 421 |
+
|
| 422 |
+
if (!player) return;
|
| 423 |
+
|
| 424 |
+
switch (state) {
|
| 425 |
+
case 'playing':
|
| 426 |
+
player.classList.add('active');
|
| 427 |
+
if (btnPlayPause) {
|
| 428 |
+
btnPlayPause.innerHTML = this.getPauseIcon();
|
| 429 |
+
btnPlayPause.title = 'Pause';
|
| 430 |
+
}
|
| 431 |
+
break;
|
| 432 |
+
|
| 433 |
+
case 'paused':
|
| 434 |
+
player.classList.add('active');
|
| 435 |
+
if (btnPlayPause) {
|
| 436 |
+
btnPlayPause.innerHTML = this.getPlayIcon();
|
| 437 |
+
btnPlayPause.title = 'Resume';
|
| 438 |
+
}
|
| 439 |
+
break;
|
| 440 |
+
|
| 441 |
+
case 'stopped':
|
| 442 |
+
player.classList.remove('active');
|
| 443 |
+
this.resetProgressUI();
|
| 444 |
+
break;
|
| 445 |
+
}
|
| 446 |
+
},
|
| 447 |
+
|
| 448 |
+
/**
|
| 449 |
+
* Update progress bar and time display
|
| 450 |
+
*/
|
| 451 |
+
updateProgressUI() {
|
| 452 |
+
if (!this.currentAudio) return;
|
| 453 |
+
|
| 454 |
+
const progress = document.getElementById('tts-progress');
|
| 455 |
+
const timeDisplay = document.getElementById('tts-time');
|
| 456 |
+
|
| 457 |
+
const current = this.currentAudio.currentTime;
|
| 458 |
+
const duration = this.currentAudio.duration || 0;
|
| 459 |
+
|
| 460 |
+
if (progress && duration > 0) {
|
| 461 |
+
const percent = (current / duration) * 100;
|
| 462 |
+
progress.style.width = `${percent}%`;
|
| 463 |
+
}
|
| 464 |
+
|
| 465 |
+
if (timeDisplay && duration > 0) {
|
| 466 |
+
const formatTime = (t) => {
|
| 467 |
+
const mins = Math.floor(t / 60);
|
| 468 |
+
const secs = Math.floor(t % 60);
|
| 469 |
+
return `${mins}:${secs.toString().padStart(2, '0')}`;
|
| 470 |
+
};
|
| 471 |
+
timeDisplay.textContent = `${formatTime(current)} / ${formatTime(duration)}`;
|
| 472 |
+
}
|
| 473 |
+
},
|
| 474 |
+
|
| 475 |
+
/**
|
| 476 |
+
* Reset progress UI
|
| 477 |
+
*/
|
| 478 |
+
resetProgressUI() {
|
| 479 |
+
const progress = document.getElementById('tts-progress');
|
| 480 |
+
const timeDisplay = document.getElementById('tts-time');
|
| 481 |
+
|
| 482 |
+
if (progress) progress.style.width = '0%';
|
| 483 |
+
if (timeDisplay) timeDisplay.textContent = '0:00';
|
| 484 |
+
},
|
| 485 |
+
|
| 486 |
+
/**
|
| 487 |
+
* Update speed buttons to show active state
|
| 488 |
+
*/
|
| 489 |
+
updateSpeedButtonsUI() {
|
| 490 |
+
const buttons = document.querySelectorAll('.tts-speed-btn');
|
| 491 |
+
buttons.forEach(btn => {
|
| 492 |
+
const rate = parseFloat(btn.dataset.rate);
|
| 493 |
+
btn.classList.toggle('active', rate === this.playbackRate);
|
| 494 |
+
});
|
| 495 |
+
},
|
| 496 |
+
|
| 497 |
+
/**
|
| 498 |
+
* Get play icon SVG
|
| 499 |
+
*/
|
| 500 |
+
/**
 * Get play icon SVG.
 * @returns {string} Inline SVG markup (right-pointing triangle) used
 *     for the player's play/resume button.
 */
getPlayIcon() {
return `<svg viewBox="0 0 24 24" fill="currentColor" width="20" height="20">
<polygon points="5,3 19,12 5,21"/>
</svg>`;
},
|
| 505 |
+
|
| 506 |
+
/**
|
| 507 |
+
* Get pause icon SVG
|
| 508 |
+
*/
|
| 509 |
+
/**
 * Get pause icon SVG.
 * @returns {string} Inline SVG markup (two vertical bars) used for the
 *     player's pause button.
 */
getPauseIcon() {
return `<svg viewBox="0 0 24 24" fill="currentColor" width="20" height="20">
<rect x="6" y="4" width="4" height="16"/>
<rect x="14" y="4" width="4" height="16"/>
</svg>`;
},
|
| 515 |
+
|
| 516 |
+
/**
|
| 517 |
+
* Clear audio cache
|
| 518 |
+
*/
|
| 519 |
+
clearCache() {
|
| 520 |
+
this.audioCache.clear();
|
| 521 |
+
console.log('[TTS] Cache cleared');
|
| 522 |
+
}
|
| 523 |
+
};
|
| 524 |
+
|
| 525 |
+
// Export for use in other modules
|
| 526 |
+
window.TTS = TTS;
|
frontend/js/voice.js
ADDED
|
@@ -0,0 +1,951 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* FarmEyes Voice Input - Robust Implementation
|
| 3 |
+
* =============================================
|
| 4 |
+
* Handles voice recording and transcription using Web Audio API
|
| 5 |
+
* and backend Whisper service.
|
| 6 |
+
*
|
| 7 |
+
* Pipeline: Voice → Whisper → Text → N-ATLaS → Response
|
| 8 |
+
*
|
| 9 |
+
* Features:
|
| 10 |
+
* - Comprehensive browser compatibility detection
|
| 11 |
+
* - Detailed error logging for debugging
|
| 12 |
+
* - Secure context verification
|
| 13 |
+
* - Graceful fallbacks for older browsers
|
| 14 |
+
* - Safari and Chrome-specific handling
|
| 15 |
+
*
|
| 16 |
+
* @author FarmEyes Team
|
| 17 |
+
* @version 2.0.0
|
| 18 |
+
*/
|
| 19 |
+
|
| 20 |
+
const VoiceInput = {
|
| 21 |
+
// ==========================================================================
|
| 22 |
+
// STATE MANAGEMENT
|
| 23 |
+
// ==========================================================================
|
| 24 |
+
|
| 25 |
+
// Recording state
|
| 26 |
+
isRecording: false,
|
| 27 |
+
mediaRecorder: null,
|
| 28 |
+
audioChunks: [],
|
| 29 |
+
stream: null,
|
| 30 |
+
|
| 31 |
+
// Configuration
|
| 32 |
+
maxDuration: 30000, // 30 seconds max recording
|
| 33 |
+
minDuration: 500, // 0.5 seconds minimum
|
| 34 |
+
recordingTimer: null,
|
| 35 |
+
recordingStartTime: null,
|
| 36 |
+
|
| 37 |
+
// Browser capabilities (cached after first check)
|
| 38 |
+
_capabilities: null,
|
| 39 |
+
|
| 40 |
+
// Callbacks
|
| 41 |
+
onTranscription: null,
|
| 42 |
+
onError: null,
|
| 43 |
+
onRecordingStart: null,
|
| 44 |
+
onRecordingStop: null,
|
| 45 |
+
onPermissionDenied: null,
|
| 46 |
+
|
| 47 |
+
// ==========================================================================
|
| 48 |
+
// INITIALIZATION
|
| 49 |
+
// ==========================================================================
|
| 50 |
+
|
| 51 |
+
/**
|
| 52 |
+
* Initialize voice input with callbacks
|
| 53 |
+
* @param {object} callbacks - Callback functions
|
| 54 |
+
* @param {function} callbacks.onTranscription - Called with transcribed text
|
| 55 |
+
* @param {function} callbacks.onError - Called on errors
|
| 56 |
+
* @param {function} callbacks.onRecordingStart - Called when recording starts
|
| 57 |
+
* @param {function} callbacks.onRecordingStop - Called when recording stops
|
| 58 |
+
* @param {function} callbacks.onPermissionDenied - Called when mic permission denied
|
| 59 |
+
*/
|
| 60 |
+
init(callbacks = {}) {
|
| 61 |
+
// Set callbacks with defaults
|
| 62 |
+
this.onTranscription = callbacks.onTranscription || ((text) => {
|
| 63 |
+
console.log('[Voice] Transcription:', text);
|
| 64 |
+
});
|
| 65 |
+
this.onError = callbacks.onError || ((err) => {
|
| 66 |
+
console.error('[Voice] Error:', err);
|
| 67 |
+
});
|
| 68 |
+
this.onRecordingStart = callbacks.onRecordingStart || (() => {
|
| 69 |
+
console.log('[Voice] Recording started');
|
| 70 |
+
});
|
| 71 |
+
this.onRecordingStop = callbacks.onRecordingStop || (() => {
|
| 72 |
+
console.log('[Voice] Recording stopped');
|
| 73 |
+
});
|
| 74 |
+
this.onPermissionDenied = callbacks.onPermissionDenied || (() => {
|
| 75 |
+
console.warn('[Voice] Permission denied');
|
| 76 |
+
});
|
| 77 |
+
|
| 78 |
+
// Check and cache browser capabilities
|
| 79 |
+
this._capabilities = this.checkCapabilities();
|
| 80 |
+
|
| 81 |
+
// Log initialization status
|
| 82 |
+
console.log('[Voice] Initialized with capabilities:', this._capabilities);
|
| 83 |
+
|
| 84 |
+
return this._capabilities.supported;
|
| 85 |
+
},
|
| 86 |
+
|
| 87 |
+
// ==========================================================================
|
| 88 |
+
// BROWSER CAPABILITY DETECTION
|
| 89 |
+
// ==========================================================================
|
| 90 |
+
|
| 91 |
+
/**
|
| 92 |
+
* Comprehensive browser capability check
|
| 93 |
+
* Returns detailed information about what's supported
|
| 94 |
+
* @returns {object} Capability report
|
| 95 |
+
*/
|
| 96 |
+
checkCapabilities() {
|
| 97 |
+
const capabilities = {
|
| 98 |
+
supported: false,
|
| 99 |
+
secureContext: false,
|
| 100 |
+
mediaDevices: false,
|
| 101 |
+
getUserMedia: false,
|
| 102 |
+
mediaRecorder: false,
|
| 103 |
+
audioContext: false,
|
| 104 |
+
supportedMimeTypes: [],
|
| 105 |
+
browser: this.detectBrowser(),
|
| 106 |
+
issues: []
|
| 107 |
+
};
|
| 108 |
+
|
| 109 |
+
// Check 1: Secure Context (required for getUserMedia)
|
| 110 |
+
// localhost is considered secure, but let's verify
|
| 111 |
+
capabilities.secureContext = this.isSecureContext();
|
| 112 |
+
if (!capabilities.secureContext) {
|
| 113 |
+
capabilities.issues.push('Not in a secure context (HTTPS or localhost required)');
|
| 114 |
+
console.warn('[Voice] ❌ Not in secure context. URL:', window.location.href);
|
| 115 |
+
} else {
|
| 116 |
+
console.log('[Voice] ✓ Secure context verified');
|
| 117 |
+
}
|
| 118 |
+
|
| 119 |
+
// Check 2: navigator.mediaDevices exists
|
| 120 |
+
capabilities.mediaDevices = !!(navigator.mediaDevices);
|
| 121 |
+
if (!capabilities.mediaDevices) {
|
| 122 |
+
capabilities.issues.push('navigator.mediaDevices not available');
|
| 123 |
+
console.warn('[Voice] ❌ navigator.mediaDevices is undefined');
|
| 124 |
+
|
| 125 |
+
// Try to diagnose why
|
| 126 |
+
if (typeof navigator === 'undefined') {
|
| 127 |
+
console.error('[Voice] navigator object is undefined');
|
| 128 |
+
} else {
|
| 129 |
+
console.log('[Voice] navigator exists, but mediaDevices is:', navigator.mediaDevices);
|
| 130 |
+
}
|
| 131 |
+
} else {
|
| 132 |
+
console.log('[Voice] ✓ navigator.mediaDevices available');
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
// Check 3: getUserMedia function exists
|
| 136 |
+
if (capabilities.mediaDevices) {
|
| 137 |
+
capabilities.getUserMedia = !!(navigator.mediaDevices.getUserMedia);
|
| 138 |
+
if (!capabilities.getUserMedia) {
|
| 139 |
+
capabilities.issues.push('getUserMedia not available');
|
| 140 |
+
console.warn('[Voice] ❌ getUserMedia not found on mediaDevices');
|
| 141 |
+
} else {
|
| 142 |
+
console.log('[Voice] ✓ getUserMedia available');
|
| 143 |
+
}
|
| 144 |
+
}
|
| 145 |
+
|
| 146 |
+
// Check 4: MediaRecorder API exists
|
| 147 |
+
capabilities.mediaRecorder = !!(window.MediaRecorder);
|
| 148 |
+
if (!capabilities.mediaRecorder) {
|
| 149 |
+
capabilities.issues.push('MediaRecorder API not available');
|
| 150 |
+
console.warn('[Voice] ❌ MediaRecorder not available');
|
| 151 |
+
} else {
|
| 152 |
+
console.log('[Voice] ✓ MediaRecorder available');
|
| 153 |
+
|
| 154 |
+
// Check supported MIME types
|
| 155 |
+
capabilities.supportedMimeTypes = this.getSupportedMimeTypes();
|
| 156 |
+
console.log('[Voice] Supported MIME types:', capabilities.supportedMimeTypes);
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
// Check 5: AudioContext (optional but useful)
|
| 160 |
+
capabilities.audioContext = !!(window.AudioContext || window.webkitAudioContext);
|
| 161 |
+
if (capabilities.audioContext) {
|
| 162 |
+
console.log('[Voice] ✓ AudioContext available');
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
// Final determination
|
| 166 |
+
capabilities.supported = (
|
| 167 |
+
capabilities.secureContext &&
|
| 168 |
+
capabilities.mediaDevices &&
|
| 169 |
+
capabilities.getUserMedia &&
|
| 170 |
+
capabilities.mediaRecorder
|
| 171 |
+
);
|
| 172 |
+
|
| 173 |
+
if (capabilities.supported) {
|
| 174 |
+
console.log('[Voice] ✅ All capabilities supported - voice input ready');
|
| 175 |
+
} else {
|
| 176 |
+
console.error('[Voice] ❌ Voice input NOT supported. Issues:', capabilities.issues);
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
return capabilities;
|
| 180 |
+
},
|
| 181 |
+
|
| 182 |
+
/**
|
| 183 |
+
* Check if we're in a secure context
|
| 184 |
+
* @returns {boolean} Is secure context
|
| 185 |
+
*/
|
| 186 |
+
isSecureContext() {
|
| 187 |
+
// Modern browsers have window.isSecureContext
|
| 188 |
+
if (typeof window.isSecureContext === 'boolean') {
|
| 189 |
+
return window.isSecureContext;
|
| 190 |
+
}
|
| 191 |
+
|
| 192 |
+
// Fallback check for older browsers
|
| 193 |
+
const protocol = window.location.protocol;
|
| 194 |
+
const hostname = window.location.hostname;
|
| 195 |
+
|
| 196 |
+
// HTTPS is always secure
|
| 197 |
+
if (protocol === 'https:') {
|
| 198 |
+
return true;
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
// localhost and 127.0.0.1 are considered secure even over HTTP
|
| 202 |
+
if (protocol === 'http:') {
|
| 203 |
+
if (hostname === 'localhost' ||
|
| 204 |
+
hostname === '127.0.0.1' ||
|
| 205 |
+
hostname === '[::1]' ||
|
| 206 |
+
hostname.endsWith('.localhost')) {
|
| 207 |
+
return true;
|
| 208 |
+
}
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
// file:// protocol - depends on browser
|
| 212 |
+
if (protocol === 'file:') {
|
| 213 |
+
console.warn('[Voice] file:// protocol detected - may not support getUserMedia');
|
| 214 |
+
return false;
|
| 215 |
+
}
|
| 216 |
+
|
| 217 |
+
return false;
|
| 218 |
+
},
|
| 219 |
+
|
| 220 |
+
/**
|
| 221 |
+
* Detect browser type and version
|
| 222 |
+
* @returns {object} Browser info
|
| 223 |
+
*/
|
| 224 |
+
detectBrowser() {
|
| 225 |
+
const ua = navigator.userAgent;
|
| 226 |
+
let browser = { name: 'unknown', version: 'unknown' };
|
| 227 |
+
|
| 228 |
+
if (ua.includes('Chrome') && !ua.includes('Edg')) {
|
| 229 |
+
const match = ua.match(/Chrome\/(\d+)/);
|
| 230 |
+
browser = { name: 'chrome', version: match ? match[1] : 'unknown' };
|
| 231 |
+
} else if (ua.includes('Safari') && !ua.includes('Chrome')) {
|
| 232 |
+
const match = ua.match(/Version\/(\d+)/);
|
| 233 |
+
browser = { name: 'safari', version: match ? match[1] : 'unknown' };
|
| 234 |
+
} else if (ua.includes('Firefox')) {
|
| 235 |
+
const match = ua.match(/Firefox\/(\d+)/);
|
| 236 |
+
browser = { name: 'firefox', version: match ? match[1] : 'unknown' };
|
| 237 |
+
} else if (ua.includes('Edg')) {
|
| 238 |
+
const match = ua.match(/Edg\/(\d+)/);
|
| 239 |
+
browser = { name: 'edge', version: match ? match[1] : 'unknown' };
|
| 240 |
+
}
|
| 241 |
+
|
| 242 |
+
console.log('[Voice] Detected browser:', browser.name, browser.version);
|
| 243 |
+
return browser;
|
| 244 |
+
},
|
| 245 |
+
|
| 246 |
+
/**
|
| 247 |
+
* Get all supported MIME types for MediaRecorder
|
| 248 |
+
* @returns {string[]} Array of supported MIME types
|
| 249 |
+
*/
|
| 250 |
+
getSupportedMimeTypes() {
|
| 251 |
+
const types = [
|
| 252 |
+
'audio/webm;codecs=opus',
|
| 253 |
+
'audio/webm',
|
| 254 |
+
'audio/ogg;codecs=opus',
|
| 255 |
+
'audio/ogg',
|
| 256 |
+
'audio/mp4',
|
| 257 |
+
'audio/mp4;codecs=mp4a.40.2',
|
| 258 |
+
'audio/mpeg',
|
| 259 |
+
'audio/wav',
|
| 260 |
+
'audio/aac'
|
| 261 |
+
];
|
| 262 |
+
|
| 263 |
+
return types.filter(type => {
|
| 264 |
+
try {
|
| 265 |
+
return MediaRecorder.isTypeSupported(type);
|
| 266 |
+
} catch (e) {
|
| 267 |
+
return false;
|
| 268 |
+
}
|
| 269 |
+
});
|
| 270 |
+
},
|
| 271 |
+
|
| 272 |
+
// ==========================================================================
|
| 273 |
+
// PUBLIC API - COMPATIBILITY CHECK
|
| 274 |
+
// ==========================================================================
|
| 275 |
+
|
| 276 |
+
/**
|
| 277 |
+
* Simple check if voice input is supported
|
| 278 |
+
* @returns {boolean} Support status
|
| 279 |
+
*/
|
| 280 |
+
isSupported() {
|
| 281 |
+
// Use cached capabilities if available
|
| 282 |
+
if (this._capabilities) {
|
| 283 |
+
return this._capabilities.supported;
|
| 284 |
+
}
|
| 285 |
+
|
| 286 |
+
// Quick check without full diagnostics
|
| 287 |
+
return !!(
|
| 288 |
+
this.isSecureContext() &&
|
| 289 |
+
navigator.mediaDevices &&
|
| 290 |
+
navigator.mediaDevices.getUserMedia &&
|
| 291 |
+
window.MediaRecorder
|
| 292 |
+
);
|
| 293 |
+
},
|
| 294 |
+
|
| 295 |
+
/**
|
| 296 |
+
* Get detailed capability report
|
| 297 |
+
* @returns {object} Capability details
|
| 298 |
+
*/
|
| 299 |
+
getCapabilities() {
|
| 300 |
+
if (!this._capabilities) {
|
| 301 |
+
this._capabilities = this.checkCapabilities();
|
| 302 |
+
}
|
| 303 |
+
return this._capabilities;
|
| 304 |
+
},
|
| 305 |
+
|
| 306 |
+
/**
|
| 307 |
+
* Get human-readable error message for unsupported browsers
|
| 308 |
+
* @returns {string} Error message
|
| 309 |
+
*/
|
| 310 |
+
getUnsupportedMessage() {
|
| 311 |
+
const caps = this.getCapabilities();
|
| 312 |
+
|
| 313 |
+
if (caps.supported) {
|
| 314 |
+
return null;
|
| 315 |
+
}
|
| 316 |
+
|
| 317 |
+
// Provide specific guidance based on what's missing
|
| 318 |
+
if (!caps.secureContext) {
|
| 319 |
+
return 'Voice input requires a secure connection. Please access via HTTPS or localhost.';
|
| 320 |
+
}
|
| 321 |
+
|
| 322 |
+
if (!caps.mediaDevices || !caps.getUserMedia) {
|
| 323 |
+
if (caps.browser.name === 'safari') {
|
| 324 |
+
return 'Voice input requires Safari 11 or later. Please update your browser.';
|
| 325 |
+
}
|
| 326 |
+
return 'Your browser does not support voice input. Please use Chrome, Firefox, or Edge.';
|
| 327 |
+
}
|
| 328 |
+
|
| 329 |
+
if (!caps.mediaRecorder) {
|
| 330 |
+
return 'Your browser does not support audio recording. Please use a modern browser.';
|
| 331 |
+
}
|
| 332 |
+
|
| 333 |
+
return 'Voice input is not supported in this browser configuration.';
|
| 334 |
+
},
|
| 335 |
+
|
| 336 |
+
// ==========================================================================
|
| 337 |
+
// PUBLIC API - PERMISSIONS
|
| 338 |
+
// ==========================================================================
|
| 339 |
+
|
| 340 |
+
/**
|
| 341 |
+
* Request microphone permission
|
| 342 |
+
* @returns {Promise<boolean>} Permission granted
|
| 343 |
+
*/
|
| 344 |
+
async requestPermission() {
|
| 345 |
+
// Check capabilities first
|
| 346 |
+
if (!this.isSupported()) {
|
| 347 |
+
const message = this.getUnsupportedMessage();
|
| 348 |
+
console.error('[Voice] Cannot request permission:', message);
|
| 349 |
+
this.onError(message);
|
| 350 |
+
return false;
|
| 351 |
+
}
|
| 352 |
+
|
| 353 |
+
try {
|
| 354 |
+
console.log('[Voice] Requesting microphone permission...');
|
| 355 |
+
|
| 356 |
+
// Request permission with audio constraints
|
| 357 |
+
const stream = await navigator.mediaDevices.getUserMedia({
|
| 358 |
+
audio: {
|
| 359 |
+
channelCount: 1,
|
| 360 |
+
sampleRate: { ideal: 16000 },
|
| 361 |
+
echoCancellation: true,
|
| 362 |
+
noiseSuppression: true,
|
| 363 |
+
autoGainControl: true
|
| 364 |
+
}
|
| 365 |
+
});
|
| 366 |
+
|
| 367 |
+
// Permission granted - stop the test stream immediately
|
| 368 |
+
stream.getTracks().forEach(track => {
|
| 369 |
+
track.stop();
|
| 370 |
+
console.log('[Voice] Test track stopped:', track.label);
|
| 371 |
+
});
|
| 372 |
+
|
| 373 |
+
console.log('[Voice] ✅ Microphone permission granted');
|
| 374 |
+
return true;
|
| 375 |
+
|
| 376 |
+
} catch (error) {
|
| 377 |
+
console.error('[Voice] Permission error:', error.name, error.message);
|
| 378 |
+
|
| 379 |
+
// Handle specific error types
|
| 380 |
+
let errorMessage;
|
| 381 |
+
|
| 382 |
+
switch (error.name) {
|
| 383 |
+
case 'NotAllowedError':
|
| 384 |
+
case 'PermissionDeniedError':
|
| 385 |
+
errorMessage = 'Microphone access denied. Please allow microphone permission in your browser settings.';
|
| 386 |
+
this.onPermissionDenied();
|
| 387 |
+
break;
|
| 388 |
+
|
| 389 |
+
case 'NotFoundError':
|
| 390 |
+
case 'DevicesNotFoundError':
|
| 391 |
+
errorMessage = 'No microphone found. Please connect a microphone and try again.';
|
| 392 |
+
break;
|
| 393 |
+
|
| 394 |
+
case 'NotReadableError':
|
| 395 |
+
case 'TrackStartError':
|
| 396 |
+
errorMessage = 'Microphone is in use by another application. Please close other apps using the microphone.';
|
| 397 |
+
break;
|
| 398 |
+
|
| 399 |
+
case 'OverconstrainedError':
|
| 400 |
+
// Try again with simpler constraints
|
| 401 |
+
console.log('[Voice] Retrying with basic audio constraints...');
|
| 402 |
+
return await this.requestPermissionBasic();
|
| 403 |
+
|
| 404 |
+
case 'AbortError':
|
| 405 |
+
errorMessage = 'Microphone access was aborted. Please try again.';
|
| 406 |
+
break;
|
| 407 |
+
|
| 408 |
+
case 'SecurityError':
|
| 409 |
+
errorMessage = 'Microphone access blocked due to security policy. Please use HTTPS.';
|
| 410 |
+
break;
|
| 411 |
+
|
| 412 |
+
default:
|
| 413 |
+
errorMessage = `Microphone error: ${error.message || error.name}`;
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
this.onError(errorMessage);
|
| 417 |
+
return false;
|
| 418 |
+
}
|
| 419 |
+
},
|
| 420 |
+
|
| 421 |
+
/**
|
| 422 |
+
* Fallback permission request with basic constraints
|
| 423 |
+
* @returns {Promise<boolean>} Permission granted
|
| 424 |
+
*/
|
| 425 |
+
async requestPermissionBasic() {
|
| 426 |
+
try {
|
| 427 |
+
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
| 428 |
+
stream.getTracks().forEach(track => track.stop());
|
| 429 |
+
console.log('[Voice] ✅ Microphone permission granted (basic)');
|
| 430 |
+
return true;
|
| 431 |
+
} catch (error) {
|
| 432 |
+
console.error('[Voice] Basic permission also failed:', error);
|
| 433 |
+
this.onError('Microphone access denied. Please check browser permissions.');
|
| 434 |
+
this.onPermissionDenied();
|
| 435 |
+
return false;
|
| 436 |
+
}
|
| 437 |
+
},
|
| 438 |
+
|
| 439 |
+
/**
|
| 440 |
+
* Check current permission status without prompting
|
| 441 |
+
* @returns {Promise<string>} Permission state: 'granted', 'denied', 'prompt', or 'unknown'
|
| 442 |
+
*/
|
| 443 |
+
async checkPermissionStatus() {
|
| 444 |
+
try {
|
| 445 |
+
// Use Permissions API if available
|
| 446 |
+
if (navigator.permissions && navigator.permissions.query) {
|
| 447 |
+
const result = await navigator.permissions.query({ name: 'microphone' });
|
| 448 |
+
console.log('[Voice] Permission status:', result.state);
|
| 449 |
+
return result.state;
|
| 450 |
+
}
|
| 451 |
+
} catch (e) {
|
| 452 |
+
// Permissions API not supported or microphone not queryable
|
| 453 |
+
console.log('[Voice] Permissions API not available for microphone');
|
| 454 |
+
}
|
| 455 |
+
|
| 456 |
+
return 'unknown';
|
| 457 |
+
},
|
| 458 |
+
|
| 459 |
+
// ==========================================================================
|
| 460 |
+
// PUBLIC API - RECORDING
|
| 461 |
+
// ==========================================================================
|
| 462 |
+
|
| 463 |
+
/**
|
| 464 |
+
* Start recording audio
|
| 465 |
+
* @returns {Promise<boolean>} Started successfully
|
| 466 |
+
*/
|
| 467 |
+
async startRecording() {
|
| 468 |
+
// Prevent double recording
|
| 469 |
+
if (this.isRecording) {
|
| 470 |
+
console.warn('[Voice] Already recording');
|
| 471 |
+
return false;
|
| 472 |
+
}
|
| 473 |
+
|
| 474 |
+
// Check support
|
| 475 |
+
if (!this.isSupported()) {
|
| 476 |
+
const message = this.getUnsupportedMessage();
|
| 477 |
+
this.onError(message);
|
| 478 |
+
return false;
|
| 479 |
+
}
|
| 480 |
+
|
| 481 |
+
try {
|
| 482 |
+
console.log('[Voice] Starting recording...');
|
| 483 |
+
|
| 484 |
+
// Get audio stream with optimal settings for speech
|
| 485 |
+
this.stream = await navigator.mediaDevices.getUserMedia({
|
| 486 |
+
audio: {
|
| 487 |
+
channelCount: 1,
|
| 488 |
+
sampleRate: { ideal: 16000, min: 8000, max: 48000 },
|
| 489 |
+
echoCancellation: true,
|
| 490 |
+
noiseSuppression: true,
|
| 491 |
+
autoGainControl: true
|
| 492 |
+
}
|
| 493 |
+
});
|
| 494 |
+
|
| 495 |
+
console.log('[Voice] Audio stream acquired');
|
| 496 |
+
|
| 497 |
+
// Get best MIME type for this browser
|
| 498 |
+
const mimeType = this.getBestMimeType();
|
| 499 |
+
console.log('[Voice] Using MIME type:', mimeType || 'default');
|
| 500 |
+
|
| 501 |
+
// Create MediaRecorder with options
|
| 502 |
+
const options = {};
|
| 503 |
+
if (mimeType) {
|
| 504 |
+
options.mimeType = mimeType;
|
| 505 |
+
}
|
| 506 |
+
|
| 507 |
+
// Add bitrate for better quality/size balance
|
| 508 |
+
options.audioBitsPerSecond = 128000;
|
| 509 |
+
|
| 510 |
+
try {
|
| 511 |
+
this.mediaRecorder = new MediaRecorder(this.stream, options);
|
| 512 |
+
} catch (e) {
|
| 513 |
+
// Fallback without options if it fails
|
| 514 |
+
console.warn('[Voice] MediaRecorder with options failed, using defaults');
|
| 515 |
+
this.mediaRecorder = new MediaRecorder(this.stream);
|
| 516 |
+
}
|
| 517 |
+
|
| 518 |
+
this.audioChunks = [];
|
| 519 |
+
|
| 520 |
+
// Handle data available
|
| 521 |
+
this.mediaRecorder.ondataavailable = (event) => {
|
| 522 |
+
if (event.data && event.data.size > 0) {
|
| 523 |
+
this.audioChunks.push(event.data);
|
| 524 |
+
console.log('[Voice] Chunk received:', event.data.size, 'bytes');
|
| 525 |
+
}
|
| 526 |
+
};
|
| 527 |
+
|
| 528 |
+
// Handle recording stop
|
| 529 |
+
this.mediaRecorder.onstop = () => {
|
| 530 |
+
console.log('[Voice] MediaRecorder stopped, processing...');
|
| 531 |
+
this.processRecording();
|
| 532 |
+
};
|
| 533 |
+
|
| 534 |
+
// Handle errors
|
| 535 |
+
this.mediaRecorder.onerror = (event) => {
|
| 536 |
+
console.error('[Voice] MediaRecorder error:', event.error);
|
| 537 |
+
this.onError('Recording error: ' + (event.error?.message || 'Unknown error'));
|
| 538 |
+
this.cleanup();
|
| 539 |
+
};
|
| 540 |
+
|
| 541 |
+
// Start recording - collect data every 500ms for responsive UI
|
| 542 |
+
this.mediaRecorder.start(500);
|
| 543 |
+
this.isRecording = true;
|
| 544 |
+
this.recordingStartTime = Date.now();
|
| 545 |
+
|
| 546 |
+
// Set maximum duration timer
|
| 547 |
+
this.recordingTimer = setTimeout(() => {
|
| 548 |
+
if (this.isRecording) {
|
| 549 |
+
console.log('[Voice] Maximum duration reached, stopping...');
|
| 550 |
+
this.stopRecording();
|
| 551 |
+
}
|
| 552 |
+
}, this.maxDuration);
|
| 553 |
+
|
| 554 |
+
// Notify callback
|
| 555 |
+
this.onRecordingStart();
|
| 556 |
+
console.log('[Voice] ✅ Recording started');
|
| 557 |
+
|
| 558 |
+
return true;
|
| 559 |
+
|
| 560 |
+
} catch (error) {
|
| 561 |
+
console.error('[Voice] Start recording failed:', error);
|
| 562 |
+
|
| 563 |
+
// Handle specific errors
|
| 564 |
+
if (error.name === 'NotAllowedError') {
|
| 565 |
+
this.onError('Microphone permission denied. Please allow access.');
|
| 566 |
+
this.onPermissionDenied();
|
| 567 |
+
} else if (error.name === 'NotFoundError') {
|
| 568 |
+
this.onError('No microphone found. Please connect a microphone.');
|
| 569 |
+
} else {
|
| 570 |
+
this.onError('Failed to start recording: ' + (error.message || error.name));
|
| 571 |
+
}
|
| 572 |
+
|
| 573 |
+
this.cleanup();
|
| 574 |
+
return false;
|
| 575 |
+
}
|
| 576 |
+
},
|
| 577 |
+
|
| 578 |
+
/**
|
| 579 |
+
* Stop recording
|
| 580 |
+
*/
|
| 581 |
+
stopRecording() {
|
| 582 |
+
if (!this.isRecording) {
|
| 583 |
+
console.log('[Voice] Not recording, nothing to stop');
|
| 584 |
+
return;
|
| 585 |
+
}
|
| 586 |
+
|
| 587 |
+
console.log('[Voice] Stopping recording...');
|
| 588 |
+
|
| 589 |
+
// Clear max duration timer
|
| 590 |
+
if (this.recordingTimer) {
|
| 591 |
+
clearTimeout(this.recordingTimer);
|
| 592 |
+
this.recordingTimer = null;
|
| 593 |
+
}
|
| 594 |
+
|
| 595 |
+
// Check recording duration
|
| 596 |
+
const duration = Date.now() - (this.recordingStartTime || Date.now());
|
| 597 |
+
if (duration < this.minDuration) {
|
| 598 |
+
console.warn('[Voice] Recording too short:', duration, 'ms');
|
| 599 |
+
}
|
| 600 |
+
|
| 601 |
+
// Stop the MediaRecorder (triggers onstop -> processRecording)
|
| 602 |
+
if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
|
| 603 |
+
try {
|
| 604 |
+
this.mediaRecorder.stop();
|
| 605 |
+
} catch (e) {
|
| 606 |
+
console.warn('[Voice] Error stopping MediaRecorder:', e);
|
| 607 |
+
}
|
| 608 |
+
}
|
| 609 |
+
|
| 610 |
+
// Stop all audio tracks
|
| 611 |
+
if (this.stream) {
|
| 612 |
+
this.stream.getTracks().forEach(track => {
|
| 613 |
+
track.stop();
|
| 614 |
+
console.log('[Voice] Track stopped:', track.label);
|
| 615 |
+
});
|
| 616 |
+
}
|
| 617 |
+
|
| 618 |
+
this.isRecording = false;
|
| 619 |
+
this.onRecordingStop();
|
| 620 |
+
console.log('[Voice] Recording stopped after', duration, 'ms');
|
| 621 |
+
},
|
| 622 |
+
|
| 623 |
+
/**
|
| 624 |
+
* Toggle recording state
|
| 625 |
+
* @returns {Promise<boolean>} New recording state (true = now recording)
|
| 626 |
+
*/
|
| 627 |
+
async toggleRecording() {
|
| 628 |
+
if (this.isRecording) {
|
| 629 |
+
this.stopRecording();
|
| 630 |
+
return false;
|
| 631 |
+
} else {
|
| 632 |
+
return await this.startRecording();
|
| 633 |
+
}
|
| 634 |
+
},
|
| 635 |
+
|
| 636 |
+
/**
|
| 637 |
+
* Cancel recording without processing
|
| 638 |
+
*/
|
| 639 |
+
cancelRecording() {
|
| 640 |
+
console.log('[Voice] Cancelling recording...');
|
| 641 |
+
|
| 642 |
+
// Clear timer
|
| 643 |
+
if (this.recordingTimer) {
|
| 644 |
+
clearTimeout(this.recordingTimer);
|
| 645 |
+
this.recordingTimer = null;
|
| 646 |
+
}
|
| 647 |
+
|
| 648 |
+
// Remove the onstop handler to prevent processing
|
| 649 |
+
if (this.mediaRecorder) {
|
| 650 |
+
this.mediaRecorder.onstop = null;
|
| 651 |
+
|
| 652 |
+
if (this.mediaRecorder.state !== 'inactive') {
|
| 653 |
+
try {
|
| 654 |
+
this.mediaRecorder.stop();
|
| 655 |
+
} catch (e) {
|
| 656 |
+
// Ignore errors during cancel
|
| 657 |
+
}
|
| 658 |
+
}
|
| 659 |
+
}
|
| 660 |
+
|
| 661 |
+
this.cleanup();
|
| 662 |
+
this.onRecordingStop();
|
| 663 |
+
console.log('[Voice] Recording cancelled');
|
| 664 |
+
},
|
| 665 |
+
|
| 666 |
+
// ==========================================================================
|
| 667 |
+
// AUDIO PROCESSING
|
| 668 |
+
// ==========================================================================
|
| 669 |
+
|
| 670 |
+
/**
|
| 671 |
+
* Process recorded audio and send for transcription
|
| 672 |
+
*/
|
| 673 |
+
async processRecording() {
|
| 674 |
+
// Check if we have audio data
|
| 675 |
+
if (!this.audioChunks || this.audioChunks.length === 0) {
|
| 676 |
+
console.warn('[Voice] No audio chunks to process');
|
| 677 |
+
this.onError('No audio recorded. Please try again.');
|
| 678 |
+
this.cleanup();
|
| 679 |
+
return;
|
| 680 |
+
}
|
| 681 |
+
|
| 682 |
+
try {
|
| 683 |
+
// Create blob from chunks
|
| 684 |
+
const mimeType = this.mediaRecorder?.mimeType || 'audio/webm';
|
| 685 |
+
const audioBlob = new Blob(this.audioChunks, { type: mimeType });
|
| 686 |
+
|
| 687 |
+
console.log('[Voice] Processing audio blob:', {
|
| 688 |
+
size: audioBlob.size,
|
| 689 |
+
type: mimeType,
|
| 690 |
+
chunks: this.audioChunks.length
|
| 691 |
+
});
|
| 692 |
+
|
| 693 |
+
// Validate blob size
|
| 694 |
+
if (audioBlob.size < 1000) {
|
| 695 |
+
console.warn('[Voice] Audio blob too small:', audioBlob.size);
|
| 696 |
+
this.onError('Recording too short. Please speak longer.');
|
| 697 |
+
this.cleanup();
|
| 698 |
+
return;
|
| 699 |
+
}
|
| 700 |
+
|
| 701 |
+
// Get file extension
|
| 702 |
+
const extension = this.getExtensionFromMimeType(mimeType);
|
| 703 |
+
const filename = `recording_${Date.now()}.${extension}`;
|
| 704 |
+
|
| 705 |
+
// Create File object for upload
|
| 706 |
+
const audioFile = new File([audioBlob], filename, { type: mimeType });
|
| 707 |
+
|
| 708 |
+
console.log('[Voice] Sending for transcription:', filename);
|
| 709 |
+
|
| 710 |
+
// Get current language for hint
|
| 711 |
+
const languageHint = typeof I18n !== 'undefined' ? I18n.getLanguage() : 'en';
|
| 712 |
+
|
| 713 |
+
// Send to backend
|
| 714 |
+
const result = await this.sendForTranscription(audioFile, languageHint);
|
| 715 |
+
|
| 716 |
+
if (result.success && result.text) {
|
| 717 |
+
console.log('[Voice] ✅ Transcription successful:', result.text);
|
| 718 |
+
this.onTranscription(result.text, result);
|
| 719 |
+
} else {
|
| 720 |
+
const errorMsg = result.error || 'Transcription failed. Please try again.';
|
| 721 |
+
console.error('[Voice] Transcription failed:', errorMsg);
|
| 722 |
+
this.onError(errorMsg);
|
| 723 |
+
}
|
| 724 |
+
|
| 725 |
+
} catch (error) {
|
| 726 |
+
console.error('[Voice] Processing error:', error);
|
| 727 |
+
this.onError('Failed to process recording: ' + (error.message || 'Unknown error'));
|
| 728 |
+
} finally {
|
| 729 |
+
this.cleanup();
|
| 730 |
+
}
|
| 731 |
+
},
|
| 732 |
+
|
| 733 |
+
/**
|
| 734 |
+
* Send audio file to backend for transcription
|
| 735 |
+
* @param {File} audioFile - Audio file to transcribe
|
| 736 |
+
* @param {string} languageHint - Language hint (en, ha, yo, ig)
|
| 737 |
+
* @returns {Promise<object>} Transcription result
|
| 738 |
+
*/
|
| 739 |
+
async sendForTranscription(audioFile, languageHint = 'en') {
|
| 740 |
+
// Check if FarmEyesAPI is available
|
| 741 |
+
if (typeof FarmEyesAPI !== 'undefined' && FarmEyesAPI.transcribeAudio) {
|
| 742 |
+
return await FarmEyesAPI.transcribeAudio(audioFile, languageHint);
|
| 743 |
+
}
|
| 744 |
+
|
| 745 |
+
// Fallback: Direct API call
|
| 746 |
+
console.log('[Voice] Using direct API call for transcription');
|
| 747 |
+
|
| 748 |
+
try {
|
| 749 |
+
const formData = new FormData();
|
| 750 |
+
formData.append('audio', audioFile);
|
| 751 |
+
formData.append('language_hint', languageHint);
|
| 752 |
+
|
| 753 |
+
const response = await fetch('/api/transcribe', {
|
| 754 |
+
method: 'POST',
|
| 755 |
+
body: formData
|
| 756 |
+
});
|
| 757 |
+
|
| 758 |
+
if (!response.ok) {
|
| 759 |
+
const errorData = await response.json().catch(() => ({}));
|
| 760 |
+
throw new Error(errorData.detail || `HTTP ${response.status}`);
|
| 761 |
+
}
|
| 762 |
+
|
| 763 |
+
return await response.json();
|
| 764 |
+
|
| 765 |
+
} catch (error) {
|
| 766 |
+
console.error('[Voice] Transcription API error:', error);
|
| 767 |
+
return {
|
| 768 |
+
success: false,
|
| 769 |
+
error: error.message || 'Failed to connect to transcription service'
|
| 770 |
+
};
|
| 771 |
+
}
|
| 772 |
+
},
|
| 773 |
+
|
| 774 |
+
// ==========================================================================
|
| 775 |
+
// UTILITY METHODS
|
| 776 |
+
// ==========================================================================
|
| 777 |
+
|
| 778 |
+
/**
|
| 779 |
+
* Get the best MIME type for the current browser
|
| 780 |
+
* @returns {string|null} Best supported MIME type
|
| 781 |
+
*/
|
| 782 |
+
getBestMimeType() {
|
| 783 |
+
// Preferred order: webm with opus is best for speech
|
| 784 |
+
const preferredTypes = [
|
| 785 |
+
'audio/webm;codecs=opus',
|
| 786 |
+
'audio/webm',
|
| 787 |
+
'audio/ogg;codecs=opus',
|
| 788 |
+
'audio/ogg',
|
| 789 |
+
'audio/mp4',
|
| 790 |
+
'audio/wav'
|
| 791 |
+
];
|
| 792 |
+
|
| 793 |
+
for (const type of preferredTypes) {
|
| 794 |
+
try {
|
| 795 |
+
if (MediaRecorder.isTypeSupported(type)) {
|
| 796 |
+
return type;
|
| 797 |
+
}
|
| 798 |
+
} catch (e) {
|
| 799 |
+
// Continue to next type
|
| 800 |
+
}
|
| 801 |
+
}
|
| 802 |
+
|
| 803 |
+
return null;
|
| 804 |
+
},
|
| 805 |
+
|
| 806 |
+
/**
|
| 807 |
+
* Get file extension from MIME type
|
| 808 |
+
* @param {string} mimeType - MIME type
|
| 809 |
+
* @returns {string} File extension
|
| 810 |
+
*/
|
| 811 |
+
getExtensionFromMimeType(mimeType) {
|
| 812 |
+
const mimeToExt = {
|
| 813 |
+
'audio/webm': 'webm',
|
| 814 |
+
'audio/webm;codecs=opus': 'webm',
|
| 815 |
+
'audio/ogg': 'ogg',
|
| 816 |
+
'audio/ogg;codecs=opus': 'ogg',
|
| 817 |
+
'audio/mp4': 'm4a',
|
| 818 |
+
'audio/mp4;codecs=mp4a.40.2': 'm4a',
|
| 819 |
+
'audio/mpeg': 'mp3',
|
| 820 |
+
'audio/wav': 'wav',
|
| 821 |
+
'audio/aac': 'aac'
|
| 822 |
+
};
|
| 823 |
+
|
| 824 |
+
// Handle MIME types with parameters
|
| 825 |
+
const baseMime = mimeType.split(';')[0];
|
| 826 |
+
return mimeToExt[mimeType] || mimeToExt[baseMime] || 'webm';
|
| 827 |
+
},
|
| 828 |
+
|
| 829 |
+
/**
|
| 830 |
+
* Cleanup all resources
|
| 831 |
+
*/
|
| 832 |
+
cleanup() {
|
| 833 |
+
console.log('[Voice] Cleaning up resources...');
|
| 834 |
+
|
| 835 |
+
// Clear audio chunks
|
| 836 |
+
this.audioChunks = [];
|
| 837 |
+
|
| 838 |
+
// Clear MediaRecorder
|
| 839 |
+
if (this.mediaRecorder) {
|
| 840 |
+
this.mediaRecorder.ondataavailable = null;
|
| 841 |
+
this.mediaRecorder.onstop = null;
|
| 842 |
+
this.mediaRecorder.onerror = null;
|
| 843 |
+
this.mediaRecorder = null;
|
| 844 |
+
}
|
| 845 |
+
|
| 846 |
+
// Stop and clear stream
|
| 847 |
+
if (this.stream) {
|
| 848 |
+
this.stream.getTracks().forEach(track => {
|
| 849 |
+
try {
|
| 850 |
+
track.stop();
|
| 851 |
+
} catch (e) {
|
| 852 |
+
// Ignore errors during cleanup
|
| 853 |
+
}
|
| 854 |
+
});
|
| 855 |
+
this.stream = null;
|
| 856 |
+
}
|
| 857 |
+
|
| 858 |
+
// Clear timer
|
| 859 |
+
if (this.recordingTimer) {
|
| 860 |
+
clearTimeout(this.recordingTimer);
|
| 861 |
+
this.recordingTimer = null;
|
| 862 |
+
}
|
| 863 |
+
|
| 864 |
+
// Reset state
|
| 865 |
+
this.isRecording = false;
|
| 866 |
+
this.recordingStartTime = null;
|
| 867 |
+
},
|
| 868 |
+
|
| 869 |
+
/**
|
| 870 |
+
* Get current recording state
|
| 871 |
+
* @returns {boolean} Is currently recording
|
| 872 |
+
*/
|
| 873 |
+
getIsRecording() {
|
| 874 |
+
return this.isRecording;
|
| 875 |
+
},
|
| 876 |
+
|
| 877 |
+
/**
|
| 878 |
+
* Get recording duration in milliseconds
|
| 879 |
+
* @returns {number} Duration in ms, or 0 if not recording
|
| 880 |
+
*/
|
| 881 |
+
getRecordingDuration() {
|
| 882 |
+
if (!this.isRecording || !this.recordingStartTime) {
|
| 883 |
+
return 0;
|
| 884 |
+
}
|
| 885 |
+
return Date.now() - this.recordingStartTime;
|
| 886 |
+
},
|
| 887 |
+
|
| 888 |
+
// ==========================================================================
|
| 889 |
+
// DIAGNOSTIC METHODS
|
| 890 |
+
// ==========================================================================
|
| 891 |
+
|
| 892 |
+
/**
|
| 893 |
+
* Run full diagnostic and log to console
|
| 894 |
+
* Useful for debugging issues
|
| 895 |
+
*/
|
| 896 |
+
runDiagnostic() {
|
| 897 |
+
console.group('[Voice] Running Diagnostic');
|
| 898 |
+
|
| 899 |
+
console.log('=== Browser Info ===');
|
| 900 |
+
console.log('User Agent:', navigator.userAgent);
|
| 901 |
+
console.log('Platform:', navigator.platform);
|
| 902 |
+
|
| 903 |
+
console.log('\n=== Security Context ===');
|
| 904 |
+
console.log('URL:', window.location.href);
|
| 905 |
+
console.log('Protocol:', window.location.protocol);
|
| 906 |
+
console.log('Hostname:', window.location.hostname);
|
| 907 |
+
console.log('isSecureContext:', window.isSecureContext);
|
| 908 |
+
console.log('Our check:', this.isSecureContext());
|
| 909 |
+
|
| 910 |
+
console.log('\n=== API Availability ===');
|
| 911 |
+
console.log('navigator:', typeof navigator);
|
| 912 |
+
console.log('navigator.mediaDevices:', typeof navigator.mediaDevices);
|
| 913 |
+
console.log('getUserMedia:', typeof navigator.mediaDevices?.getUserMedia);
|
| 914 |
+
console.log('MediaRecorder:', typeof window.MediaRecorder);
|
| 915 |
+
console.log('AudioContext:', typeof (window.AudioContext || window.webkitAudioContext));
|
| 916 |
+
|
| 917 |
+
console.log('\n=== MediaRecorder MIME Types ===');
|
| 918 |
+
if (window.MediaRecorder) {
|
| 919 |
+
const types = this.getSupportedMimeTypes();
|
| 920 |
+
types.forEach(type => console.log(' ✓', type));
|
| 921 |
+
if (types.length === 0) {
|
| 922 |
+
console.log(' ❌ No supported MIME types');
|
| 923 |
+
}
|
| 924 |
+
}
|
| 925 |
+
|
| 926 |
+
console.log('\n=== Full Capabilities ===');
|
| 927 |
+
const caps = this.checkCapabilities();
|
| 928 |
+
console.log('Supported:', caps.supported);
|
| 929 |
+
console.log('Issues:', caps.issues);
|
| 930 |
+
|
| 931 |
+
console.groupEnd();
|
| 932 |
+
|
| 933 |
+
return caps;
|
| 934 |
+
}
|
| 935 |
+
};
|
| 936 |
+
|
| 937 |
+
// ==========================================================================
|
| 938 |
+
// EXPORT
|
| 939 |
+
// ==========================================================================
|
| 940 |
+
|
| 941 |
+
// Export for use in other modules
|
| 942 |
+
window.VoiceInput = VoiceInput;
|
| 943 |
+
|
| 944 |
+
// Auto-run diagnostic in development
|
| 945 |
+
if (window.location.hostname === 'localhost' || window.location.hostname === '127.0.0.1') {
|
| 946 |
+
// Delay diagnostic to ensure page is fully loaded
|
| 947 |
+
setTimeout(() => {
|
| 948 |
+
console.log('[Voice] Development mode - running diagnostic...');
|
| 949 |
+
VoiceInput.runDiagnostic();
|
| 950 |
+
}, 1000);
|
| 951 |
+
}
|
gitignore
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# =============================================================================
|
| 2 |
+
# FarmEyes - .gitignore for HuggingFace Spaces
|
| 3 |
+
# =============================================================================
|
| 4 |
+
|
| 5 |
+
# -----------------------------------------------------------------------------
|
| 6 |
+
# Python
|
| 7 |
+
# -----------------------------------------------------------------------------
|
| 8 |
+
__pycache__/
|
| 9 |
+
*.py[cod]
|
| 10 |
+
*$py.class
|
| 11 |
+
*.so
|
| 12 |
+
.Python
|
| 13 |
+
build/
|
| 14 |
+
develop-eggs/
|
| 15 |
+
dist/
|
| 16 |
+
downloads/
|
| 17 |
+
eggs/
|
| 18 |
+
.eggs/
|
| 19 |
+
lib/
|
| 20 |
+
lib64/
|
| 21 |
+
parts/
|
| 22 |
+
sdist/
|
| 23 |
+
var/
|
| 24 |
+
wheels/
|
| 25 |
+
*.egg-info/
|
| 26 |
+
.installed.cfg
|
| 27 |
+
*.egg
|
| 28 |
+
|
| 29 |
+
# -----------------------------------------------------------------------------
|
| 30 |
+
# Virtual Environments
|
| 31 |
+
# -----------------------------------------------------------------------------
|
| 32 |
+
env/
|
| 33 |
+
venv/
|
| 34 |
+
.venv/
|
| 35 |
+
ENV/
|
| 36 |
+
env.bak/
|
| 37 |
+
venv.bak/
|
| 38 |
+
|
| 39 |
+
# -----------------------------------------------------------------------------
|
| 40 |
+
# Environment Variables (NEVER commit secrets!)
|
| 41 |
+
# -----------------------------------------------------------------------------
|
| 42 |
+
.env
|
| 43 |
+
.env.local
|
| 44 |
+
.env.*.local
|
| 45 |
+
*.env
|
| 46 |
+
|
| 47 |
+
# -----------------------------------------------------------------------------
|
| 48 |
+
# IDE & Editors
|
| 49 |
+
# -----------------------------------------------------------------------------
|
| 50 |
+
.idea/
|
| 51 |
+
.vscode/
|
| 52 |
+
*.swp
|
| 53 |
+
*.swo
|
| 54 |
+
*.sublime-workspace
|
| 55 |
+
*.sublime-project
|
| 56 |
+
.project
|
| 57 |
+
.pydevproject
|
| 58 |
+
.settings/
|
| 59 |
+
|
| 60 |
+
# -----------------------------------------------------------------------------
|
| 61 |
+
# macOS
|
| 62 |
+
# -----------------------------------------------------------------------------
|
| 63 |
+
.DS_Store
|
| 64 |
+
.AppleDouble
|
| 65 |
+
.LSOverride
|
| 66 |
+
._*
|
| 67 |
+
.Spotlight-V100
|
| 68 |
+
.Trashes
|
| 69 |
+
|
| 70 |
+
# -----------------------------------------------------------------------------
|
| 71 |
+
# Large Model Files (Download at runtime instead)
|
| 72 |
+
# -----------------------------------------------------------------------------
|
| 73 |
+
# GGUF models are downloaded from HuggingFace Hub at runtime
|
| 74 |
+
*.gguf
|
| 75 |
+
*.bin
|
| 76 |
+
*.safetensors
|
| 77 |
+
|
| 78 |
+
# Note: YOLOv11 model (farmeyes_yolov11.pt) IS uploaded
|
| 79 |
+
# because it's only 19.2MB
|
| 80 |
+
|
| 81 |
+
# -----------------------------------------------------------------------------
|
| 82 |
+
# Logs & Temp Files
|
| 83 |
+
# -----------------------------------------------------------------------------
|
| 84 |
+
*.log
|
| 85 |
+
logs/
|
| 86 |
+
temp/
|
| 87 |
+
tmp/
|
| 88 |
+
*.tmp
|
| 89 |
+
*.temp
|
| 90 |
+
|
| 91 |
+
# -----------------------------------------------------------------------------
|
| 92 |
+
# User Uploads & Generated Files
|
| 93 |
+
# -----------------------------------------------------------------------------
|
| 94 |
+
uploads/
|
| 95 |
+
outputs/
|
| 96 |
+
*.wav
|
| 97 |
+
*.mp3
|
| 98 |
+
*.ogg
|
| 99 |
+
|
| 100 |
+
# -----------------------------------------------------------------------------
|
| 101 |
+
# Jupyter Notebooks
|
| 102 |
+
# -----------------------------------------------------------------------------
|
| 103 |
+
.ipynb_checkpoints/
|
| 104 |
+
*.ipynb
|
| 105 |
+
|
| 106 |
+
# -----------------------------------------------------------------------------
|
| 107 |
+
# Testing
|
| 108 |
+
# -----------------------------------------------------------------------------
|
| 109 |
+
.pytest_cache/
|
| 110 |
+
.coverage
|
| 111 |
+
htmlcov/
|
| 112 |
+
.tox/
|
| 113 |
+
.nox/
|
| 114 |
+
|
| 115 |
+
# -----------------------------------------------------------------------------
|
| 116 |
+
# HuggingFace Cache (created at runtime)
|
| 117 |
+
# -----------------------------------------------------------------------------
|
| 118 |
+
.cache/
|
| 119 |
+
huggingface/
|
main.py
ADDED
|
@@ -0,0 +1,442 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Main Application
|
| 3 |
+
=========================
|
| 4 |
+
FastAPI backend server for FarmEyes crop disease detection.
|
| 5 |
+
|
| 6 |
+
FIXED:
|
| 7 |
+
- Preloads GGUF model at startup for better performance
|
| 8 |
+
- Serves static files correctly for frontend
|
| 9 |
+
|
| 10 |
+
Run: python main.py
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import os
|
| 14 |
+
import sys
|
| 15 |
+
from pathlib import Path
|
| 16 |
+
from contextlib import asynccontextmanager
|
| 17 |
+
from datetime import datetime
|
| 18 |
+
import logging
|
| 19 |
+
|
| 20 |
+
# Add project root to path
|
| 21 |
+
PROJECT_ROOT = Path(__file__).parent.resolve()
|
| 22 |
+
sys.path.insert(0, str(PROJECT_ROOT))
|
| 23 |
+
|
| 24 |
+
from fastapi import FastAPI, HTTPException, Request
|
| 25 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 26 |
+
from fastapi.staticfiles import StaticFiles
|
| 27 |
+
from fastapi.responses import HTMLResponse, JSONResponse, FileResponse
|
| 28 |
+
import uvicorn
|
| 29 |
+
|
| 30 |
+
# Configure logging
|
| 31 |
+
logging.basicConfig(
|
| 32 |
+
level=logging.INFO,
|
| 33 |
+
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
| 34 |
+
)
|
| 35 |
+
logger = logging.getLogger(__name__)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# =============================================================================
|
| 39 |
+
# APPLICATION LIFESPAN
|
| 40 |
+
# =============================================================================
|
| 41 |
+
|
| 42 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Manage application startup and shutdown.

    Startup: print the config summary, warm up the session manager, and
    preload the N-ATLaS GGUF model so the first chat request is fast.
    Shutdown: unload the Whisper and N-ATLaS models.
    All steps are best-effort; a failure is logged but never fatal.
    """
    banner = "=" * 60

    # ----- STARTUP -----
    logger.info(banner)
    logger.info("🌱 FarmEyes Starting Up...")
    logger.info(banner)

    # Show the effective configuration (best-effort).
    try:
        from config import print_config_summary
        print_config_summary()
    except ImportError as e:
        logger.warning(f"Could not load config: {e}")

    # Instantiate the session-manager singleton up front.
    try:
        from services.session_manager import get_session_manager
        get_session_manager()
        logger.info("✅ Session manager initialized")
    except Exception as e:
        logger.warning(f"Session manager init failed: {e}")

    # Preload the GGUF chat model; on failure it lazy-loads on first use.
    try:
        from models.natlas_model import get_natlas_model
        logger.info("🔄 Preloading N-ATLaS GGUF model...")
        model = get_natlas_model(auto_load_local=True)
        if model.local_model.is_loaded:
            logger.info("✅ N-ATLaS GGUF model preloaded successfully!")
        else:
            logger.warning("⚠️ GGUF model not loaded - will load on first use")
    except Exception as e:
        logger.warning(f"⚠️ GGUF model preload failed: {e}")
        logger.warning(" Model will load on first use (slower first request)")

    logger.info(banner)
    logger.info("🚀 FarmEyes Ready!")
    logger.info(banner)

    yield  # Application serves requests here.

    # ----- SHUTDOWN -----
    logger.info(banner)
    logger.info("🛑 FarmEyes Shutting Down...")
    logger.info(banner)

    # Best-effort teardown; ignore errors while shutting down.
    try:
        from services.whisper_service import unload_whisper_service
        unload_whisper_service()
    except Exception:
        pass

    try:
        from models.natlas_model import unload_natlas_model
        unload_natlas_model()
    except Exception:
        pass

    logger.info("👋 Goodbye!")
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
# =============================================================================
|
| 109 |
+
# CREATE APPLICATION
|
| 110 |
+
# =============================================================================
|
| 111 |
+
|
| 112 |
+
# FastAPI application instance; interactive docs live under /api.
app = FastAPI(
    title="FarmEyes API",
    description="AI-Powered Crop Disease Detection for African Farmers",
    version="2.0.0",
    docs_url="/api/docs",
    redoc_url="/api/redoc",
    lifespan=lifespan,
)

# CORS: fully open for the demo frontend.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# rejected by browsers for credentialed requests — confirm credentials are
# actually needed, or pin the allowed origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
# =============================================================================
|
| 132 |
+
# REQUEST LOGGING MIDDLEWARE
|
| 133 |
+
# =============================================================================
|
| 134 |
+
|
| 135 |
+
@app.middleware("http")
async def log_requests(request: Request, call_next):
    """Middleware: log method, path, status code and wall-clock duration."""
    started = datetime.now()

    response = await call_next(request)

    path = request.url.path
    # Static asset requests are too noisy to be worth logging.
    if not path.startswith("/static"):
        elapsed_ms = (datetime.now() - started).total_seconds() * 1000
        logger.info(f"{request.method} {path} - {response.status_code} - {elapsed_ms:.1f}ms")

    return response
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
# =============================================================================
|
| 151 |
+
# INCLUDE API ROUTERS
|
| 152 |
+
# =============================================================================
|
| 153 |
+
|
| 154 |
+
# Each router is optional: a failed import is logged but does not stop the
# rest of the API from serving.
try:
    from api.routes import detection
    app.include_router(detection.router)
    logger.info("✅ Detection routes loaded")
except ImportError as e:
    logger.error(f"Failed to load detection routes: {e}")

try:
    from api.routes import chat
    app.include_router(chat.router)
    logger.info("✅ Chat routes loaded")
except ImportError as e:
    logger.error(f"Failed to load chat routes: {e}")

try:
    from api.routes import transcribe
    app.include_router(transcribe.router)
    logger.info("✅ Transcribe routes loaded")
except ImportError as e:
    logger.error(f"Failed to load transcribe routes: {e}")

try:
    from api.routes import tts
    app.include_router(tts.router)
    logger.info("✅ TTS routes loaded")
except ImportError as e:
    logger.error(f"Failed to load TTS routes: {e}")
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
# =============================================================================
|
| 184 |
+
# STATIC FILES
|
| 185 |
+
# =============================================================================
|
| 186 |
+
|
| 187 |
+
# Mount static files for CSS, JS
|
| 188 |
+
# Serve frontend assets (CSS, JS) under /static.
static_dir = PROJECT_ROOT / "frontend"
if not static_dir.exists():
    logger.warning(f"⚠️ Frontend directory not found: {static_dir}")
else:
    app.mount("/static", StaticFiles(directory=str(static_dir)), name="static")
    logger.info(f"✅ Static files mounted from: {static_dir}")
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
# =============================================================================
|
| 197 |
+
# ROOT ENDPOINTS
|
| 198 |
+
# =============================================================================
|
| 199 |
+
|
| 200 |
+
@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve the frontend SPA; fall back to a minimal landing page."""
    index_path = PROJECT_ROOT / "frontend" / "index.html"

    if index_path.exists():
        return FileResponse(index_path)

    # No frontend bundle present — still make "/" useful.
    return HTMLResponse(content="""
    <!DOCTYPE html>
    <html>
    <head><title>FarmEyes</title></head>
    <body>
    <h1>🌱 FarmEyes API</h1>
    <p>Frontend not found. API is running.</p>
    <p>Visit <a href="/api/docs">/api/docs</a> for API documentation.</p>
    </body>
    </html>
    """)
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
@app.get("/health")
async def health_check():
    """Liveness probe: static service metadata plus the current time."""
    now = datetime.now().isoformat()
    return {
        "status": "healthy",
        "service": "FarmEyes",
        "version": "2.0.0",
        "timestamp": now,
    }
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
@app.get("/api")
async def api_info():
    """Describe the API: endpoints, supported languages and crops."""
    endpoints = {
        "detection": "/api/detect",
        "chat": "/api/chat",
        "transcribe": "/api/transcribe",
        "docs": "/api/docs",
    }
    return {
        "name": "FarmEyes API",
        "version": "2.0.0",
        "description": "AI-Powered Crop Disease Detection for African Farmers",
        "endpoints": endpoints,
        "supported_languages": ["en", "ha", "yo", "ig"],
        "supported_crops": ["cassava", "cocoa", "tomato"],
    }
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
# =============================================================================
|
| 251 |
+
# SESSION ENDPOINTS
|
| 252 |
+
# =============================================================================
|
| 253 |
+
|
| 254 |
+
@app.get("/api/session")
async def create_session(language: str = "en"):
    """Create a new session.

    NOTE(review): creating a resource via GET is unconventional (POST would
    be more RESTful); kept for frontend compatibility.
    """
    try:
        from services.session_manager import get_session_manager

        session = get_session_manager().create_session(language)

        # created_at is already an ISO-format string from session_manager.
        return {
            "success": True,
            "session_id": session.session_id,
            "language": session.language,
            "created_at": session.created_at,
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
@app.get("/api/session/{session_id}")
async def get_session(session_id: str):
    """Return summary information for an existing session (404 if unknown)."""
    try:
        from services.session_manager import get_session_manager

        session = get_session_manager().get_session(session_id)
        if not session:
            raise HTTPException(status_code=404, detail="Session not found")

        return {
            "success": True,
            "session_id": session.session_id,
            "language": session.language,
            "has_diagnosis": session.diagnosis is not None,
            "chat_messages": len(session.chat_history),
            "created_at": session.created_at,        # already an ISO string
            "last_accessed": session.last_accessed,  # Unix timestamp float
        }
    except HTTPException:
        raise  # let the 404 pass through unchanged
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
@app.put("/api/session/{session_id}/language")
async def update_session_language(session_id: str, language: str = "en"):
    """Switch a session's language (one of: en, ha, yo, ig)."""
    try:
        from services.session_manager import get_session_manager

        valid_languages = ["en", "ha", "yo", "ig"]
        if language not in valid_languages:
            raise HTTPException(status_code=400, detail=f"Invalid language. Use: {valid_languages}")

        updated = get_session_manager().set_language(session_id, language)
        if not updated:
            raise HTTPException(status_code=404, detail="Session not found")

        return {
            "success": True,
            "session_id": session_id,
            "language": language,
        }
    except HTTPException:
        raise  # preserve the 400/404 status codes
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
@app.delete("/api/session/{session_id}")
async def delete_session(session_id: str):
    """Delete a session; success is False when it did not exist."""
    try:
        from services.session_manager import get_session_manager

        deleted = get_session_manager().delete_session(session_id)
        return {"success": deleted, "session_id": session_id}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
# =============================================================================
|
| 346 |
+
# TRANSLATIONS ENDPOINT
|
| 347 |
+
# =============================================================================
|
| 348 |
+
|
| 349 |
+
@app.get("/api/translations")
async def get_translations(language: str = "en"):
    """Return UI translation strings for *language*, falling back to English.

    Never raises: failures are reported via success=False in the payload.
    """
    try:
        # NOTE(review): this reads from static/, while frontend assets are
        # served from frontend/ — confirm ui_translations.json lives here.
        translations_path = PROJECT_ROOT / "static" / "ui_translations.json"

        if not translations_path.exists():
            return {
                "success": False,
                "language": language,
                "translations": {},
                "error": "Translations file not found",
            }

        import json
        with open(translations_path, "r", encoding="utf-8") as f:
            all_translations = json.load(f)

        # Requested language first, then English, then empty.
        selected = all_translations.get(language, all_translations.get("en", {}))
        return {
            "success": True,
            "language": language,
            "translations": selected,
        }
    except Exception as e:
        return {
            "success": False,
            "language": language,
            "translations": {},
            "error": str(e),
        }
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
# =============================================================================
|
| 384 |
+
# ERROR HANDLERS
|
| 385 |
+
# =============================================================================
|
| 386 |
+
|
| 387 |
+
@app.exception_handler(404)
async def not_found_handler(request: Request, exc: HTTPException):
    """404 handler: serve the SPA shell for non-API paths (client routing)."""
    path = request.url.path
    if not path.startswith("/api"):
        # NOTE(review): this also returns index.html for missing /static
        # assets — confirm that is the intended behavior.
        index_path = PROJECT_ROOT / "frontend" / "index.html"
        if index_path.exists():
            return FileResponse(index_path)

    return JSONResponse(
        status_code=404,
        content={"error": "Not found", "path": path},
    )
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
@app.exception_handler(500)
async def server_error_handler(request: Request, exc: Exception):
    """500 handler: log the failure with its traceback, return generic JSON.

    The response body deliberately hides the exception detail from clients;
    the log line is the only place the error is visible.
    """
    # FIX: the original logged only str(exc), discarding the traceback.
    # exc_info=exc attaches the full stack trace to the log record.
    logger.error("Server error: %s", exc, exc_info=exc)
    return JSONResponse(
        status_code=500,
        content={"error": "Internal server error"}
    )
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
# =============================================================================
|
| 412 |
+
# MAIN ENTRY POINT
|
| 413 |
+
# =============================================================================
|
| 414 |
+
|
| 415 |
+
if __name__ == "__main__":
    # HuggingFace Spaces sets SPACE_ID and requires binding 0.0.0.0:7860.
    running_on_spaces = os.environ.get("SPACE_ID") is not None

    if running_on_spaces:
        host, port, reload = "0.0.0.0", 7860, False
    else:
        # Local development: bind 127.0.0.1 so the page counts as a secure
        # context and navigator.mediaDevices (microphone) works in Chrome.
        # Access via http://localhost:7860 (NOT http://0.0.0.0:7860).
        host = os.environ.get("HOST", "127.0.0.1")
        port = int(os.environ.get("PORT", 7860))
        reload = os.environ.get("RELOAD", "false").lower() == "true"

    logger.info(f"Starting server on {host}:{port}")
    logger.info(f"Access the app at: http://localhost:{port}")

    uvicorn.run(
        "main:app",
        host=host,
        port=port,
        reload=reload,
        log_level="info",
    )
|
models/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
models/.ipynb_checkpoints/__init__-checkpoint.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Models Package
|
| 3 |
+
=======================
|
| 4 |
+
Machine learning model integrations for FarmEyes.
|
| 5 |
+
- N-ATLaS: Multilingual language model for translation and text generation
|
| 6 |
+
- YOLOv11: Computer vision model for disease detection
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from .natlas_model import (
|
| 10 |
+
NATLaSModel,
|
| 11 |
+
get_natlas_model,
|
| 12 |
+
unload_natlas_model,
|
| 13 |
+
translate_text,
|
| 14 |
+
generate_diagnosis
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
# YOLOv11 model will be added in next step
|
| 18 |
+
# from .yolo_model import YOLOModel, get_yolo_model
|
| 19 |
+
|
| 20 |
+
__all__ = [
|
| 21 |
+
# N-ATLaS exports
|
| 22 |
+
"NATLaSModel",
|
| 23 |
+
"get_natlas_model",
|
| 24 |
+
"unload_natlas_model",
|
| 25 |
+
"translate_text",
|
| 26 |
+
"generate_diagnosis",
|
| 27 |
+
|
| 28 |
+
# YOLO exports (to be added)
|
| 29 |
+
# "YOLOModel",
|
| 30 |
+
# "get_yolo_model",
|
| 31 |
+
]
|
models/.ipynb_checkpoints/natlas_model-checkpoint.py
ADDED
|
@@ -0,0 +1,787 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes N-ATLaS Model Integration
|
| 3 |
+
==================================
|
| 4 |
+
Handles loading and inference with N-ATLaS GGUF model using llama-cpp-python.
|
| 5 |
+
Optimized for Apple Silicon M1 Pro with Metal acceleration.
|
| 6 |
+
|
| 7 |
+
Model: tosinamuda/N-ATLaS-GGUF (8B parameters, 16-bit quantized)
|
| 8 |
+
Supported Languages: English, Hausa, Yoruba, Igbo
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
import sys
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from typing import Optional, Dict, List, Generator
|
| 15 |
+
from dataclasses import dataclass
|
| 16 |
+
import logging
|
| 17 |
+
|
| 18 |
+
# Add parent directory to path for imports
|
| 19 |
+
sys.path.append(str(Path(__file__).parent.parent))
|
| 20 |
+
|
| 21 |
+
from config import natlas_config, device_config
|
| 22 |
+
from utils.prompt_templates import (
|
| 23 |
+
TranslationPrompts,
|
| 24 |
+
DiagnosisPrompts,
|
| 25 |
+
ReportPrompts,
|
| 26 |
+
ConversationalPrompts,
|
| 27 |
+
get_system_prompt,
|
| 28 |
+
format_prompt_for_natlas,
|
| 29 |
+
LANGUAGE_NAMES
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
# Configure logging
|
| 33 |
+
logging.basicConfig(level=logging.INFO)
|
| 34 |
+
logger = logging.getLogger(__name__)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# =============================================================================
|
| 38 |
+
# N-ATLaS MODEL CLASS
|
| 39 |
+
# =============================================================================
|
| 40 |
+
|
| 41 |
+
class NATLaSModel:
|
| 42 |
+
"""
|
| 43 |
+
N-ATLaS Language Model wrapper for FarmEyes application.
|
| 44 |
+
Uses llama-cpp-python for GGUF model inference with Metal acceleration.
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
def __init__(
|
| 48 |
+
self,
|
| 49 |
+
model_path: Optional[str] = None,
|
| 50 |
+
n_ctx: int = 4096,
|
| 51 |
+
n_gpu_layers: int = -1,
|
| 52 |
+
n_threads: int = 8,
|
| 53 |
+
n_batch: int = 512,
|
| 54 |
+
verbose: bool = False
|
| 55 |
+
):
|
| 56 |
+
"""
|
| 57 |
+
Initialize N-ATLaS model.
|
| 58 |
+
|
| 59 |
+
Args:
|
| 60 |
+
model_path: Path to GGUF model file (downloads if not exists)
|
| 61 |
+
n_ctx: Context window size
|
| 62 |
+
n_gpu_layers: Number of layers to offload to GPU (-1 for all)
|
| 63 |
+
n_threads: Number of CPU threads
|
| 64 |
+
n_batch: Batch size for prompt processing
|
| 65 |
+
verbose: Enable verbose output from llama.cpp
|
| 66 |
+
"""
|
| 67 |
+
self.model_path = model_path or str(natlas_config.gguf_path)
|
| 68 |
+
self.n_ctx = n_ctx
|
| 69 |
+
self.n_gpu_layers = n_gpu_layers
|
| 70 |
+
self.n_threads = n_threads
|
| 71 |
+
self.n_batch = n_batch
|
| 72 |
+
self.verbose = verbose
|
| 73 |
+
|
| 74 |
+
# Model instance (lazy loaded)
|
| 75 |
+
self._model = None
|
| 76 |
+
self._is_loaded = False
|
| 77 |
+
|
| 78 |
+
logger.info(f"NATLaSModel initialized with config:")
|
| 79 |
+
logger.info(f" Model path: {self.model_path}")
|
| 80 |
+
logger.info(f" Context length: {self.n_ctx}")
|
| 81 |
+
logger.info(f" GPU layers: {self.n_gpu_layers}")
|
| 82 |
+
|
| 83 |
+
# =========================================================================
|
| 84 |
+
# MODEL LOADING
|
| 85 |
+
# =========================================================================
|
| 86 |
+
|
| 87 |
+
def download_model(self) -> str:
|
| 88 |
+
"""
|
| 89 |
+
Download N-ATLaS GGUF model from HuggingFace if not already present.
|
| 90 |
+
|
| 91 |
+
Returns:
|
| 92 |
+
Path to the downloaded model file
|
| 93 |
+
"""
|
| 94 |
+
from huggingface_hub import hf_hub_download
|
| 95 |
+
|
| 96 |
+
model_dir = Path(self.model_path).parent
|
| 97 |
+
model_dir.mkdir(parents=True, exist_ok=True)
|
| 98 |
+
|
| 99 |
+
# Check if model already exists
|
| 100 |
+
if Path(self.model_path).exists():
|
| 101 |
+
logger.info(f"Model already exists at {self.model_path}")
|
| 102 |
+
return self.model_path
|
| 103 |
+
|
| 104 |
+
logger.info(f"Downloading N-ATLaS model from HuggingFace...")
|
| 105 |
+
logger.info(f" Repository: {natlas_config.hf_repo}")
|
| 106 |
+
logger.info(f" Filename: {natlas_config.model_filename}")
|
| 107 |
+
|
| 108 |
+
try:
|
| 109 |
+
# Download from HuggingFace Hub
|
| 110 |
+
downloaded_path = hf_hub_download(
|
| 111 |
+
repo_id=natlas_config.hf_repo,
|
| 112 |
+
filename=natlas_config.model_filename,
|
| 113 |
+
local_dir=str(model_dir),
|
| 114 |
+
local_dir_use_symlinks=False
|
| 115 |
+
)
|
| 116 |
+
|
| 117 |
+
logger.info(f"Model downloaded successfully to {downloaded_path}")
|
| 118 |
+
return downloaded_path
|
| 119 |
+
|
| 120 |
+
except Exception as e:
|
| 121 |
+
logger.error(f"Failed to download model: {e}")
|
| 122 |
+
raise RuntimeError(f"Could not download N-ATLaS model: {e}")
|
| 123 |
+
|
| 124 |
+
def load_model(self) -> bool:
|
| 125 |
+
"""
|
| 126 |
+
Load the N-ATLaS GGUF model into memory.
|
| 127 |
+
|
| 128 |
+
Returns:
|
| 129 |
+
True if model loaded successfully
|
| 130 |
+
"""
|
| 131 |
+
if self._is_loaded:
|
| 132 |
+
logger.info("Model already loaded")
|
| 133 |
+
return True
|
| 134 |
+
|
| 135 |
+
try:
|
| 136 |
+
from llama_cpp import Llama
|
| 137 |
+
|
| 138 |
+
# Ensure model is downloaded
|
| 139 |
+
model_path = self.download_model()
|
| 140 |
+
|
| 141 |
+
logger.info("Loading N-ATLaS model into memory...")
|
| 142 |
+
logger.info(" This may take a minute for the first load...")
|
| 143 |
+
|
| 144 |
+
# Initialize Llama model with Metal acceleration
|
| 145 |
+
self._model = Llama(
|
| 146 |
+
model_path=model_path,
|
| 147 |
+
n_ctx=self.n_ctx,
|
| 148 |
+
n_gpu_layers=self.n_gpu_layers, # -1 = offload all to GPU
|
| 149 |
+
n_threads=self.n_threads,
|
| 150 |
+
n_batch=self.n_batch,
|
| 151 |
+
verbose=self.verbose,
|
| 152 |
+
# Metal-specific settings for Apple Silicon
|
| 153 |
+
use_mlock=True, # Lock model in RAM
|
| 154 |
+
use_mmap=True, # Memory-map the model
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
self._is_loaded = True
|
| 158 |
+
logger.info("✅ N-ATLaS model loaded successfully!")
|
| 159 |
+
logger.info(f" Context window: {self.n_ctx} tokens")
|
| 160 |
+
logger.info(f" GPU acceleration: {'Enabled (Metal)' if self.n_gpu_layers != 0 else 'Disabled'}")
|
| 161 |
+
|
| 162 |
+
return True
|
| 163 |
+
|
| 164 |
+
except ImportError:
|
| 165 |
+
logger.error("llama-cpp-python not installed!")
|
| 166 |
+
logger.error("Install with: CMAKE_ARGS=\"-DLLAMA_METAL=on\" pip install llama-cpp-python")
|
| 167 |
+
raise ImportError("llama-cpp-python is required. Install with Metal support for Apple Silicon.")
|
| 168 |
+
|
| 169 |
+
except Exception as e:
|
| 170 |
+
logger.error(f"Failed to load model: {e}")
|
| 171 |
+
self._is_loaded = False
|
| 172 |
+
raise RuntimeError(f"Could not load N-ATLaS model: {e}")
|
| 173 |
+
|
| 174 |
+
def unload_model(self):
    """Release the loaded model and mark the wrapper as unloaded.

    No-op when nothing is currently loaded.
    """
    if self._model is None:
        return
    del self._model
    self._model = None
    self._is_loaded = False
    logger.info("Model unloaded from memory")
|
| 181 |
+
|
| 182 |
+
@property
def is_loaded(self) -> bool:
    """True while a model instance is resident in memory."""
    return self._is_loaded
|
| 186 |
+
|
| 187 |
+
# =========================================================================
|
| 188 |
+
# TEXT GENERATION
|
| 189 |
+
# =========================================================================
|
| 190 |
+
|
| 191 |
+
def generate(
    self,
    prompt: str,
    max_tokens: int = 512,
    temperature: float = 0.7,
    top_p: float = 0.9,
    top_k: int = 40,
    repeat_penalty: float = 1.1,
    stop: Optional[List[str]] = None,
    system_prompt: Optional[str] = None
) -> str:
    """
    Run a single text completion through the N-ATLaS model.

    Args:
        prompt: Input prompt/instruction.
        max_tokens: Cap on generated tokens.
        temperature: Sampling temperature (0.0 = deterministic).
        top_p: Nucleus sampling parameter.
        top_k: Top-k sampling parameter.
        repeat_penalty: Penalty applied to repeated tokens.
        stop: Stop sequences; defaults to the model's terminators.
        system_prompt: Optional system instruction to wrap the prompt with.

    Returns:
        The generated text, stripped of surrounding whitespace.

    Raises:
        RuntimeError: If the underlying completion call fails.
    """
    # Lazily load the model on first use.
    if not self._is_loaded:
        self.load_model()

    # Wrap the user prompt with the system instruction when one is given.
    full_prompt = (
        format_prompt_for_natlas(system_prompt, prompt) if system_prompt else prompt
    )

    # Fall back to the default terminators when the caller gives none.
    stop_sequences = stop if stop is not None else ["<|eot_id|>", "<|end_of_text|>", "\n\n\n"]

    try:
        completion = self._model(
            full_prompt,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            top_k=top_k,
            repeat_penalty=repeat_penalty,
            stop=stop_sequences,
            echo=False  # return only the continuation, not the prompt
        )
        return completion["choices"][0]["text"].strip()
    except Exception as e:
        logger.error(f"Generation error: {e}")
        raise RuntimeError(f"Text generation failed: {e}")
|
| 253 |
+
|
| 254 |
+
def generate_stream(
    self,
    prompt: str,
    max_tokens: int = 512,
    temperature: float = 0.7,
    system_prompt: Optional[str] = None
) -> Generator[str, None, None]:
    """
    Stream a completion fragment-by-fragment.

    Args:
        prompt: Input prompt.
        max_tokens: Cap on generated tokens.
        temperature: Sampling temperature.
        system_prompt: Optional system instruction to wrap the prompt with.

    Yields:
        Each generated text fragment as the model produces it.

    Raises:
        RuntimeError: If generation fails mid-stream.
    """
    if not self._is_loaded:
        self.load_model()  # lazy-load on first use

    wrapped = format_prompt_for_natlas(system_prompt, prompt) if system_prompt else prompt

    try:
        stream = self._model(
            wrapped,
            max_tokens=max_tokens,
            temperature=temperature,
            stream=True,
            stop=["<|eot_id|>", "<|end_of_text|>"]
        )
        for chunk in stream:
            yield chunk["choices"][0]["text"]
    except Exception as e:
        logger.error(f"Streaming generation error: {e}")
        raise RuntimeError(f"Streaming generation failed: {e}")
|
| 296 |
+
|
| 297 |
+
# =========================================================================
|
| 298 |
+
# TRANSLATION METHODS
|
| 299 |
+
# =========================================================================
|
| 300 |
+
|
| 301 |
+
def translate(
    self,
    text: str,
    target_language: str,
    temperature: float = 0.3
) -> str:
    """
    Translate text to the target language.

    Args:
        text: Text to translate (assumed to already be English — the
            "en" shortcut below returns the input untouched; TODO confirm
            against callers).
        target_language: Target language code (ha, yo, ig, en).
        temperature: Lower temperature for more accurate translation.

    Returns:
        Translated text.

    Raises:
        ValueError: If target_language is not a supported code.
    """
    # Validate language
    if target_language not in LANGUAGE_NAMES:
        raise ValueError(f"Unsupported language: {target_language}")

    # If target is English, return the input as-is.
    if target_language == "en":
        return text

    prompt = TranslationPrompts.translate_text(text, target_language)
    system = get_system_prompt("translation")

    # len(text) * 3 is a rough character-based token budget that allows for
    # expansion, but it yields max_tokens == 0 for empty input and a tiny
    # budget for short strings — clamp to a sensible floor.
    token_budget = max(64, len(text) * 3)

    translation = self.generate(
        prompt=prompt,
        system_prompt=system,
        max_tokens=token_budget,
        temperature=temperature,
        repeat_penalty=1.0  # repeat penalty hurts faithful translation
    )

    return translation.strip()
|
| 340 |
+
|
| 341 |
+
def translate_disease_name(
    self,
    disease_name: str,
    target_language: str
) -> str:
    """
    Translate an English disease name into the target language.

    Args:
        disease_name: Disease name in English.
        target_language: Target language code.

    Returns:
        The translated disease name ("en" returns the input unchanged).
    """
    if target_language == "en":
        return disease_name

    translated = self.generate(
        prompt=TranslationPrompts.translate_disease_name(disease_name, target_language),
        system_prompt=get_system_prompt("translation"),
        max_tokens=100,
        temperature=0.3
    )
    return translated.strip()
|
| 370 |
+
|
| 371 |
+
def translate_symptoms(
    self,
    symptoms: List[str],
    target_language: str
) -> List[str]:
    """
    Translate a list of symptom descriptions to the target language.

    Args:
        symptoms: List of symptom descriptions.
        target_language: Target language code.

    Returns:
        List of translated symptoms; falls back to the originals if the
        model output cannot be parsed into any lines.
    """
    # Nothing to do for English or an empty list (an empty list would
    # otherwise produce a zero token budget below).
    if target_language == "en" or not symptoms:
        return symptoms

    prompt = TranslationPrompts.translate_symptoms(symptoms, target_language)
    system = get_system_prompt("translation")

    translation = self.generate(
        prompt=prompt,
        system_prompt=system,
        # Rough character-based budget; floor guards short inputs.
        max_tokens=max(64, len(" ".join(symptoms)) * 3),
        temperature=0.3
    )

    # Parse translated symptoms: expect one "- symptom" per line, but
    # accept bare non-empty lines as well.
    translated = []
    for line in translation.strip().split("\n"):
        line = line.strip()
        if line.startswith("-"):
            translated.append(line[1:].strip())
        elif line:
            translated.append(line)

    return translated if translated else symptoms
|
| 409 |
+
|
| 410 |
+
def batch_translate(
    self,
    texts: List[str],
    target_language: str
) -> List[str]:
    """
    Translate multiple texts in a single model call.

    Args:
        texts: List of texts to translate.
        target_language: Target language code.

    Returns:
        Translations aligned with *texts*; entries the model failed to
        produce are padded with the original text, and extras are dropped.
    """
    # English target or empty batch: nothing to translate (an empty batch
    # would otherwise yield a zero token budget below).
    if target_language == "en" or not texts:
        return texts

    prompt = TranslationPrompts.batch_translate(texts, target_language)
    system = get_system_prompt("translation")

    translation = self.generate(
        prompt=prompt,
        system_prompt=system,
        # Rough character-based budget; floor guards short inputs.
        max_tokens=max(64, len(" ".join(texts)) * 3),
        temperature=0.3
    )

    # Parse numbered translations ("1. ...", "2. ..."); tolerate
    # unnumbered non-empty lines too.
    translated = []
    for line in translation.strip().split("\n"):
        line = line.strip()
        if line and line[0].isdigit():
            parts = line.split(".", 1)
            translated.append(parts[1].strip() if len(parts) > 1 else line)
        elif line:
            translated.append(line)

    # Pad with originals / truncate so output length always matches input.
    while len(translated) < len(texts):
        translated.append(texts[len(translated)])

    return translated[:len(texts)]
|
| 457 |
+
|
| 458 |
+
# =========================================================================
|
| 459 |
+
# DIAGNOSIS GENERATION METHODS
|
| 460 |
+
# =========================================================================
|
| 461 |
+
|
| 462 |
+
def generate_diagnosis_summary(
    self,
    disease_name: str,
    crop: str,
    confidence: float,
    severity: str,
    target_language: str = "en"
) -> str:
    """
    Generate a farmer-facing summary of a disease diagnosis.

    Args:
        disease_name: Detected disease name.
        crop: Crop type.
        confidence: Detection confidence (0.0-1.0).
        severity: Severity level.
        target_language: Output language code.

    Returns:
        Diagnosis summary text.
    """
    summary = self.generate(
        prompt=DiagnosisPrompts.generate_diagnosis_summary(
            disease_name=disease_name,
            crop=crop,
            confidence=confidence,
            severity=severity,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("diagnosis"),
        max_tokens=300,
        temperature=0.7,
    )
    return summary.strip()
|
| 500 |
+
|
| 501 |
+
def generate_treatment_advice(
    self,
    disease_name: str,
    treatments: Dict,
    target_language: str = "en"
) -> str:
    """
    Generate treatment recommendations for the farmer.

    Args:
        disease_name: Disease name.
        treatments: Treatment information from the knowledge base.
        target_language: Output language code.

    Returns:
        Treatment advice text.
    """
    advice = self.generate(
        prompt=DiagnosisPrompts.generate_treatment_recommendation(
            disease_name=disease_name,
            treatments=treatments,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("diagnosis"),
        max_tokens=600,
        temperature=0.7,
    )
    return advice.strip()
|
| 533 |
+
|
| 534 |
+
def generate_prevention_advice(
    self,
    disease_name: str,
    prevention_tips: List[str],
    target_language: str = "en"
) -> str:
    """
    Generate prevention advice for the farmer.

    Args:
        disease_name: Disease name.
        prevention_tips: Known prevention methods to base the advice on.
        target_language: Output language code.

    Returns:
        Prevention advice text.
    """
    advice = self.generate(
        prompt=DiagnosisPrompts.generate_prevention_advice(
            disease_name=disease_name,
            prevention_tips=prevention_tips,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("diagnosis"),
        max_tokens=400,
        temperature=0.7,
    )
    return advice.strip()
|
| 566 |
+
|
| 567 |
+
def generate_full_report(
    self,
    disease_data: Dict,
    confidence: float,
    target_language: str = "en"
) -> str:
    """
    Generate a complete diagnosis report.

    Args:
        disease_data: Full disease information from the knowledge base.
        confidence: Detection confidence.
        target_language: Output language code.

    Returns:
        Complete diagnosis report text.
    """
    report = self.generate(
        prompt=ReportPrompts.generate_full_report(
            disease_data=disease_data,
            confidence=confidence,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("diagnosis"),
        max_tokens=800,
        temperature=0.7,
    )
    return report.strip()
|
| 599 |
+
|
| 600 |
+
def generate_healthy_message(
    self,
    crop: str,
    target_language: str = "en"
) -> str:
    """
    Generate a message for a healthy-plant detection.

    Args:
        crop: Crop type.
        target_language: Output language code.

    Returns:
        Healthy plant message text.
    """
    message = self.generate(
        prompt=DiagnosisPrompts.generate_healthy_plant_message(
            crop=crop,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("diagnosis"),
        max_tokens=250,
        temperature=0.7,
    )
    return message.strip()
|
| 629 |
+
|
| 630 |
+
# =========================================================================
|
| 631 |
+
# CONVERSATIONAL METHODS
|
| 632 |
+
# =========================================================================
|
| 633 |
+
|
| 634 |
+
def answer_question(
    self,
    question: str,
    context: Optional[str] = None,
    target_language: str = "en"
) -> str:
    """
    Answer a farmer's question, optionally grounded in a diagnosis context.

    Args:
        question: The farmer's question.
        context: Optional context (current diagnosis, etc.).
        target_language: Response language code.

    Returns:
        Answer text.
    """
    answer = self.generate(
        prompt=ConversationalPrompts.answer_farmer_question(
            question=question,
            context=context,
            target_language=target_language,
        ),
        system_prompt=get_system_prompt("conversation"),
        max_tokens=400,
        temperature=0.7,
    )
    return answer.strip()
|
| 666 |
+
|
| 667 |
+
|
| 668 |
+
# =============================================================================
|
| 669 |
+
# SINGLETON INSTANCE
|
| 670 |
+
# =============================================================================
|
| 671 |
+
|
| 672 |
+
# Global model instance (lazy loaded)
# Module-level singleton: get_natlas_model() populates it on first use and
# unload_natlas_model() resets it to None.
_model_instance: Optional[NATLaSModel] = None
|
| 674 |
+
|
| 675 |
+
|
| 676 |
+
def get_natlas_model() -> NATLaSModel:
    """
    Return the process-wide N-ATLaS model singleton.

    The wrapper is constructed from natlas_config settings on the first
    call; the actual weights are still loaded lazily on first generation.

    Returns:
        NATLaSModel instance.
    """
    global _model_instance

    if _model_instance is not None:
        return _model_instance

    _model_instance = NATLaSModel(
        n_ctx=natlas_config.context_length,
        n_gpu_layers=natlas_config.n_gpu_layers,
        n_threads=natlas_config.n_threads,
        n_batch=natlas_config.n_batch,
    )
    return _model_instance
|
| 695 |
+
|
| 696 |
+
|
| 697 |
+
def unload_natlas_model():
    """Drop the singleton model instance and free its memory (no-op when unset)."""
    global _model_instance

    if _model_instance is None:
        return
    _model_instance.unload_model()
    _model_instance = None
|
| 704 |
+
|
| 705 |
+
|
| 706 |
+
# =============================================================================
|
| 707 |
+
# CONVENIENCE FUNCTIONS
|
| 708 |
+
# =============================================================================
|
| 709 |
+
|
| 710 |
+
def translate_text(text: str, target_language: str) -> str:
    """
    Module-level shortcut: translate *text* via the shared N-ATLaS model.

    Args:
        text: Text to translate.
        target_language: Target language code.

    Returns:
        Translated text.
    """
    return get_natlas_model().translate(text, target_language)
|
| 723 |
+
|
| 724 |
+
|
| 725 |
+
def generate_diagnosis(
    disease_data: Dict,
    confidence: float,
    target_language: str = "en"
) -> str:
    """
    Module-level shortcut: build a full diagnosis report via the shared model.

    Args:
        disease_data: Disease information.
        confidence: Detection confidence.
        target_language: Output language code.

    Returns:
        Diagnosis report text.
    """
    return get_natlas_model().generate_full_report(
        disease_data, confidence, target_language
    )
|
| 743 |
+
|
| 744 |
+
|
| 745 |
+
# =============================================================================
|
| 746 |
+
# MAIN - Test the model
|
| 747 |
+
# =============================================================================
|
| 748 |
+
|
| 749 |
+
if __name__ == "__main__":
    # Manual smoke test: loads the real model and exercises translation
    # and diagnosis generation end-to-end (slow; requires weights).
    divider = "=" * 60

    print(divider)
    print("N-ATLaS Model Test")
    print(divider)

    print("\n1. Initializing N-ATLaS model...")
    model = NATLaSModel(verbose=False)

    print("\n2. Loading model (this may take a minute)...")
    model.load_model()

    print("\n3. Testing translation (English to Hausa)...")
    english_text = "Your cassava plant has a disease. Please remove infected leaves."
    hausa_text = model.translate(english_text, "ha")
    print(f" English: {english_text}")
    print(f" Hausa: {hausa_text}")

    print("\n4. Testing diagnosis summary generation (Yoruba)...")
    summary = model.generate_diagnosis_summary(
        disease_name="Cassava Mosaic Disease",
        crop="cassava",
        confidence=0.89,
        severity="high",
        target_language="yo",
    )
    print(f" Summary: {summary}")

    print("\n5. Testing healthy plant message (Igbo)...")
    message = model.generate_healthy_message(crop="tomato", target_language="ig")
    print(f" Message: {message}")

    print("\n" + divider)
    print("✅ All tests completed!")
    print(divider)
|
models/.ipynb_checkpoints/yolo_model-checkpoint.py
ADDED
|
@@ -0,0 +1,699 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes YOLOv11 Model Integration
|
| 3 |
+
==================================
|
| 4 |
+
Handles loading and inference with YOLOv11 model for crop disease detection.
|
| 5 |
+
Optimized for Apple Silicon M1 Pro with MPS (Metal Performance Shaders) acceleration.
|
| 6 |
+
|
| 7 |
+
Model: Custom trained YOLOv11 for 10 disease classes
|
| 8 |
+
Crops: Cassava, Cocoa, Tomato
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import os
|
| 12 |
+
import sys
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from typing import Optional, Dict, List, Tuple, Union
|
| 15 |
+
from dataclasses import dataclass
|
| 16 |
+
import logging
|
| 17 |
+
|
| 18 |
+
# Add parent directory to path for imports
|
| 19 |
+
sys.path.append(str(Path(__file__).parent.parent))
|
| 20 |
+
|
| 21 |
+
import numpy as np
|
| 22 |
+
from PIL import Image
|
| 23 |
+
|
| 24 |
+
# Configure logging
|
| 25 |
+
logging.basicConfig(level=logging.INFO)
|
| 26 |
+
logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# =============================================================================
|
| 30 |
+
# PREDICTION RESULT DATACLASS
|
| 31 |
+
# =============================================================================
|
| 32 |
+
|
| 33 |
+
@dataclass
class PredictionResult:
    """
    Container for a single disease prediction.

    Bundles the model's class decision, confidence, and crop metadata,
    with an optional bounding box for detection-style outputs.
    """
    class_index: int                    # predicted class index (0-9)
    class_name: str                     # human-readable class name
    disease_key: str                    # knowledge-base lookup key
    confidence: float                   # confidence score in [0.0, 1.0]
    crop_type: str                      # cassava, cocoa, or tomato
    is_healthy: bool                    # whether the plant is healthy
    bbox: Optional[List[float]] = None  # [x1, y1, x2, y2] when available

    def to_dict(self) -> Dict:
        """Serialize to a JSON-friendly dict (adds a rounded percent view of confidence)."""
        payload = {
            "class_index": self.class_index,
            "class_name": self.class_name,
            "disease_key": self.disease_key,
            "confidence": round(self.confidence, 4),
            "confidence_percent": round(self.confidence * 100, 1),
            "crop_type": self.crop_type,
            "is_healthy": self.is_healthy,
            "bbox": self.bbox,
        }
        return payload

    def __repr__(self) -> str:
        return f"PredictionResult({self.class_name}, conf={self.confidence:.2%}, crop={self.crop_type})"
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# =============================================================================
|
| 64 |
+
# YOLO MODEL CLASS
|
| 65 |
+
# =============================================================================
|
| 66 |
+
|
| 67 |
+
class YOLOModel:
|
| 68 |
+
"""
|
| 69 |
+
YOLOv11 Model wrapper for FarmEyes crop disease detection.
|
| 70 |
+
Uses Ultralytics library with MPS acceleration for Apple Silicon.
|
| 71 |
+
"""
|
| 72 |
+
|
| 73 |
+
# Class mappings (must match your trained model)
# Index order must match the training label order exactly; the three
# tables below are all keyed by the same 0-9 class index.
CLASS_NAMES: List[str] = [
    "Cassava Bacteria Blight",
    "Cassava Healthy Leaf",
    "Cassava Mosaic Disease",
    "Cocoa Healthy Leaf",
    "Cocoa Monilia Disease",
    "Cocoa Phytophthora Disease",
    "Tomato Gray Mold Disease",
    "Tomato Healthy Leaf",
    "Tomato Viral Disease",
    "Tomato Wilt Disease"
]

# Class index to knowledge base key mapping
# (keys are used to look up disease entries in data/knowledge_base.json —
# presumably; verify against the knowledge-base loader)
CLASS_TO_KEY: Dict[int, str] = {
    0: "cassava_bacterial_blight",
    1: "cassava_healthy",
    2: "cassava_mosaic_disease",
    3: "cocoa_healthy",
    4: "cocoa_monilia_disease",
    5: "cocoa_phytophthora_disease",
    6: "tomato_gray_mold",
    7: "tomato_healthy",
    8: "tomato_viral_disease",
    9: "tomato_wilt_disease"
}

# Class index to crop type mapping
CLASS_TO_CROP: Dict[int, str] = {
    0: "cassava", 1: "cassava", 2: "cassava",
    3: "cocoa", 4: "cocoa", 5: "cocoa",
    6: "tomato", 7: "tomato", 8: "tomato", 9: "tomato"
}

# Healthy class indices
# Indices whose CLASS_NAMES entry is a "Healthy Leaf" class (1, 3, 7).
HEALTHY_INDICES: List[int] = [1, 3, 7]
|
| 110 |
+
|
| 111 |
+
def __init__(
    self,
    model_path: Optional[str] = None,
    confidence_threshold: float = 0.5,
    iou_threshold: float = 0.45,
    device: str = "mps",
    input_size: int = 640
):
    """
    Initialize the YOLOv11 wrapper (weights are loaded lazily).

    Args:
        model_path: Path to trained YOLOv11 .pt weights file; defaults
            to the path from yolo_config.
        confidence_threshold: Minimum confidence for detections.
        iou_threshold: IoU threshold for NMS.
        device: Preferred compute device ('mps' for Apple Silicon,
            'cuda', 'cpu'); resolved to an available one.
        input_size: Input image size for the model.
    """
    # Import config here to avoid circular imports.
    # (MODELS_DIR was previously imported here too but never used.)
    from config import yolo_config

    self.model_path = model_path or str(yolo_config.model_path)
    self.confidence_threshold = confidence_threshold
    self.iou_threshold = iou_threshold
    self.input_size = input_size

    # Resolve the preferred device down to what is actually available.
    self.device = self._get_best_device(device)

    # Model instance (lazy loaded by load_model())
    self._model = None
    self._is_loaded = False

    logger.info("YOLOModel initialized:")
    logger.info(f" Model path: {self.model_path}")
    logger.info(f" Device: {self.device}")
    logger.info(f" Confidence threshold: {self.confidence_threshold}")
    logger.info(f" Input size: {self.input_size}")
|
| 149 |
+
|
| 150 |
+
# =========================================================================
|
| 151 |
+
# DEVICE MANAGEMENT
|
| 152 |
+
# =========================================================================
|
| 153 |
+
|
| 154 |
+
def _get_best_device(self, preferred: str = "mps") -> str:
    """
    Resolve the preferred compute device to one that is actually available.

    An explicit 'cpu' request is always honored. If the preferred
    accelerator is unavailable, falls back to any other available
    accelerator before settling on CPU (previously an unavailable
    preferred accelerator dropped straight to CPU even when another
    accelerator existed).

    Args:
        preferred: Preferred device ('mps', 'cuda', 'cpu').

    Returns:
        Best available device string.
    """
    import torch

    mps_ok = torch.backends.mps.is_available()
    cuda_ok = torch.cuda.is_available()

    if preferred == "mps" and mps_ok:
        logger.info("Using MPS (Metal Performance Shaders) for Apple Silicon")
        return "mps"
    if preferred == "cuda" and cuda_ok:
        logger.info(f"Using CUDA: {torch.cuda.get_device_name(0)}")
        return "cuda"

    # Preferred accelerator unavailable: fall back to the best remaining
    # accelerator unless CPU was explicitly requested.
    if preferred != "cpu":
        if cuda_ok:
            logger.info(f"Using CUDA: {torch.cuda.get_device_name(0)}")
            return "cuda"
        if mps_ok:
            logger.info("Using MPS (Metal Performance Shaders) for Apple Silicon")
            return "mps"

    logger.info("Using CPU for inference")
    return "cpu"
|
| 175 |
+
|
| 176 |
+
# =========================================================================
|
| 177 |
+
# MODEL LOADING
|
| 178 |
+
# =========================================================================
|
| 179 |
+
|
| 180 |
+
def load_model(self) -> bool:
|
| 181 |
+
"""
|
| 182 |
+
Load the YOLOv11 model into memory.
|
| 183 |
+
|
| 184 |
+
Returns:
|
| 185 |
+
True if model loaded successfully
|
| 186 |
+
"""
|
| 187 |
+
if self._is_loaded:
|
| 188 |
+
logger.info("Model already loaded")
|
| 189 |
+
return True
|
| 190 |
+
|
| 191 |
+
try:
|
| 192 |
+
from ultralytics import YOLO
|
| 193 |
+
|
| 194 |
+
# Check if model file exists
|
| 195 |
+
if not Path(self.model_path).exists():
|
| 196 |
+
logger.warning(f"Model file not found at {self.model_path}")
|
| 197 |
+
logger.warning("Using placeholder - please provide trained model")
|
| 198 |
+
|
| 199 |
+
# Create a placeholder with pretrained YOLOv11n for testing
|
| 200 |
+
# Replace this with your actual trained model
|
| 201 |
+
logger.info("Loading pretrained YOLOv11n as placeholder...")
|
| 202 |
+
self._model = YOLO("yolo11n.pt") # Downloads pretrained model
|
| 203 |
+
self._is_placeholder = True
|
| 204 |
+
else:
|
| 205 |
+
logger.info(f"Loading YOLOv11 model from {self.model_path}...")
|
| 206 |
+
self._model = YOLO(self.model_path)
|
| 207 |
+
self._is_placeholder = False
|
| 208 |
+
|
| 209 |
+
# Move model to device
|
| 210 |
+
self._model.to(self.device)
|
| 211 |
+
|
| 212 |
+
self._is_loaded = True
|
| 213 |
+
logger.info(f"✅ YOLOv11 model loaded successfully on {self.device}!")
|
| 214 |
+
|
| 215 |
+
return True
|
| 216 |
+
|
| 217 |
+
except ImportError:
|
| 218 |
+
logger.error("Ultralytics not installed!")
|
| 219 |
+
logger.error("Install with: pip install ultralytics")
|
| 220 |
+
raise ImportError("ultralytics package is required")
|
| 221 |
+
|
| 222 |
+
except Exception as e:
|
| 223 |
+
logger.error(f"Failed to load model: {e}")
|
| 224 |
+
self._is_loaded = False
|
| 225 |
+
raise RuntimeError(f"Could not load YOLOv11 model: {e}")
|
| 226 |
+
|
| 227 |
+
def unload_model(self):
|
| 228 |
+
"""Unload model from memory."""
|
| 229 |
+
if self._model is not None:
|
| 230 |
+
del self._model
|
| 231 |
+
self._model = None
|
| 232 |
+
self._is_loaded = False
|
| 233 |
+
|
| 234 |
+
# Clear GPU cache
|
| 235 |
+
import torch
|
| 236 |
+
if self.device == "mps":
|
| 237 |
+
torch.mps.empty_cache()
|
| 238 |
+
elif self.device == "cuda":
|
| 239 |
+
torch.cuda.empty_cache()
|
| 240 |
+
|
| 241 |
+
logger.info("Model unloaded from memory")
|
| 242 |
+
|
| 243 |
+
    @property
    def is_loaded(self) -> bool:
        """Check if model is currently loaded."""
        # Reflects the flag maintained by load_model() / unload_model().
        return self._is_loaded
|
| 247 |
+
|
| 248 |
+
# =========================================================================
|
| 249 |
+
# IMAGE PREPROCESSING
|
| 250 |
+
# =========================================================================
|
| 251 |
+
|
| 252 |
+
def preprocess_image(
|
| 253 |
+
self,
|
| 254 |
+
image: Union[str, Path, Image.Image, np.ndarray]
|
| 255 |
+
) -> Image.Image:
|
| 256 |
+
"""
|
| 257 |
+
Preprocess image for model inference.
|
| 258 |
+
|
| 259 |
+
Args:
|
| 260 |
+
image: Input image (file path, PIL Image, or numpy array)
|
| 261 |
+
|
| 262 |
+
Returns:
|
| 263 |
+
Preprocessed PIL Image
|
| 264 |
+
"""
|
| 265 |
+
# Load image if path provided
|
| 266 |
+
if isinstance(image, (str, Path)):
|
| 267 |
+
image_path = Path(image)
|
| 268 |
+
if not image_path.exists():
|
| 269 |
+
raise FileNotFoundError(f"Image not found: {image_path}")
|
| 270 |
+
image = Image.open(image_path)
|
| 271 |
+
|
| 272 |
+
# Convert numpy array to PIL Image
|
| 273 |
+
elif isinstance(image, np.ndarray):
|
| 274 |
+
# Handle different array formats
|
| 275 |
+
if image.dtype != np.uint8:
|
| 276 |
+
image = (image * 255).astype(np.uint8)
|
| 277 |
+
if len(image.shape) == 2:
|
| 278 |
+
image = Image.fromarray(image, mode='L').convert('RGB')
|
| 279 |
+
elif image.shape[2] == 4:
|
| 280 |
+
image = Image.fromarray(image, mode='RGBA').convert('RGB')
|
| 281 |
+
else:
|
| 282 |
+
image = Image.fromarray(image, mode='RGB')
|
| 283 |
+
|
| 284 |
+
# Ensure PIL Image
|
| 285 |
+
if not isinstance(image, Image.Image):
|
| 286 |
+
raise ValueError(f"Unsupported image type: {type(image)}")
|
| 287 |
+
|
| 288 |
+
# Convert to RGB if necessary
|
| 289 |
+
if image.mode != 'RGB':
|
| 290 |
+
image = image.convert('RGB')
|
| 291 |
+
|
| 292 |
+
return image
|
| 293 |
+
|
| 294 |
+
def validate_image(self, image: Image.Image) -> Tuple[bool, str]:
|
| 295 |
+
"""
|
| 296 |
+
Validate image for disease detection.
|
| 297 |
+
|
| 298 |
+
Args:
|
| 299 |
+
image: PIL Image to validate
|
| 300 |
+
|
| 301 |
+
Returns:
|
| 302 |
+
Tuple of (is_valid, message)
|
| 303 |
+
"""
|
| 304 |
+
# Check image size
|
| 305 |
+
width, height = image.size
|
| 306 |
+
min_size = 64
|
| 307 |
+
max_size = 8192
|
| 308 |
+
|
| 309 |
+
if width < min_size or height < min_size:
|
| 310 |
+
return False, f"Image too small. Minimum size is {min_size}x{min_size} pixels."
|
| 311 |
+
|
| 312 |
+
if width > max_size or height > max_size:
|
| 313 |
+
return False, f"Image too large. Maximum size is {max_size}x{max_size} pixels."
|
| 314 |
+
|
| 315 |
+
# Check aspect ratio (should be reasonable for a leaf photo)
|
| 316 |
+
aspect_ratio = max(width, height) / min(width, height)
|
| 317 |
+
if aspect_ratio > 10:
|
| 318 |
+
return False, "Image aspect ratio is too extreme. Please take a more centered photo."
|
| 319 |
+
|
| 320 |
+
return True, "Image is valid"
|
| 321 |
+
|
| 322 |
+
# =========================================================================
|
| 323 |
+
# INFERENCE
|
| 324 |
+
# =========================================================================
|
| 325 |
+
|
| 326 |
+
def predict(
|
| 327 |
+
self,
|
| 328 |
+
image: Union[str, Path, Image.Image, np.ndarray],
|
| 329 |
+
return_all: bool = False
|
| 330 |
+
) -> Union[PredictionResult, List[PredictionResult]]:
|
| 331 |
+
"""
|
| 332 |
+
Run disease detection on an image.
|
| 333 |
+
|
| 334 |
+
Args:
|
| 335 |
+
image: Input image (file path, PIL Image, or numpy array)
|
| 336 |
+
return_all: If True, return all predictions; otherwise return top prediction
|
| 337 |
+
|
| 338 |
+
Returns:
|
| 339 |
+
PredictionResult or list of PredictionResults
|
| 340 |
+
"""
|
| 341 |
+
# Ensure model is loaded
|
| 342 |
+
if not self._is_loaded:
|
| 343 |
+
self.load_model()
|
| 344 |
+
|
| 345 |
+
# Preprocess image
|
| 346 |
+
pil_image = self.preprocess_image(image)
|
| 347 |
+
|
| 348 |
+
# Validate image
|
| 349 |
+
is_valid, message = self.validate_image(pil_image)
|
| 350 |
+
if not is_valid:
|
| 351 |
+
raise ValueError(message)
|
| 352 |
+
|
| 353 |
+
try:
|
| 354 |
+
# Run inference
|
| 355 |
+
results = self._model(
|
| 356 |
+
pil_image,
|
| 357 |
+
conf=self.confidence_threshold,
|
| 358 |
+
iou=self.iou_threshold,
|
| 359 |
+
imgsz=self.input_size,
|
| 360 |
+
device=self.device,
|
| 361 |
+
verbose=False
|
| 362 |
+
)
|
| 363 |
+
|
| 364 |
+
# Parse results
|
| 365 |
+
predictions = self._parse_results(results)
|
| 366 |
+
|
| 367 |
+
if not predictions:
|
| 368 |
+
# No confident detection - return low confidence result
|
| 369 |
+
logger.warning("No confident detection found")
|
| 370 |
+
return self._create_low_confidence_result()
|
| 371 |
+
|
| 372 |
+
# Return results
|
| 373 |
+
if return_all:
|
| 374 |
+
return predictions
|
| 375 |
+
else:
|
| 376 |
+
return predictions[0] # Top prediction
|
| 377 |
+
|
| 378 |
+
except Exception as e:
|
| 379 |
+
logger.error(f"Inference error: {e}")
|
| 380 |
+
raise RuntimeError(f"Disease detection failed: {e}")
|
| 381 |
+
|
| 382 |
+
def predict_with_visualization(
|
| 383 |
+
self,
|
| 384 |
+
image: Union[str, Path, Image.Image, np.ndarray]
|
| 385 |
+
) -> Tuple[PredictionResult, Image.Image]:
|
| 386 |
+
"""
|
| 387 |
+
Run detection and return annotated image.
|
| 388 |
+
|
| 389 |
+
Args:
|
| 390 |
+
image: Input image
|
| 391 |
+
|
| 392 |
+
Returns:
|
| 393 |
+
Tuple of (PredictionResult, annotated PIL Image)
|
| 394 |
+
"""
|
| 395 |
+
if not self._is_loaded:
|
| 396 |
+
self.load_model()
|
| 397 |
+
|
| 398 |
+
pil_image = self.preprocess_image(image)
|
| 399 |
+
|
| 400 |
+
# Run inference
|
| 401 |
+
results = self._model(
|
| 402 |
+
pil_image,
|
| 403 |
+
conf=self.confidence_threshold,
|
| 404 |
+
iou=self.iou_threshold,
|
| 405 |
+
imgsz=self.input_size,
|
| 406 |
+
device=self.device,
|
| 407 |
+
verbose=False
|
| 408 |
+
)
|
| 409 |
+
|
| 410 |
+
# Get prediction
|
| 411 |
+
predictions = self._parse_results(results)
|
| 412 |
+
prediction = predictions[0] if predictions else self._create_low_confidence_result()
|
| 413 |
+
|
| 414 |
+
# Get annotated image
|
| 415 |
+
annotated = results[0].plot() # Returns numpy array with annotations
|
| 416 |
+
annotated_image = Image.fromarray(annotated)
|
| 417 |
+
|
| 418 |
+
return prediction, annotated_image
|
| 419 |
+
|
| 420 |
+
    def _parse_results(self, results) -> List[PredictionResult]:
        """
        Parse YOLO results into PredictionResult objects.

        Handles both classification heads (``result.probs``) and detection
        heads (``result.boxes``), so the same code path works whether the
        loaded checkpoint is a classifier or a detector.

        Args:
            results: YOLO inference results

        Returns:
            List of PredictionResult objects sorted by confidence
        """
        predictions = []

        for result in results:
            # Check if we have classification results (for classification model)
            if hasattr(result, 'probs') and result.probs is not None:
                probs = result.probs

                # Get top prediction
                top_idx = int(probs.top1)
                top_conf = float(probs.top1conf)

                # Handle placeholder model (pretrained YOLO): its class ids
                # don't match ours, so wrap them into our class range.
                if hasattr(self, '_is_placeholder') and self._is_placeholder:
                    # Map to our classes for demo purposes
                    top_idx = top_idx % len(self.CLASS_NAMES)

                # For a real trained model, indices beyond CLASS_NAMES are
                # silently dropped rather than crashing the lookup tables.
                if top_idx < len(self.CLASS_NAMES):
                    prediction = PredictionResult(
                        class_index=top_idx,
                        class_name=self.CLASS_NAMES[top_idx],
                        disease_key=self.CLASS_TO_KEY[top_idx],
                        confidence=top_conf,
                        crop_type=self.CLASS_TO_CROP[top_idx],
                        is_healthy=top_idx in self.HEALTHY_INDICES
                    )
                    predictions.append(prediction)

            # Check for detection results (for detection model)
            elif hasattr(result, 'boxes') and result.boxes is not None:
                boxes = result.boxes

                for i in range(len(boxes)):
                    cls_idx = int(boxes.cls[i])
                    conf = float(boxes.conf[i])
                    bbox = boxes.xyxy[i].tolist() if boxes.xyxy is not None else None

                    # Handle placeholder model
                    if hasattr(self, '_is_placeholder') and self._is_placeholder:
                        cls_idx = cls_idx % len(self.CLASS_NAMES)

                    if cls_idx < len(self.CLASS_NAMES):
                        prediction = PredictionResult(
                            class_index=cls_idx,
                            class_name=self.CLASS_NAMES[cls_idx],
                            disease_key=self.CLASS_TO_KEY[cls_idx],
                            confidence=conf,
                            crop_type=self.CLASS_TO_CROP[cls_idx],
                            is_healthy=cls_idx in self.HEALTHY_INDICES,
                            bbox=bbox
                        )
                        predictions.append(prediction)

        # Sort by confidence (highest first)
        predictions.sort(key=lambda x: x.confidence, reverse=True)

        return predictions
|
| 486 |
+
|
| 487 |
+
    def _create_low_confidence_result(self) -> PredictionResult:
        """Create a result indicating low confidence / no detection."""
        # Sentinel values: class_index -1 and confidence 0.0 signal
        # "no detection" to downstream consumers.
        return PredictionResult(
            class_index=-1,
            class_name="Unknown",
            disease_key="unknown",
            confidence=0.0,
            crop_type="unknown",
            is_healthy=False
        )
|
| 497 |
+
|
| 498 |
+
# =========================================================================
|
| 499 |
+
# BATCH INFERENCE
|
| 500 |
+
# =========================================================================
|
| 501 |
+
|
| 502 |
+
def predict_batch(
|
| 503 |
+
self,
|
| 504 |
+
images: List[Union[str, Path, Image.Image, np.ndarray]]
|
| 505 |
+
) -> List[PredictionResult]:
|
| 506 |
+
"""
|
| 507 |
+
Run detection on multiple images.
|
| 508 |
+
|
| 509 |
+
Args:
|
| 510 |
+
images: List of input images
|
| 511 |
+
|
| 512 |
+
Returns:
|
| 513 |
+
List of PredictionResult objects (one per image)
|
| 514 |
+
"""
|
| 515 |
+
if not self._is_loaded:
|
| 516 |
+
self.load_model()
|
| 517 |
+
|
| 518 |
+
results = []
|
| 519 |
+
for image in images:
|
| 520 |
+
try:
|
| 521 |
+
result = self.predict(image)
|
| 522 |
+
results.append(result)
|
| 523 |
+
except Exception as e:
|
| 524 |
+
logger.error(f"Failed to process image: {e}")
|
| 525 |
+
results.append(self._create_low_confidence_result())
|
| 526 |
+
|
| 527 |
+
return results
|
| 528 |
+
|
| 529 |
+
# =========================================================================
|
| 530 |
+
# UTILITY METHODS
|
| 531 |
+
# =========================================================================
|
| 532 |
+
|
| 533 |
+
def get_class_info(self, class_index: int) -> Dict:
|
| 534 |
+
"""
|
| 535 |
+
Get information about a class by index.
|
| 536 |
+
|
| 537 |
+
Args:
|
| 538 |
+
class_index: Index of the class (0-9)
|
| 539 |
+
|
| 540 |
+
Returns:
|
| 541 |
+
Dictionary with class information
|
| 542 |
+
"""
|
| 543 |
+
if class_index < 0 or class_index >= len(self.CLASS_NAMES):
|
| 544 |
+
return {
|
| 545 |
+
"class_index": class_index,
|
| 546 |
+
"class_name": "Unknown",
|
| 547 |
+
"disease_key": "unknown",
|
| 548 |
+
"crop_type": "unknown",
|
| 549 |
+
"is_healthy": False
|
| 550 |
+
}
|
| 551 |
+
|
| 552 |
+
return {
|
| 553 |
+
"class_index": class_index,
|
| 554 |
+
"class_name": self.CLASS_NAMES[class_index],
|
| 555 |
+
"disease_key": self.CLASS_TO_KEY[class_index],
|
| 556 |
+
"crop_type": self.CLASS_TO_CROP[class_index],
|
| 557 |
+
"is_healthy": class_index in self.HEALTHY_INDICES
|
| 558 |
+
}
|
| 559 |
+
|
| 560 |
+
def get_model_info(self) -> Dict:
|
| 561 |
+
"""Get information about the loaded model."""
|
| 562 |
+
info = {
|
| 563 |
+
"model_path": self.model_path,
|
| 564 |
+
"is_loaded": self._is_loaded,
|
| 565 |
+
"device": self.device,
|
| 566 |
+
"confidence_threshold": self.confidence_threshold,
|
| 567 |
+
"input_size": self.input_size,
|
| 568 |
+
"num_classes": len(self.CLASS_NAMES),
|
| 569 |
+
"classes": self.CLASS_NAMES
|
| 570 |
+
}
|
| 571 |
+
|
| 572 |
+
if self._is_loaded and hasattr(self, '_is_placeholder'):
|
| 573 |
+
info["is_placeholder"] = self._is_placeholder
|
| 574 |
+
|
| 575 |
+
return info
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
# =============================================================================
|
| 579 |
+
# SINGLETON INSTANCE
|
| 580 |
+
# =============================================================================
|
| 581 |
+
|
| 582 |
+
_model_instance: Optional[YOLOModel] = None


def get_yolo_model() -> YOLOModel:
    """
    Get the singleton YOLO model instance.

    The instance is built lazily from ``yolo_config`` on first access and
    reused on every subsequent call.

    Returns:
        YOLOModel instance
    """
    global _model_instance

    if _model_instance is None:
        from config import yolo_config

        settings = {
            "model_path": str(yolo_config.model_path),
            "confidence_threshold": yolo_config.confidence_threshold,
            "iou_threshold": yolo_config.iou_threshold,
            "device": yolo_config.device,
            "input_size": yolo_config.input_size,
        }
        _model_instance = YOLOModel(**settings)

    return _model_instance
|
| 606 |
+
|
| 607 |
+
|
| 608 |
+
def unload_yolo_model():
    """Unload the singleton YOLO model to free memory."""
    global _model_instance

    if _model_instance is None:
        return

    _model_instance.unload_model()
    _model_instance = None
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
# =============================================================================
|
| 618 |
+
# CONVENIENCE FUNCTIONS
|
| 619 |
+
# =============================================================================
|
| 620 |
+
|
| 621 |
+
def detect_disease(
    image: Union[str, Path, Image.Image, np.ndarray]
) -> PredictionResult:
    """
    Convenience function to detect disease in an image.

    Args:
        image: Input image (path, PIL Image, or numpy array)

    Returns:
        PredictionResult with disease information
    """
    # Delegate to the shared singleton so the model loads only once.
    return get_yolo_model().predict(image)
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
def detect_disease_with_image(
    image: Union[str, Path, Image.Image, np.ndarray]
) -> Tuple[PredictionResult, Image.Image]:
    """
    Detect disease and return annotated image.

    Args:
        image: Input image

    Returns:
        Tuple of (PredictionResult, annotated Image)
    """
    # Delegate to the shared singleton so the model loads only once.
    return get_yolo_model().predict_with_visualization(image)
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
# =============================================================================
|
| 654 |
+
# MAIN - Test the model
|
| 655 |
+
# =============================================================================
|
| 656 |
+
|
| 657 |
+
if __name__ == "__main__":
    # Smoke test: checks device availability, model loading, and the
    # class-index mappings. Run directly: `python models/yolo_model.py`.
    import torch

    print("=" * 60)
    print("YOLOv11 Model Test")
    print("=" * 60)

    # Check device
    print("\n1. Checking compute device...")
    print(f"   PyTorch version: {torch.__version__}")
    print(f"   MPS available: {torch.backends.mps.is_available()}")
    print(f"   MPS built: {torch.backends.mps.is_built()}")

    # Initialize model
    print("\n2. Initializing YOLOv11 model...")
    model = YOLOModel()

    # Load model
    print("\n3. Loading model...")
    model.load_model()

    # Print model info
    print("\n4. Model information:")
    info = model.get_model_info()
    for key, value in info.items():
        print(f"   {key}: {value}")

    # Test with a sample image (if available)
    print("\n5. Testing inference...")
    print("   To test with an actual image, run:")
    print("   >>> result = model.predict('path/to/your/image.jpg')")
    print("   >>> print(result)")

    # Print class mappings
    print("\n6. Class mappings:")
    for idx, name in enumerate(model.CLASS_NAMES):
        crop = model.CLASS_TO_CROP[idx]
        healthy = "✓ Healthy" if idx in model.HEALTHY_INDICES else "✗ Disease"
        print(f"   {idx}: {name} ({crop}) - {healthy}")

    print("\n" + "=" * 60)
    print("✅ YOLOv11 model test completed!")
    print("=" * 60)
|
models/__init__.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Models Package
|
| 3 |
+
=======================
|
| 4 |
+
AI model wrappers for the FarmEyes application.
|
| 5 |
+
|
| 6 |
+
Models:
|
| 7 |
+
- natlas_model: N-ATLaS hybrid model (API + GGUF) for translation and chat
|
| 8 |
+
- yolo_model: YOLOv11 for crop disease detection
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from models.natlas_model import (
|
| 12 |
+
NATLaSModel,
|
| 13 |
+
HuggingFaceAPIClient,
|
| 14 |
+
LocalGGUFModel,
|
| 15 |
+
get_natlas_model,
|
| 16 |
+
unload_natlas_model,
|
| 17 |
+
translate_text,
|
| 18 |
+
translate_batch,
|
| 19 |
+
LANGUAGE_NAMES,
|
| 20 |
+
NATIVE_LANGUAGE_NAMES
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from models.yolo_model import (
|
| 24 |
+
YOLOModel,
|
| 25 |
+
PredictionResult,
|
| 26 |
+
get_yolo_model,
|
| 27 |
+
unload_yolo_model,
|
| 28 |
+
detect_disease,
|
| 29 |
+
detect_disease_with_image
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
# Public API of the models package — keep in sync with the imports above.
__all__ = [
    # N-ATLaS
    "NATLaSModel",
    "HuggingFaceAPIClient",
    "LocalGGUFModel",
    "get_natlas_model",
    "unload_natlas_model",
    "translate_text",
    "translate_batch",
    "LANGUAGE_NAMES",
    "NATIVE_LANGUAGE_NAMES",

    # YOLO
    "YOLOModel",
    "PredictionResult",
    "get_yolo_model",
    "unload_yolo_model",
    "detect_disease",
    "detect_disease_with_image"
]
|
models/farmeyes_yolov11.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:db0ae83b787eb371120737cdf92519406b5dc01aa74faa9963d7a36ed730a1b6
|
| 3 |
+
size 19189523
|
models/natlas_model.py
ADDED
|
@@ -0,0 +1,647 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes N-ATLaS Model Integration (Hybrid)
|
| 3 |
+
============================================
|
| 4 |
+
HYBRID APPROACH:
|
| 5 |
+
1. PRIMARY: HuggingFace Inference API (fast, cloud-based)
|
| 6 |
+
2. FALLBACK: Local GGUF model (downloads at runtime, always works)
|
| 7 |
+
|
| 8 |
+
API Model: NCAIR1/N-ATLaS
|
| 9 |
+
GGUF Model: tosinamuda/N-ATLaS-GGUF (N-ATLaS-GGUF-Q4_K_M.gguf)
|
| 10 |
+
|
| 11 |
+
HUGGINGFACE SPACES OPTIMIZED:
|
| 12 |
+
- Downloads GGUF model at runtime (no need to upload 4.92GB)
|
| 13 |
+
- CPU-only mode for free tier (no GPU layers)
|
| 14 |
+
- Caches model in HF cache directory
|
| 15 |
+
|
| 16 |
+
Languages: English, Hausa, Yoruba, Igbo
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
import os
|
| 20 |
+
import sys
|
| 21 |
+
from pathlib import Path
|
| 22 |
+
from typing import Optional, Dict, List
|
| 23 |
+
import logging
|
| 24 |
+
import time
|
| 25 |
+
|
| 26 |
+
logging.basicConfig(level=logging.INFO)
|
| 27 |
+
logger = logging.getLogger(__name__)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# =============================================================================
|
| 31 |
+
# ENVIRONMENT DETECTION
|
| 32 |
+
# =============================================================================
|
| 33 |
+
|
| 34 |
+
# Check if running on HuggingFace Spaces.
# SPACE_ID appears to be injected by the Spaces runtime — TODO confirm.
IS_HF_SPACES = os.environ.get("SPACE_ID") is not None

# On Spaces, use CPU-only mode (free tier has no GPU)
if IS_HF_SPACES:
    logger.info("🤗 Running on HuggingFace Spaces - CPU mode enabled")
    DEFAULT_GPU_LAYERS = 0  # CPU only
    DEFAULT_THREADS = 4  # Spaces has multi-core CPU
else:
    logger.info("🖥️ Running locally")
    DEFAULT_GPU_LAYERS = -1  # Use all GPU layers (for Apple Silicon MPS)
    DEFAULT_THREADS = 4
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# =============================================================================
|
| 49 |
+
# LANGUAGE MAPPINGS
|
| 50 |
+
# =============================================================================
|
| 51 |
+
|
| 52 |
+
# ISO 639-1 code -> English language name (used when building prompts).
LANGUAGE_NAMES = {
    "en": "English",
    "ha": "Hausa",
    "yo": "Yoruba",
    "ig": "Igbo"
}

# ISO 639-1 code -> language name as written in the language itself
# (for user-facing display).
NATIVE_LANGUAGE_NAMES = {
    "en": "English",
    "ha": "Yaren Hausa",
    "yo": "Èdè Yorùbá",
    "ig": "Asụsụ Igbo"
}
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# =============================================================================
|
| 68 |
+
# HUGGINGFACE INFERENCE API CLIENT (PRIMARY)
|
| 69 |
+
# =============================================================================
|
| 70 |
+
|
| 71 |
+
class HuggingFaceAPIClient:
|
| 72 |
+
"""
|
| 73 |
+
Client for HuggingFace Serverless Inference API.
|
| 74 |
+
Primary method - fast cloud-based inference.
|
| 75 |
+
|
| 76 |
+
NOTE: This API may not always be available for N-ATLaS model.
|
| 77 |
+
The GGUF fallback ensures reliability.
|
| 78 |
+
"""
|
| 79 |
+
|
| 80 |
+
MODEL_ID = "NCAIR1/N-ATLaS"
|
| 81 |
+
API_URL = "https://api-inference.huggingface.co/models/NCAIR1/N-ATLaS"
|
| 82 |
+
|
| 83 |
+
def __init__(self, api_token: Optional[str] = None):
|
| 84 |
+
self.api_token = api_token or os.environ.get("HF_TOKEN") or os.environ.get("HUGGINGFACE_TOKEN")
|
| 85 |
+
self._is_available = None
|
| 86 |
+
self._last_check = 0
|
| 87 |
+
self._check_interval = 300 # 5 minutes
|
| 88 |
+
|
| 89 |
+
if self.api_token:
|
| 90 |
+
logger.info("✅ HuggingFace API token found")
|
| 91 |
+
else:
|
| 92 |
+
logger.warning("⚠️ No HF_TOKEN set - will use GGUF model only")
|
| 93 |
+
|
| 94 |
+
def is_available(self) -> bool:
|
| 95 |
+
"""Check if API is available."""
|
| 96 |
+
if not self.api_token:
|
| 97 |
+
return False
|
| 98 |
+
|
| 99 |
+
current_time = time.time()
|
| 100 |
+
if self._is_available is not None and current_time - self._last_check < self._check_interval:
|
| 101 |
+
return self._is_available
|
| 102 |
+
|
| 103 |
+
try:
|
| 104 |
+
import requests
|
| 105 |
+
|
| 106 |
+
headers = {"Authorization": "Bearer " + self.api_token}
|
| 107 |
+
response = requests.get(
|
| 108 |
+
"https://huggingface.co/api/models/" + self.MODEL_ID,
|
| 109 |
+
headers=headers,
|
| 110 |
+
timeout=10
|
| 111 |
+
)
|
| 112 |
+
|
| 113 |
+
self._is_available = response.status_code == 200
|
| 114 |
+
self._last_check = current_time
|
| 115 |
+
|
| 116 |
+
if self._is_available:
|
| 117 |
+
logger.info("✅ HuggingFace API is available")
|
| 118 |
+
else:
|
| 119 |
+
logger.warning("⚠️ HuggingFace API unavailable: " + str(response.status_code))
|
| 120 |
+
|
| 121 |
+
return self._is_available
|
| 122 |
+
|
| 123 |
+
except Exception as e:
|
| 124 |
+
logger.warning("⚠️ API check failed: " + str(e))
|
| 125 |
+
self._is_available = False
|
| 126 |
+
self._last_check = current_time
|
| 127 |
+
return False
|
| 128 |
+
|
| 129 |
+
def generate(
|
| 130 |
+
self,
|
| 131 |
+
prompt: str,
|
| 132 |
+
max_new_tokens: int = 512,
|
| 133 |
+
temperature: float = 0.7,
|
| 134 |
+
top_p: float = 0.9
|
| 135 |
+
) -> Optional[str]:
|
| 136 |
+
"""Generate text using HuggingFace Inference API."""
|
| 137 |
+
if not self.api_token:
|
| 138 |
+
return None
|
| 139 |
+
|
| 140 |
+
try:
|
| 141 |
+
import requests
|
| 142 |
+
|
| 143 |
+
headers = {
|
| 144 |
+
"Authorization": "Bearer " + self.api_token,
|
| 145 |
+
"Content-Type": "application/json"
|
| 146 |
+
}
|
| 147 |
+
|
| 148 |
+
payload = {
|
| 149 |
+
"inputs": prompt,
|
| 150 |
+
"parameters": {
|
| 151 |
+
"max_new_tokens": max_new_tokens,
|
| 152 |
+
"temperature": temperature,
|
| 153 |
+
"top_p": top_p,
|
| 154 |
+
"do_sample": True,
|
| 155 |
+
"return_full_text": False
|
| 156 |
+
},
|
| 157 |
+
"options": {
|
| 158 |
+
"wait_for_model": True
|
| 159 |
+
}
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
logger.info("📡 Calling HuggingFace Inference API...")
|
| 163 |
+
|
| 164 |
+
response = requests.post(
|
| 165 |
+
self.API_URL,
|
| 166 |
+
headers=headers,
|
| 167 |
+
json=payload,
|
| 168 |
+
timeout=120
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
if response.status_code == 200:
|
| 172 |
+
result = response.json()
|
| 173 |
+
if isinstance(result, list) and len(result) > 0:
|
| 174 |
+
text = result[0].get("generated_text", "")
|
| 175 |
+
if text:
|
| 176 |
+
logger.info("✅ API generation successful: " + str(len(text)) + " chars")
|
| 177 |
+
return text
|
| 178 |
+
return None
|
| 179 |
+
else:
|
| 180 |
+
logger.warning("⚠️ API request failed: " + str(response.status_code))
|
| 181 |
+
return None
|
| 182 |
+
|
| 183 |
+
except Exception as e:
|
| 184 |
+
logger.error("❌ API call failed: " + str(e))
|
| 185 |
+
return None
|
| 186 |
+
|
| 187 |
+
def translate(self, text: str, target_language: str) -> Optional[str]:
    """Translate ``text`` into ``target_language`` via the Inference API.

    English targets and empty input are passed through unchanged.
    Returns None when generation fails.
    """
    # Nothing to do for English or empty input.
    if target_language == "en" or not text:
        return text

    lang_name = LANGUAGE_NAMES.get(target_language, target_language)
    translated = self.generate(
        "Translate to " + lang_name + ": " + text,
        max_new_tokens=len(text) * 3,
        temperature=0.3
    )

    if not translated:
        return None

    translated = translated.strip()
    # Models often echo a label such as "Hausa:" — strip it off.
    for label in (lang_name + ":", "Translation:"):
        if translated.lower().startswith(label.lower()):
            translated = translated[len(label):].strip()
    return translated
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# =============================================================================
|
| 209 |
+
# LOCAL GGUF MODEL (FALLBACK - ALWAYS WORKS)
|
| 210 |
+
# =============================================================================
|
| 211 |
+
|
| 212 |
+
class LocalGGUFModel:
    """
    Local GGUF model using llama-cpp-python.
    FALLBACK: Always works - downloads model at runtime if not present.

    Model: tosinamuda/N-ATLaS-GGUF
    File: N-ATLaS-GGUF-Q4_K_M.gguf (4.92GB)

    HUGGINGFACE SPACES:
    - Model downloads automatically on first startup (~5-10 min)
    - Cached in HF cache directory
    - Uses CPU-only inference (free tier)
    """

    HF_REPO = "tosinamuda/N-ATLaS-GGUF"
    MODEL_FILENAME = "N-ATLaS-GGUF-Q4_K_M.gguf"

    def __init__(
        self,
        model_path: Optional[str] = None,
        n_ctx: int = 2048,  # Reduced for Spaces memory
        n_gpu_layers: int = DEFAULT_GPU_LAYERS,
        n_threads: int = DEFAULT_THREADS,
        n_batch: int = 256,  # Reduced for Spaces memory
        verbose: bool = False
    ):
        """
        Args:
            model_path: Path to a local .gguf file; downloaded when missing.
            n_ctx: Context window size in tokens.
            n_gpu_layers: Layers offloaded to GPU (0 = CPU only).
            n_threads: CPU threads used for inference.
            n_batch: Prompt-processing batch size.
            verbose: Pass-through verbosity flag for llama-cpp.
        """
        self.model_path = model_path
        self.n_ctx = n_ctx
        self.n_gpu_layers = n_gpu_layers
        self.n_threads = n_threads
        self.n_batch = n_batch
        self.verbose = verbose

        # llama_cpp.Llama instance; created lazily by load_model().
        self._model = None
        self._is_loaded = False

        # Log configuration
        logger.info(f"GGUF Config: ctx={n_ctx}, gpu_layers={n_gpu_layers}, threads={n_threads}, batch={n_batch}")

    def download_model(self) -> str:
        """
        Download GGUF model from HuggingFace Hub.

        This is the KEY feature for HuggingFace Spaces:
        - Downloads the 4.92GB model at runtime
        - Caches in HF cache directory
        - No need to upload large model files

        Returns:
            Local filesystem path of the downloaded file.

        Raises:
            Exception: re-raises any download failure after logging it.
        """
        try:
            from huggingface_hub import hf_hub_download

            logger.info("=" * 60)
            logger.info("📥 DOWNLOADING N-ATLaS GGUF MODEL")
            logger.info("=" * 60)
            logger.info(f" Repository: {self.HF_REPO}")
            logger.info(f" File: {self.MODEL_FILENAME}")
            logger.info(f" Size: ~4.92 GB")
            logger.info(" This may take 5-15 minutes on first startup...")
            logger.info("=" * 60)

            # Download with progress.
            # FIX: the resume_download=True argument was removed — it is
            # deprecated in huggingface_hub >= 0.26 (downloads always resume
            # by default) and emits a FutureWarning.
            model_path = hf_hub_download(
                repo_id=self.HF_REPO,
                filename=self.MODEL_FILENAME,
                cache_dir=None  # Use default HF cache
            )

            logger.info("=" * 60)
            logger.info("✅ MODEL DOWNLOAD COMPLETE!")
            logger.info(f" Path: {model_path}")
            logger.info("=" * 60)

            return model_path

        except Exception as e:
            logger.error("=" * 60)
            logger.error("❌ MODEL DOWNLOAD FAILED!")
            logger.error(f" Error: {str(e)}")
            logger.error("=" * 60)
            raise

    def load_model(self) -> bool:
        """Load the GGUF model, downloading it first when not present.

        Returns:
            True on success, False when llama-cpp-python is missing or the
            load itself fails.
        """
        if self._is_loaded:
            return True

        try:
            from llama_cpp import Llama

            # Download if not present
            if self.model_path is None or not Path(self.model_path).exists():
                logger.info("Model not found locally, downloading...")
                self.model_path = self.download_model()

            logger.info("🔄 Loading GGUF model into memory...")
            logger.info(f" Path: {self.model_path}")
            logger.info(f" GPU Layers: {self.n_gpu_layers}")
            logger.info(f" Context: {self.n_ctx}")

            self._model = Llama(
                model_path=self.model_path,
                n_ctx=self.n_ctx,
                n_gpu_layers=self.n_gpu_layers,
                n_threads=self.n_threads,
                n_batch=self.n_batch,
                verbose=self.verbose
            )

            self._is_loaded = True
            logger.info("✅ GGUF model loaded successfully!")
            return True

        except ImportError:
            logger.error("❌ llama-cpp-python not installed!")
            logger.error(" Run: pip install llama-cpp-python")
            return False
        except Exception as e:
            logger.error(f"❌ Model load failed: {str(e)}")
            return False

    def unload_model(self):
        """Unload model to free memory."""
        if self._model is not None:
            del self._model
            self._model = None
            self._is_loaded = False
            logger.info("Model unloaded")

    @property
    def is_loaded(self) -> bool:
        # True once load_model() has completed successfully.
        return self._is_loaded

    def generate(
        self,
        prompt: str,
        max_tokens: int = 512,
        temperature: float = 0.7,
        top_p: float = 0.9,
        stop: Optional[List[str]] = None
    ) -> Optional[str]:
        """Generate text using GGUF model with Llama-3 format.

        Lazily loads the model on first use. Returns None when the model
        cannot be loaded, the generation raises, or the output is empty.
        """
        if not self._is_loaded:
            if not self.load_model():
                return None

        try:
            # Format prompt for Llama-3 chat format
            formatted_prompt = (
                "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
                "You are a helpful AI assistant for African farmers. You help with crop disease diagnosis, "
                "treatment advice, and agricultural questions. Respond in the same language the user writes in."
                "<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n"
                + prompt +
                "<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
            )

            response = self._model(
                formatted_prompt,
                max_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stop=stop or ["<|eot_id|>", "<|end_of_text|>"],
                echo=False
            )

            text = response["choices"][0]["text"].strip()

            # Clean up special tokens the model occasionally leaks into output.
            for token in ["<|eot_id|>", "<|end_of_text|>", "<|start_header_id|>", "<|end_header_id|>"]:
                text = text.replace(token, "")

            text = text.strip()

            if text:
                logger.info(f"✅ GGUF generation: {len(text)} chars")
                return text
            else:
                logger.warning("⚠️ GGUF returned empty response")
                return None

        except Exception as e:
            logger.error(f"❌ GGUF generation error: {str(e)}")
            return None

    def translate(self, text: str, target_language: str) -> Optional[str]:
        """Translate text using GGUF model.

        Unlike the API client, this returns the ORIGINAL text (not None) on
        failure, so the fallback path always yields something displayable.
        """
        if target_language == "en" or not text:
            return text

        lang_name = LANGUAGE_NAMES.get(target_language, target_language)
        prompt = "Translate to " + lang_name + ": " + text

        result = self.generate(
            prompt,
            max_tokens=len(text) * 4,
            temperature=0.3
        )

        if result:
            result = result.strip()
            # Strip label prefixes the model sometimes prepends.
            for prefix in [lang_name + ":", "Translation:", "In " + lang_name + ":"]:
                if result.lower().startswith(prefix.lower()):
                    result = result[len(prefix):].strip()
            return result

        return text

    def chat_response(self, message: str, context: Dict, language: str = "en") -> Optional[str]:
        """Generate chat response with diagnosis context.

        Args:
            message: The farmer's question.
            context: Diagnosis dict; keys crop_type / disease_name /
                severity_level / confidence are read with safe defaults.
            language: ISO-ish code ("en", "ha", "yo", "ig") for the reply.
        """

        crop = context.get("crop_type", "crop").capitalize()
        disease = context.get("disease_name", "unknown disease")
        severity = context.get("severity_level", "unknown")
        confidence = context.get("confidence", 0)
        # Normalize fractional confidence (0-1) to a percentage.
        if confidence <= 1:
            confidence = int(confidence * 100)

        # Language instruction
        lang_instructions = {
            "ha": "Respond in Hausa language.",
            "yo": "Respond in Yoruba language.",
            "ig": "Respond in Igbo language."
        }
        lang_instruction = lang_instructions.get(language, "Respond in English.")

        prompt = (
            "You are FarmEyes, an AI assistant helping African farmers with crop diseases.\n\n"
            "Current diagnosis:\n"
            "- Crop: " + crop + "\n"
            "- Disease: " + disease + "\n"
            "- Severity: " + severity + "\n"
            "- Confidence: " + str(confidence) + "%\n\n"
            + lang_instruction + "\n\n"
            "Farmer's question: " + message + "\n\n"
            "Provide a helpful, practical response about this disease or related farming advice. "
            "Keep it concise (2-3 paragraphs max)."
        )

        return self.generate(prompt, max_tokens=400, temperature=0.7)
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
# =============================================================================
|
| 455 |
+
# HYBRID N-ATLAS MODEL (MAIN CLASS)
|
| 456 |
+
# =============================================================================
|
| 457 |
+
|
| 458 |
+
class NATLaSModel:
    """
    HYBRID N-ATLaS model.

    Strategy:
    1. Try HuggingFace Inference API first (if token available)
    2. Fall back to local GGUF model (downloads at runtime, always works)

    This ensures:
    - Fast responses when API is available
    - Reliable fallback when offline or API fails
    - Works on HuggingFace Spaces (free tier)
    """

    def __init__(
        self,
        api_token: Optional[str] = None,
        prefer_api: bool = True,
        auto_load_local: bool = True,
        **local_kwargs
    ):
        # When True, API generation/translation is attempted before GGUF.
        self.prefer_api = prefer_api

        # Initialize API client (PRIMARY)
        self.api_client = HuggingFaceAPIClient(api_token)

        # Initialize GGUF model (FALLBACK); extra kwargs are forwarded
        # straight to LocalGGUFModel.__init__.
        self.local_model = LocalGGUFModel(**local_kwargs)

        # Translation cache: "<lang>:<hash(text)>" -> translated text
        self._cache: Dict[str, str] = {}

        # Auto-load GGUF for reliable fallback (slow on first startup:
        # may trigger the multi-GB model download).
        if auto_load_local:
            logger.info("🔄 Pre-loading GGUF model for fallback...")
            self.local_model.load_model()

        logger.info("=" * 60)
        logger.info("✅ NATLaSModel (Hybrid) initialized")
        logger.info(f" API token: {'Yes' if self.api_client.api_token else 'No'}")
        logger.info(f" GGUF loaded: {'Yes' if self.local_model.is_loaded else 'No'}")
        logger.info(f" Running on: {'HuggingFace Spaces' if IS_HF_SPACES else 'Local'}")
        logger.info("=" * 60)

    @property
    def is_loaded(self) -> bool:
        # True when either backend (remote API or local GGUF) can serve.
        return self.api_client.is_available() or self.local_model.is_loaded

    def load_model(self) -> bool:
        """Ensure at least one backend is ready; prefers the remote API."""
        if self.api_client.is_available():
            return True
        return self.local_model.load_model()

    def translate(self, text: str, target_language: str, use_cache: bool = True) -> str:
        """
        Translate text using hybrid approach.
        1. Try API first
        2. Fall back to GGUF

        Returns the original text unchanged when no backend produces a
        translation (never returns None).
        """
        if target_language == "en" or not text or not text.strip():
            return text

        # Check cache
        cache_key = target_language + ":" + str(hash(text))
        if use_cache and cache_key in self._cache:
            return self._cache[cache_key]

        result = None

        # Try API first if preferred and available
        if self.prefer_api and self.api_client.api_token:
            logger.info("📡 Trying API translation...")
            result = self.api_client.translate(text, target_language)
            if result:
                logger.info("✅ API translation successful")

        # Fall back to GGUF
        if result is None:
            logger.info("🔄 Using GGUF for translation (fallback)...")
            result = self.local_model.translate(text, target_language)

        # Cache and return. Simple bound: once past 500 entries, evict the
        # 100 oldest (dict preserves insertion order).
        if result and result != text and use_cache:
            self._cache[cache_key] = result
            if len(self._cache) > 500:
                keys = list(self._cache.keys())[:100]
                for k in keys:
                    del self._cache[k]

        return result if result else text

    def translate_batch(self, texts: List[str], target_language: str) -> List[str]:
        """Translate multiple texts (sequentially; each hit goes through the cache)."""
        return [self.translate(text, target_language) for text in texts]

    def generate(self, prompt: str, max_tokens: int = 512, temperature: float = 0.7, **kwargs) -> str:
        """
        Generate text using hybrid approach.
        1. Try API first
        2. Fall back to GGUF

        Returns "" (never None) when both backends fail.
        """
        result = None

        # Try API first if preferred and available
        if self.prefer_api and self.api_client.api_token:
            logger.info("📡 Trying API generation...")
            result = self.api_client.generate(prompt, max_tokens, temperature)
            if result:
                logger.info("✅ API generation successful")

        # Fall back to GGUF
        if result is None:
            logger.info("🔄 Using GGUF for generation (fallback)...")
            result = self.local_model.generate(prompt, max_tokens, temperature)

        return result if result else ""

    def chat_response(self, message: str, context: Dict, language: str = "en") -> str:
        """Generate chat response with context (uses GGUF directly for better context handling)."""
        result = self.local_model.chat_response(message, context, language)
        return result if result else ""

    def load_local_model(self) -> bool:
        # Explicitly load the GGUF fallback model.
        return self.local_model.load_model()

    def unload_local_model(self):
        # Free the memory held by the GGUF fallback model.
        self.local_model.unload_model()

    def get_status(self) -> Dict:
        # Snapshot of backend availability for health/debug endpoints.
        # NOTE: is_available() may perform a network check.
        return {
            "api_available": self.api_client.is_available(),
            "api_token_set": bool(self.api_client.api_token),
            "local_model_loaded": self.local_model.is_loaded,
            "prefer_api": self.prefer_api,
            "cache_size": len(self._cache),
            "running_on": "HuggingFace Spaces" if IS_HF_SPACES else "Local"
        }

    def clear_cache(self):
        # Drop all cached translations.
        self._cache.clear()
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
# =============================================================================
|
| 601 |
+
# SINGLETON
|
| 602 |
+
# =============================================================================
|
| 603 |
+
|
| 604 |
+
# Process-wide singleton instance, created lazily by get_natlas_model().
_model_instance: Optional[NATLaSModel] = None


def get_natlas_model(
    api_token: Optional[str] = None,
    auto_load_local: bool = True,
    **kwargs
) -> NATLaSModel:
    """Return the singleton NATLaSModel, creating it on first call.

    Arguments are only honored on the first call; later calls return the
    already-built instance unchanged.
    """
    global _model_instance

    if _model_instance is not None:
        return _model_instance

    _model_instance = NATLaSModel(
        api_token=api_token,
        prefer_api=True,              # try API first
        auto_load_local=auto_load_local,  # pre-load GGUF as fallback
        **kwargs
    )
    return _model_instance
|
| 624 |
+
|
| 625 |
+
|
| 626 |
+
def unload_natlas_model():
    """Tear down the singleton and release its local model memory."""
    global _model_instance
    if _model_instance is None:
        return
    _model_instance.unload_local_model()
    _model_instance = None
|
| 632 |
+
|
| 633 |
+
|
| 634 |
+
# =============================================================================
|
| 635 |
+
# CONVENIENCE FUNCTIONS
|
| 636 |
+
# =============================================================================
|
| 637 |
+
|
| 638 |
+
def translate_text(text: str, target_language: str) -> str:
    """Module-level shortcut: translate via the singleton model."""
    model = get_natlas_model()
    return model.translate(text, target_language)
|
| 640 |
+
|
| 641 |
+
|
| 642 |
+
def translate_batch(texts: List[str], target_language: str) -> List[str]:
    """Module-level shortcut: batch-translate via the singleton model."""
    model = get_natlas_model()
    return model.translate_batch(texts, target_language)
|
| 644 |
+
|
| 645 |
+
|
| 646 |
+
def generate_text(prompt: str, max_tokens: int = 512) -> str:
    """Module-level shortcut: generate text via the singleton model."""
    model = get_natlas_model()
    return model.generate(prompt, max_tokens=max_tokens)
|
models/yolo_model.py
ADDED
|
@@ -0,0 +1,703 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes YOLOv11 Model Integration
|
| 3 |
+
==================================
|
| 4 |
+
Handles loading and inference with YOLOv11 model for crop disease detection.
|
| 5 |
+
Optimized for Apple Silicon M1 Pro with MPS (Metal Performance Shaders) acceleration.
|
| 6 |
+
|
| 7 |
+
Model: Custom trained YOLOv11 for 6 disease classes (no healthy classes)
|
| 8 |
+
Crops: Cassava, Cocoa, Tomato
|
| 9 |
+
Classes:
|
| 10 |
+
0: Cassava Bacteria Blight
|
| 11 |
+
1: Cassava Mosaic Virus
|
| 12 |
+
2: Cocoa Monilia Disease
|
| 13 |
+
3: Cocoa Phytophthora Disease
|
| 14 |
+
4: Tomato Gray Mold Disease
|
| 15 |
+
5: Tomato Wilt Disease
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
import os
|
| 19 |
+
import sys
|
| 20 |
+
from pathlib import Path
|
| 21 |
+
from typing import Optional, Dict, List, Tuple, Union
|
| 22 |
+
from dataclasses import dataclass
|
| 23 |
+
import logging
|
| 24 |
+
|
| 25 |
+
# Add parent directory to path for imports
|
| 26 |
+
sys.path.append(str(Path(__file__).parent.parent))
|
| 27 |
+
|
| 28 |
+
import numpy as np
|
| 29 |
+
from PIL import Image
|
| 30 |
+
|
| 31 |
+
# Configure logging
|
| 32 |
+
logging.basicConfig(level=logging.INFO)
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
# =============================================================================
|
| 37 |
+
# PREDICTION RESULT DATACLASS
|
| 38 |
+
# =============================================================================
|
| 39 |
+
|
| 40 |
+
@dataclass
class PredictionResult:
    """Container for a single disease prediction."""

    class_index: int                    # predicted class index (0-5)
    class_name: str                     # human-readable class name
    disease_key: str                    # knowledge-base lookup key
    confidence: float                   # confidence score in [0.0, 1.0]
    crop_type: str                      # cassava / cocoa / tomato
    is_healthy: bool                    # always False in the 6-class model
    bbox: Optional[List[float]] = None  # [x1, y1, x2, y2] when available

    def to_dict(self) -> Dict:
        """Convert to dictionary for JSON serialization."""
        payload = {
            "class_index": self.class_index,
            "class_name": self.class_name,
            "disease_key": self.disease_key,
            "confidence": round(self.confidence, 4),
            "confidence_percent": round(self.confidence * 100, 1),
            "crop_type": self.crop_type,
            "is_healthy": self.is_healthy,
            "bbox": self.bbox,
        }
        return payload

    def __repr__(self) -> str:
        return f"PredictionResult({self.class_name}, conf={self.confidence:.2%}, crop={self.crop_type})"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# =============================================================================
|
| 71 |
+
# YOLO MODEL CLASS
|
| 72 |
+
# =============================================================================
|
| 73 |
+
|
| 74 |
+
class YOLOModel:
|
| 75 |
+
"""
|
| 76 |
+
YOLOv11 Model wrapper for FarmEyes crop disease detection.
|
| 77 |
+
Uses Ultralytics library with MPS acceleration for Apple Silicon.
|
| 78 |
+
|
| 79 |
+
6-class model (all diseases, no healthy classes):
|
| 80 |
+
0: Cassava Bacteria Blight
|
| 81 |
+
1: Cassava Mosaic Virus
|
| 82 |
+
2: Cocoa Monilia Disease
|
| 83 |
+
3: Cocoa Phytophthora Disease
|
| 84 |
+
4: Tomato Gray Mold Disease
|
| 85 |
+
5: Tomato Wilt Disease
|
| 86 |
+
"""
|
| 87 |
+
|
| 88 |
+
# Class mappings (must match your trained model - 6 classes)
|
| 89 |
+
CLASS_NAMES: List[str] = [
|
| 90 |
+
"Cassava Bacteria Blight", # Index 0
|
| 91 |
+
"Cassava Mosaic Virus", # Index 1
|
| 92 |
+
"Cocoa Monilia Disease", # Index 2
|
| 93 |
+
"Cocoa Phytophthora Disease", # Index 3
|
| 94 |
+
"Tomato Gray Mold Disease", # Index 4
|
| 95 |
+
"Tomato Wilt Disease" # Index 5
|
| 96 |
+
]
|
| 97 |
+
|
| 98 |
+
# Class index to knowledge base key mapping (6 classes)
|
| 99 |
+
CLASS_TO_KEY: Dict[int, str] = {
|
| 100 |
+
0: "cassava_bacterial_blight",
|
| 101 |
+
1: "cassava_mosaic_virus",
|
| 102 |
+
2: "cocoa_monilia_disease",
|
| 103 |
+
3: "cocoa_phytophthora_disease",
|
| 104 |
+
4: "tomato_gray_mold",
|
| 105 |
+
5: "tomato_wilt_disease"
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
# Class index to crop type mapping (6 classes)
|
| 109 |
+
CLASS_TO_CROP: Dict[int, str] = {
|
| 110 |
+
0: "cassava", # Cassava Bacteria Blight
|
| 111 |
+
1: "cassava", # Cassava Mosaic Virus
|
| 112 |
+
2: "cocoa", # Cocoa Monilia Disease
|
| 113 |
+
3: "cocoa", # Cocoa Phytophthora Disease
|
| 114 |
+
4: "tomato", # Tomato Gray Mold Disease
|
| 115 |
+
5: "tomato" # Tomato Wilt Disease
|
| 116 |
+
}
|
| 117 |
+
|
| 118 |
+
# No healthy class indices in 6-class model (all classes are diseases)
|
| 119 |
+
HEALTHY_INDICES: List[int] = []
|
| 120 |
+
|
| 121 |
+
def __init__(
    self,
    model_path: Optional[str] = None,
    confidence_threshold: float = 0.5,
    iou_threshold: float = 0.45,
    device: str = "mps",
    input_size: int = 640
):
    """
    Initialize YOLOv11 model.

    Args:
        model_path: Path to trained YOLOv11 .pt weights file
        confidence_threshold: Minimum confidence for detections
        iou_threshold: IoU threshold for NMS
        device: Compute device ('mps' for Apple Silicon, 'cuda', 'cpu')
        input_size: Input image size for the model
    """
    # Import config here to avoid circular imports.
    # FIX: only yolo_config is needed; MODELS_DIR was imported but unused.
    from config import yolo_config

    self.model_path = model_path or str(yolo_config.model_path)
    self.confidence_threshold = confidence_threshold
    self.iou_threshold = iou_threshold
    self.input_size = input_size

    # Determine best device (falls back to CPU when preferred is absent)
    self.device = self._get_best_device(device)

    # Model instance (lazy loaded by load_model())
    self._model = None
    self._is_loaded = False

    logger.info("YOLOModel initialized:")
    logger.info(f" Model path: {self.model_path}")
    logger.info(f" Device: {self.device}")
    logger.info(f" Confidence threshold: {self.confidence_threshold}")
    logger.info(f" Input size: {self.input_size}")
    logger.info(f" Number of classes: {len(self.CLASS_NAMES)}")
|
| 161 |
+
# =========================================================================
|
| 162 |
+
# DEVICE MANAGEMENT
|
| 163 |
+
# =========================================================================
|
| 164 |
+
|
| 165 |
+
def _get_best_device(self, preferred: str = "mps") -> str:
    """
    Pick the best available compute device.

    Args:
        preferred: Preferred device ('mps', 'cuda', 'cpu')

    Returns:
        The preferred device when its backend is available, else 'cpu'.
    """
    import torch

    if preferred == "mps" and torch.backends.mps.is_available():
        logger.info("Using MPS (Metal Performance Shaders) for Apple Silicon")
        return "mps"

    if preferred == "cuda" and torch.cuda.is_available():
        logger.info(f"Using CUDA: {torch.cuda.get_device_name(0)}")
        return "cuda"

    logger.info("Using CPU for inference")
    return "cpu"
|
| 186 |
+
|
| 187 |
+
# =========================================================================
|
| 188 |
+
# MODEL LOADING
|
| 189 |
+
# =========================================================================
|
| 190 |
+
|
| 191 |
+
def load_model(self) -> bool:
    """
    Load the YOLOv11 model into memory.

    When the configured weights file is missing, a pretrained YOLOv11n
    checkpoint is loaded instead (placeholder mode, recorded in
    self._is_placeholder) so the service can still run.

    Returns:
        True if model loaded successfully

    Raises:
        ImportError: if the ultralytics package is not installed
        RuntimeError: if loading the weights fails for any other reason
    """
    # Idempotent: a second call is a cheap no-op.
    if self._is_loaded:
        logger.info("Model already loaded")
        return True

    try:
        from ultralytics import YOLO

        # Check if model file exists
        if not Path(self.model_path).exists():
            logger.warning(f"Model file not found at {self.model_path}")
            logger.warning("Using placeholder - please provide trained model")

            # Create a placeholder with pretrained YOLOv11n for testing
            # Replace this with your actual trained model
            logger.info("Loading pretrained YOLOv11n as placeholder...")
            self._model = YOLO("yolo11n.pt")  # Downloads pretrained model
            self._is_placeholder = True
        else:
            logger.info(f"Loading YOLOv11 model from {self.model_path}...")
            self._model = YOLO(self.model_path)
            self._is_placeholder = False

        # Move model to device
        self._model.to(self.device)

        self._is_loaded = True
        logger.info(f"✅ YOLOv11 model loaded successfully on {self.device}!")

        return True

    except ImportError:
        logger.error("Ultralytics not installed!")
        logger.error("Install with: pip install ultralytics")
        raise ImportError("ultralytics package is required")

    except Exception as e:
        logger.error(f"Failed to load model: {e}")
        self._is_loaded = False
        raise RuntimeError(f"Could not load YOLOv11 model: {e}")
|
| 237 |
+
|
| 238 |
+
def unload_model(self):
    """Drop the model from memory and release cached accelerator memory."""
    if self._model is None:
        return  # nothing to release

    self._model = None
    self._is_loaded = False

    # Hand cached allocations back to the accelerator runtime.
    import torch
    if self.device == "mps":
        torch.mps.empty_cache()
    elif self.device == "cuda":
        torch.cuda.empty_cache()

    logger.info("Model unloaded from memory")
|
| 253 |
+
|
| 254 |
+
@property
def is_loaded(self) -> bool:
    """Whether the YOLO model is currently resident in memory."""
    return self._is_loaded
|
| 258 |
+
|
| 259 |
+
# =========================================================================
|
| 260 |
+
# IMAGE PREPROCESSING
|
| 261 |
+
# =========================================================================
|
| 262 |
+
|
| 263 |
+
def preprocess_image(
    self,
    image: Union[str, Path, Image.Image, np.ndarray]
) -> Image.Image:
    """Normalize any supported image input into an RGB PIL image.

    Args:
        image: Path-like, numpy array, or PIL Image.

    Returns:
        An RGB-mode PIL Image ready for inference.

    Raises:
        FileNotFoundError: if a path is given but no file exists there.
        TypeError: for unsupported input types.
    """
    if isinstance(image, (str, Path)):
        src = Path(image)
        if not src.exists():
            raise FileNotFoundError(f"Image not found: {src}")
        candidate = Image.open(src)
    elif isinstance(image, np.ndarray):
        candidate = Image.fromarray(image)
    elif isinstance(image, Image.Image):
        candidate = image
    else:
        raise TypeError(f"Unsupported image type: {type(image)}")

    # YOLO expects 3-channel RGB input.
    return candidate if candidate.mode == "RGB" else candidate.convert("RGB")
|
| 294 |
+
|
| 295 |
+
def validate_image(self, image: Image.Image) -> Tuple[bool, str]:
    """Check that an image's dimensions are within supported bounds.

    Args:
        image: PIL Image to validate.

    Returns:
        Tuple of (is_valid, human-readable message).
    """
    width, height = image.size

    # Reject inputs the detector cannot handle.
    if min(width, height) < 32:
        return False, "Image too small. Minimum size is 32x32 pixels."
    if max(width, height) > 4096:
        return False, "Image too large. Maximum size is 4096x4096 pixels."

    return True, "Image is valid"
|
| 315 |
+
|
| 316 |
+
# =========================================================================
|
| 317 |
+
# INFERENCE
|
| 318 |
+
# =========================================================================
|
| 319 |
+
|
| 320 |
+
def predict(
    self,
    image: Union[str, Path, Image.Image, np.ndarray]
) -> PredictionResult:
    """Run disease detection on a single image.

    Args:
        image: Input image (path, PIL Image, or numpy array).

    Returns:
        The highest-confidence PredictionResult, or a low-confidence
        sentinel when validation or inference fails.
    """
    if not self._is_loaded:
        self.load_model()

    prepared = self.preprocess_image(image)

    ok, reason = self.validate_image(prepared)
    if not ok:
        logger.warning(f"Image validation failed: {reason}")
        return self._create_low_confidence_result()

    try:
        raw = self._model(
            prepared,
            conf=self.confidence_threshold,
            iou=self.iou_threshold,
            imgsz=self.input_size,
            device=self.device,
            verbose=False
        )
        ranked = self._parse_results(raw)
    except Exception as e:
        # Inference errors degrade to a sentinel rather than crashing callers.
        logger.error(f"Inference failed: {e}")
        return self._create_low_confidence_result()

    if not ranked:
        logger.info("No predictions above confidence threshold")
        return self._create_low_confidence_result()

    # Highest-confidence prediction first (sorted by _parse_results).
    return ranked[0]
|
| 369 |
+
|
| 370 |
+
def predict_with_visualization(
    self,
    image: Union[str, Path, Image.Image, np.ndarray]
) -> Tuple[PredictionResult, Image.Image]:
    """Run detection and also return an annotated copy of the image.

    Args:
        image: Input image.

    Returns:
        Tuple of (PredictionResult, annotated PIL Image). On validation or
        inference failure, the unannotated input image is returned instead.
    """
    if not self._is_loaded:
        self.load_model()

    prepared = self.preprocess_image(image)

    ok, reason = self.validate_image(prepared)
    if not ok:
        logger.warning(f"Image validation failed: {reason}")
        return self._create_low_confidence_result(), prepared

    try:
        raw = self._model(
            prepared,
            conf=self.confidence_threshold,
            iou=self.iou_threshold,
            imgsz=self.input_size,
            device=self.device,
            verbose=False
        )
        ranked = self._parse_results(raw)

        # Ultralytics draws the overlay in BGR; flip channels back to RGB.
        overlay = Image.fromarray(raw[0].plot()[..., ::-1])

        top = ranked[0] if ranked else self._create_low_confidence_result()
        return top, overlay

    except Exception as e:
        logger.error(f"Inference with visualization failed: {e}")
        return self._create_low_confidence_result(), prepared
|
| 421 |
+
|
| 422 |
+
def _parse_results(self, results) -> List[PredictionResult]:
    """Parse YOLO results into PredictionResult objects.

    Handles both classification output (``result.probs``) and detection
    output (``result.boxes``); both paths share one mapping from a raw
    class id onto this model's class tables, removing the previous
    duplicated construction logic.

    Args:
        results: YOLO inference results.

    Returns:
        List of PredictionResult objects sorted by confidence (descending).
    """
    # The pretrained placeholder emits COCO class ids, so fold them into
    # our class range for demo purposes. getattr() is safer than hasattr()
    # in case load_model() has not set the flag yet.
    placeholder = getattr(self, '_is_placeholder', False)
    predictions: List[PredictionResult] = []

    def build(idx: int, conf: float, bbox=None):
        # Single place that maps a raw class id to a PredictionResult;
        # returns None for ids outside our class table.
        if placeholder:
            idx = idx % len(self.CLASS_NAMES)
        if idx >= len(self.CLASS_NAMES):
            return None
        return PredictionResult(
            class_index=idx,
            class_name=self.CLASS_NAMES[idx],
            disease_key=self.CLASS_TO_KEY[idx],
            confidence=conf,
            crop_type=self.CLASS_TO_CROP[idx],
            is_healthy=idx in self.HEALTHY_INDICES,  # Always False for 6-class model
            bbox=bbox
        )

    for result in results:
        probs = getattr(result, 'probs', None)
        boxes = getattr(result, 'boxes', None)

        if probs is not None:
            # Classification head: one top-1 prediction per result.
            pred = build(int(probs.top1), float(probs.top1conf))
            if pred is not None:
                predictions.append(pred)
        elif boxes is not None:
            # Detection head: one prediction per detected box.
            for i in range(len(boxes)):
                box = boxes.xyxy[i].tolist() if boxes.xyxy is not None else None
                pred = build(int(boxes.cls[i]), float(boxes.conf[i]), box)
                if pred is not None:
                    predictions.append(pred)

    # Sort by confidence (highest first) so callers can take index 0.
    predictions.sort(key=lambda p: p.confidence, reverse=True)
    return predictions
|
| 488 |
+
|
| 489 |
+
def _create_low_confidence_result(self) -> PredictionResult:
    """Build the sentinel result used when nothing was detected confidently."""
    return PredictionResult(
        class_index=-1,
        class_name="Unknown",
        disease_key="unknown",
        confidence=0.0,
        crop_type="unknown",
        is_healthy=False
    )
|
| 499 |
+
|
| 500 |
+
# =========================================================================
|
| 501 |
+
# BATCH INFERENCE
|
| 502 |
+
# =========================================================================
|
| 503 |
+
|
| 504 |
+
def predict_batch(
    self,
    images: List[Union[str, Path, Image.Image, np.ndarray]]
) -> List[PredictionResult]:
    """Run detection on several images, producing one result per image.

    A failure on a single image yields a low-confidence sentinel for that
    image instead of aborting the whole batch.

    Args:
        images: List of input images.

    Returns:
        List of PredictionResult objects (one per image, in input order).
    """
    if not self._is_loaded:
        self.load_model()

    outcomes: List[PredictionResult] = []
    for img in images:
        try:
            outcomes.append(self.predict(img))
        except Exception as e:
            logger.error(f"Failed to process image: {e}")
            outcomes.append(self._create_low_confidence_result())

    return outcomes
|
| 530 |
+
|
| 531 |
+
# =========================================================================
|
| 532 |
+
# UTILITY METHODS
|
| 533 |
+
# =========================================================================
|
| 534 |
+
|
| 535 |
+
def get_class_info(self, class_index: int) -> Dict:
    """Describe one of the model's classes by index.

    Args:
        class_index: Index of the class (0-5).

    Returns:
        Dict with class_index, class_name, disease_key, crop_type and
        is_healthy; an "Unknown" record for out-of-range indices.
    """
    if 0 <= class_index < len(self.CLASS_NAMES):
        return {
            "class_index": class_index,
            "class_name": self.CLASS_NAMES[class_index],
            "disease_key": self.CLASS_TO_KEY[class_index],
            "crop_type": self.CLASS_TO_CROP[class_index],
            "is_healthy": class_index in self.HEALTHY_INDICES  # Always False for 6-class model
        }

    return {
        "class_index": class_index,
        "class_name": "Unknown",
        "disease_key": "unknown",
        "crop_type": "unknown",
        "is_healthy": False
    }
|
| 561 |
+
|
| 562 |
+
def get_model_info(self) -> Dict:
    """Summarize the model's configuration and load state."""
    summary = {
        "model_path": self.model_path,
        "is_loaded": self._is_loaded,
        "device": self.device,
        "confidence_threshold": self.confidence_threshold,
        "input_size": self.input_size,
        "num_classes": len(self.CLASS_NAMES),
        "classes": self.CLASS_NAMES
    }

    # Only meaningful once load_model() decided placeholder vs trained.
    if self._is_loaded and hasattr(self, '_is_placeholder'):
        summary["is_placeholder"] = self._is_placeholder

    return summary
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
# =============================================================================
|
| 581 |
+
# SINGLETON INSTANCE
|
| 582 |
+
# =============================================================================
|
| 583 |
+
|
| 584 |
+
_model_instance: Optional[YOLOModel] = None  # shared instance, lazily created by get_yolo_model()
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
def get_yolo_model() -> YOLOModel:
    """Return the process-wide YOLOModel, creating it on first use.

    Returns:
        The shared YOLOModel instance, configured from yolo_config.
    """
    global _model_instance

    if _model_instance is None:
        # Deferred import avoids a hard config dependency at module import time.
        from config import yolo_config

        _model_instance = YOLOModel(
            model_path=str(yolo_config.model_path),
            confidence_threshold=yolo_config.confidence_threshold,
            iou_threshold=yolo_config.iou_threshold,
            device=yolo_config.device,
            input_size=yolo_config.input_size
        )

    return _model_instance
|
| 608 |
+
|
| 609 |
+
|
| 610 |
+
def unload_yolo_model():
    """Tear down the singleton YOLO model and free its memory."""
    global _model_instance

    if _model_instance is None:
        return

    _model_instance.unload_model()
    _model_instance = None
|
| 617 |
+
|
| 618 |
+
|
| 619 |
+
# =============================================================================
|
| 620 |
+
# CONVENIENCE FUNCTIONS
|
| 621 |
+
# =============================================================================
|
| 622 |
+
|
| 623 |
+
def detect_disease(
    image: Union[str, Path, Image.Image, np.ndarray]
) -> PredictionResult:
    """Detect disease in an image using the shared model instance.

    Args:
        image: Input image (path, PIL Image, or numpy array).

    Returns:
        PredictionResult with disease information.
    """
    return get_yolo_model().predict(image)
|
| 637 |
+
|
| 638 |
+
|
| 639 |
+
def detect_disease_with_image(
    image: Union[str, Path, Image.Image, np.ndarray]
) -> Tuple[PredictionResult, Image.Image]:
    """Detect disease and also return the annotated image.

    Args:
        image: Input image.

    Returns:
        Tuple of (PredictionResult, annotated Image).
    """
    return get_yolo_model().predict_with_visualization(image)
|
| 653 |
+
|
| 654 |
+
|
| 655 |
+
# =============================================================================
|
| 656 |
+
# MAIN - Test the model
|
| 657 |
+
# =============================================================================
|
| 658 |
+
|
| 659 |
+
if __name__ == "__main__":
    # Smoke test: load the model and print its configuration.
    import torch

    banner = "=" * 60
    print(banner)
    print("YOLOv11 Model Test (6-Class Disease Detection)")
    print(banner)

    # 1. Report the available compute backends.
    print("\n1. Checking compute device...")
    print(f" PyTorch version: {torch.__version__}")
    print(f" MPS available: {torch.backends.mps.is_available()}")
    print(f" MPS built: {torch.backends.mps.is_built()}")

    # 2-3. Build and load the model (falls back to placeholder weights).
    print("\n2. Initializing YOLOv11 model...")
    model = YOLOModel()

    print("\n3. Loading model...")
    model.load_model()

    # 4. Dump the resulting configuration.
    print("\n4. Model information:")
    for key, value in model.get_model_info().items():
        print(f" {key}: {value}")

    # 5. Inference is exercised manually with a real image.
    print("\n5. Testing inference...")
    print(" To test with an actual image, run:")
    print(" >>> result = model.predict('path/to/your/image.jpg')")
    print(" >>> print(result)")

    # 6. Show how class indices map to crops and knowledge-base keys.
    print("\n6. Class mappings (6 classes - all diseases):")
    for idx, name in enumerate(model.CLASS_NAMES):
        print(f" {idx}: {name}")
        print(f" Crop: {model.CLASS_TO_CROP[idx]}")
        print(f" Key: {model.CLASS_TO_KEY[idx]}")

    print("\n" + banner)
    print("✅ YOLOv11 model test completed!")
    print(banner)
|
requirements.txt
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# =============================================================================
|
| 2 |
+
# FarmEyes - Requirements for HuggingFace Spaces
|
| 3 |
+
# =============================================================================
|
| 4 |
+
# Optimized for Docker deployment on HF Spaces free tier (CPU)
|
| 5 |
+
# =============================================================================
|
| 6 |
+
|
| 7 |
+
# -----------------------------------------------------------------------------
|
| 8 |
+
# Web Framework
|
| 9 |
+
# -----------------------------------------------------------------------------
|
| 10 |
+
fastapi>=0.104.0
|
| 11 |
+
uvicorn[standard]>=0.24.0
|
| 12 |
+
python-multipart>=0.0.6
|
| 13 |
+
|
| 14 |
+
# -----------------------------------------------------------------------------
|
| 15 |
+
# AI/ML - Core
|
| 16 |
+
# -----------------------------------------------------------------------------
|
| 17 |
+
torch>=2.0.0
|
| 18 |
+
torchvision>=0.15.0
|
| 19 |
+
ultralytics>=8.0.0
|
| 20 |
+
|
| 21 |
+
# -----------------------------------------------------------------------------
|
| 22 |
+
# AI/ML - Transformers & HuggingFace
|
| 23 |
+
# -----------------------------------------------------------------------------
|
| 24 |
+
transformers>=4.35.0
|
| 25 |
+
huggingface-hub>=0.19.0
|
| 26 |
+
|
| 27 |
+
# -----------------------------------------------------------------------------
|
| 28 |
+
# Audio Processing (Whisper for voice input)
|
| 29 |
+
# -----------------------------------------------------------------------------
|
| 30 |
+
openai-whisper>=20231117
|
| 31 |
+
soundfile>=0.12.0
|
| 32 |
+
|
| 33 |
+
# -----------------------------------------------------------------------------
|
| 34 |
+
# Image Processing
|
| 35 |
+
# -----------------------------------------------------------------------------
|
| 36 |
+
Pillow>=10.0.0
|
| 37 |
+
opencv-python-headless>=4.8.0
|
| 38 |
+
|
| 39 |
+
# -----------------------------------------------------------------------------
|
| 40 |
+
# HTTP & Networking
|
| 41 |
+
# -----------------------------------------------------------------------------
|
| 42 |
+
requests>=2.31.0
|
| 43 |
+
httpx>=0.25.0
|
| 44 |
+
|
| 45 |
+
# -----------------------------------------------------------------------------
|
| 46 |
+
# Data Processing
|
| 47 |
+
# -----------------------------------------------------------------------------
|
| 48 |
+
numpy>=1.24.0
|
| 49 |
+
scipy>=1.11.0
|
| 50 |
+
pydantic>=2.0.0
|
| 51 |
+
|
| 52 |
+
# -----------------------------------------------------------------------------
|
| 53 |
+
# Utilities
|
| 54 |
+
# -----------------------------------------------------------------------------
|
| 55 |
+
python-dotenv>=1.0.0
|
| 56 |
+
tqdm>=4.66.0
|
| 57 |
+
|
| 58 |
+
# -----------------------------------------------------------------------------
|
| 59 |
+
# NOTES:
|
| 60 |
+
# -----------------------------------------------------------------------------
|
| 61 |
+
# 1. llama-cpp-python is installed separately in Dockerfile
|
| 62 |
+
# (requires compilation, better to install after other deps)
|
| 63 |
+
#
|
| 64 |
+
# 2. The N-ATLaS GGUF model (~4.92GB) is NOT included here
|
| 65 |
+
# It downloads automatically at runtime from:
|
| 66 |
+
# https://huggingface.co/tosinamuda/N-ATLaS-GGUF
|
| 67 |
+
#
|
| 68 |
+
# 3. For local development on Apple Silicon (M1/M2/M3), install:
|
| 69 |
+
# CMAKE_ARGS="-DLLAMA_METAL=on" pip install llama-cpp-python
|
| 70 |
+
# =============================================================================
|
services/__init__.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FarmEyes Services Package
|
| 3 |
+
=========================
|
| 4 |
+
Service layer modules for the FarmEyes application.
|
| 5 |
+
|
| 6 |
+
Services:
|
| 7 |
+
- session_manager: Session state and chat memory management
|
| 8 |
+
- chat_service: Contextual agricultural chatbot
|
| 9 |
+
- whisper_service: Speech-to-text for voice input
|
| 10 |
+
- disease_detector: Disease detection with knowledge base
|
| 11 |
+
- translator: N-ATLaS translation service
|
| 12 |
+
- diagnosis_generator: Complete diagnosis report generation
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
# Import services for easy access
|
| 16 |
+
from services.session_manager import (
|
| 17 |
+
SessionManager,
|
| 18 |
+
UserSession,
|
| 19 |
+
DiagnosisContext,
|
| 20 |
+
ChatMessage,
|
| 21 |
+
get_session_manager,
|
| 22 |
+
create_session,
|
| 23 |
+
get_session,
|
| 24 |
+
get_or_create_session
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from services.chat_service import (
|
| 28 |
+
ChatService,
|
| 29 |
+
get_chat_service,
|
| 30 |
+
chat,
|
| 31 |
+
get_welcome
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
from services.whisper_service import (
|
| 35 |
+
WhisperService,
|
| 36 |
+
AudioProcessor,
|
| 37 |
+
get_whisper_service,
|
| 38 |
+
transcribe_audio,
|
| 39 |
+
transcribe_bytes
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
# These will be imported from existing files
|
| 43 |
+
# from services.disease_detector import (
|
| 44 |
+
# DiseaseDetectorService,
|
| 45 |
+
# DetectionResult,
|
| 46 |
+
# get_disease_detector,
|
| 47 |
+
# detect_crop_disease
|
| 48 |
+
# )
|
| 49 |
+
|
| 50 |
+
# from services.translator import (
|
| 51 |
+
# TranslatorService,
|
| 52 |
+
# get_translator,
|
| 53 |
+
# translate_text
|
| 54 |
+
# )
|
| 55 |
+
|
| 56 |
+
# from services.diagnosis_generator import (
|
| 57 |
+
# DiagnosisGenerator,
|
| 58 |
+
# DiagnosisReport,
|
| 59 |
+
# get_diagnosis_generator,
|
| 60 |
+
# generate_diagnosis
|
| 61 |
+
# )
|
| 62 |
+
|
| 63 |
+
__all__ = [
|
| 64 |
+
# Session management
|
| 65 |
+
"SessionManager",
|
| 66 |
+
"UserSession",
|
| 67 |
+
"DiagnosisContext",
|
| 68 |
+
"ChatMessage",
|
| 69 |
+
"get_session_manager",
|
| 70 |
+
"create_session",
|
| 71 |
+
"get_session",
|
| 72 |
+
"get_or_create_session",
|
| 73 |
+
|
| 74 |
+
# Chat service
|
| 75 |
+
"ChatService",
|
| 76 |
+
"get_chat_service",
|
| 77 |
+
"chat",
|
| 78 |
+
"get_welcome",
|
| 79 |
+
|
| 80 |
+
# Whisper service
|
| 81 |
+
"WhisperService",
|
| 82 |
+
"AudioProcessor",
|
| 83 |
+
"get_whisper_service",
|
| 84 |
+
"transcribe_audio",
|
| 85 |
+
"transcribe_bytes",
|
| 86 |
+
]
|