NaseefNazrul committed on
Commit
7b96393
·
verified ·
1 Parent(s): 1c83ba9

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +159 -0
app.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import joblib
import numpy as np
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import logging
import sys
# NOTE(review): `os` and `sys` are imported but not referenced anywhere in
# this file — confirm before removing (may be leftovers from an earlier draft).

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="Bloom Prediction ML API")

# ML Model artifacts (upload these to your Hugging Face Space)
# Paths are relative to the working directory of the Space container.
MODEL_PATH = "mil_bloom_model.joblib"
SCALER_PATH = "mil_scaler.joblib"
FEATURES_PATH = "mil_features.joblib"

# Global variables for ML model, populated once by load_ml_model() at startup.
ML_MODEL = None        # fitted classifier exposing predict / predict_proba
SCALER = None          # fitted feature scaler exposing transform
FEATURE_COLUMNS = None # feature-name list saved at training time
25
class PredictionRequest(BaseModel):
    """Request payload for POST /predict."""
    # Raw feature values keyed by name; predict_bloom() requires the keys
    # ndvi, ndwi, evi, lst, cloud_cover, month, day_of_year.
    features: dict
    # Extra options; currently unused by the /predict handler in this file.
    # NOTE(review): a mutable `{}` default is safe here only because pydantic
    # copies field defaults per instance — keep it a pydantic model.
    parameters: dict = {}
28
+
29
class PredictionResponse(BaseModel):
    """Response payload for POST /predict."""
    # True when the prediction pipeline completed without error.
    success: bool
    # Bloom probability as a percentage (0–100, rounded to 2 decimals).
    bloom_probability: float
    # Either 'BLOOM' or 'NO_BLOOM' (see predict_bloom()).
    prediction: str
    # 'HIGH', 'MEDIUM' or 'LOW' — distance from the decision boundary.
    confidence: str
    # Human-readable status message.
    message: str = ""
35
+
36
def load_ml_model():
    """Populate the module-level model, scaler and feature-list globals.

    Loads the three joblib artifacts from the working directory, logging
    the outcome. Any loading failure is logged and re-raised so startup
    aborts loudly instead of serving with a half-initialized model.
    """
    global ML_MODEL, SCALER, FEATURE_COLUMNS

    try:
        # Load all three artifacts in one pass; assignment is atomic enough
        # here because a failure re-raises before the app starts serving.
        ML_MODEL, SCALER, FEATURE_COLUMNS = (
            joblib.load(path)
            for path in (MODEL_PATH, SCALER_PATH, FEATURES_PATH)
        )
        logger.info("βœ… ML model loaded successfully in Hugging Face Space")
        logger.info(f"βœ… Features: {FEATURE_COLUMNS}")
    except Exception as e:
        logger.error(f"❌ Failed to load ML model: {e}")
        raise
49
+
50
def predict_bloom(features_dict: dict) -> dict:
    """Run the bloom classifier on a single observation.

    Args:
        features_dict: mapping that must provide numeric values for the keys
            ndvi, ndwi, evi, lst, cloud_cover, month, day_of_year.

    Returns:
        dict with keys:
            'bloom_probability' — percentage (0–100), rounded to 2 decimals;
            'prediction' — 'BLOOM' or 'NO_BLOOM';
            'confidence' — 'HIGH', 'MEDIUM' or 'LOW'.

    Raises:
        ValueError: if the model is not loaded or required feature keys are
            missing (previously this surfaced as an opaque KeyError).
    """
    if ML_MODEL is None:
        raise ValueError("ML model not loaded")

    # Training-time feature order; the model was fitted on columns in this
    # exact order, so it must not change.
    # NOTE(review): FEATURE_COLUMNS is loaded at startup but never consulted
    # here — confirm it matches this hard-coded order.
    required = ('ndvi', 'ndwi', 'evi', 'lst', 'cloud_cover',
                'month', 'day_of_year')

    # Fail fast with a clear message instead of a bare KeyError mid-pipeline.
    missing = [key for key in required if key not in features_dict]
    if missing:
        raise ValueError(f"Missing required features: {', '.join(missing)}")

    try:
        # Build a single-row feature matrix in the canonical column order.
        features_array = np.array(
            [[float(features_dict[key]) for key in required]]
        )

        # Scale with the training-time scaler before scoring.
        features_scaled = SCALER.transform(features_array)

        probabilities = ML_MODEL.predict_proba(features_scaled)
        if probabilities.shape[1] == 2:
            # Standard binary case: column 1 is the positive (bloom) class.
            bloom_probability = probabilities[0, 1]
        else:
            # Degenerate single-class model: only one column is available.
            bloom_probability = probabilities[0, 0]

        prediction = ML_MODEL.predict(features_scaled)[0]

        # Business rule: damp the probability during winter months when
        # vegetation indices are low.
        ndvi = features_dict['ndvi']
        evi = features_dict['evi']
        month = features_dict['month']
        if month in [11, 12, 1, 2] and evi < 0.8 and ndvi < 0.3:
            bloom_probability = bloom_probability * 0.5
            logger.info("❄️ Applied winter adjustment")

        # Confidence reflects distance from the 0.5 decision boundary.
        if bloom_probability > 0.75 or bloom_probability < 0.25:
            confidence = 'HIGH'
        elif bloom_probability > 0.6 or bloom_probability < 0.4:
            confidence = 'MEDIUM'
        else:
            confidence = 'LOW'

        return {
            'bloom_probability': round(float(bloom_probability * 100), 2),
            'prediction': 'BLOOM' if prediction == 1 else 'NO_BLOOM',
            'confidence': confidence,
        }

    except Exception as e:
        logger.error(f"❌ Prediction error: {e}")
        raise
109
+
110
@app.on_event("startup")
async def startup_event():
    """Load the ML model artifacts once when the app starts.

    A loading failure propagates out of load_ml_model() and aborts startup.
    NOTE(review): @app.on_event is deprecated in recent FastAPI versions in
    favor of lifespan handlers — works, but worth migrating.
    """
    load_ml_model()
114
+
115
+ @app.get("/")
116
+ async def root():
117
+ return {
118
+ "message": "Bloom Prediction ML API",
119
+ "status": "active",
120
+ "model_loaded": ML_MODEL is not None
121
+ }
122
+
123
+ @app.get("/health")
124
+ async def health():
125
+ return {
126
+ "status": "healthy",
127
+ "model_loaded": ML_MODEL is not None
128
+ }
129
+
130
@app.post("/predict")
async def predict(request: PredictionRequest):
    """Main prediction endpoint called by the backend.

    Delegates to predict_bloom() and wraps its result in a
    PredictionResponse; any failure is surfaced as an HTTP 500.
    """
    try:
        logger.info(f"πŸ“Š Received prediction request with features: {request.features}")

        # Run the ML pipeline on the supplied feature dict.
        result = predict_bloom(request.features)

        response = PredictionResponse(
            success=True,
            bloom_probability=result['bloom_probability'],
            prediction=result['prediction'],
            confidence=result['confidence'],
            message="Prediction completed successfully",
        )

        logger.info(f"βœ… Prediction completed: {result['bloom_probability']}%")
        return response

    except Exception as e:
        # Translate any pipeline failure into a client-visible 500.
        logger.error(f"❌ Prediction failed: {e}")
        raise HTTPException(status_code=500, detail=f"Prediction failed: {str(e)}")
155
+
156
# For Hugging Face Spaces deployment
if __name__ == "__main__":
    import uvicorn
    # 7860 is the port Hugging Face Spaces expects the app to listen on;
    # bind to all interfaces so the container proxy can reach it.
    uvicorn.run(app, host="0.0.0.0", port=7860)