agentsay committed on
Commit
9b7c64f
·
verified ·
1 Parent(s): 05e43fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +293 -293
app.py CHANGED
@@ -1,293 +1,293 @@
1
- from fastapi import FastAPI, HTTPException, Query
2
- from fastapi.responses import JSONResponse
3
- from pydantic import BaseModel
4
- import os
5
- import requests
6
- import time
7
- import cloudinary
8
- import cloudinary.utils
9
- import engine
10
- import config
11
- import futureWeather
12
- import warnings
13
- import re
14
- from geopy.geocoders import Nominatim
15
- from geopy.exc import GeocoderTimedOut
16
- import pandas as pd
17
-
18
- # Load environment variables from .env file
19
- try:
20
- from dotenv import load_dotenv
21
- load_dotenv()
22
- except ImportError:
23
- print("Warning: python-dotenv not installed. Using system environment variables only.")
24
-
25
- warnings.filterwarnings("ignore")
26
-
27
- app = FastAPI()
28
-
29
- # Configure Cloudinary using environment variables
30
- cloudinary_config = {
31
- 'cloud_name': config.CLOUDINARY_CLOUD_NAME,
32
- 'api_key': config.CLOUDINARY_API_KEY,
33
- 'api_secret': config.CLOUDINARY_API_SECRET
34
- }
35
-
36
- # Validate that all required Cloudinary credentials are present
37
- if not all(cloudinary_config.values()):
38
- print("Warning: Some Cloudinary environment variables are missing!")
39
- missing = [k for k, v in cloudinary_config.items() if not v]
40
- print(f"Missing: {missing}")
41
-
42
- cloudinary.config(**cloudinary_config)
43
-
44
- # Ensure upload directory exists
45
- UPLOAD_FOLDER = 'Uploads'
46
- if not os.path.exists(UPLOAD_FOLDER):
47
- os.makedirs(UPLOAD_FOLDER)
48
-
49
- # Pydantic models for request validation
50
- class ImageRequest(BaseModel):
51
- publicId: str
52
- fileType: str
53
- originalName: str | None = None
54
-
55
- class CropYieldRequest(BaseModel):
56
- cropName: str
57
- locationLat: float
58
- locationLong: float
59
-
60
- class WeatherPredictionRequest(BaseModel):
61
- locationLat: float
62
- locationLong: float
63
- language: str
64
-
65
- # Generate signed URL for Cloudinary
66
- def get_signed_url(public_id: str, resource_type: str = 'image', expires_in: int = 300) -> str:
67
- expires_at = int(time.time()) + expires_in
68
- url, options = cloudinary.utils.cloudinary_url(
69
- public_id,
70
- resource_type=resource_type,
71
- type="authenticated",
72
- sign_url=True,
73
- expires_at=expires_at
74
- )
75
- return url
76
-
77
- # Download from Cloudinary and save to local file
78
- def download_file(public_id: str, save_path: str, file_type: str = 'image/jpeg') -> bool:
79
- resource_type = 'raw' if file_type == 'raw' else 'image'
80
- url = get_signed_url(public_id, resource_type=resource_type)
81
- response = requests.get(url, headers={'Content-Type': file_type})
82
- if response.status_code == 200:
83
- with open(save_path, 'wb') as f:
84
- f.write(response.content)
85
- return True
86
- return False
87
-
88
- # --- FastAPI Routes ---
89
- @app.get("/")
90
- async def root():
91
- return {
92
- "message": "Agrosure API is running!",
93
- "status": "healthy",
94
- "endpoints": {
95
- "exif_metadata": "/api/exif_metadata",
96
- "damage_detection": "/api/damage_detection",
97
- "crop_type": "/api/crop_type",
98
- "crop_yield_prediction": "/predictForCrop",
99
- "weather_prediction": "/futureWeatherPrediction"
100
- },
101
- "docs": "/docs",
102
- "redoc": "/redoc"
103
- }
104
-
105
- @app.post("/api/exif_metadata")
106
- async def exif_metadata(image_request: ImageRequest):
107
- filename = image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
108
- filepath = os.path.join(UPLOAD_FOLDER, filename)
109
-
110
- if not download_file(image_request.publicId, filepath, image_request.fileType):
111
- raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")
112
-
113
- result = engine.get_exif_data(filepath)
114
- os.remove(filepath)
115
- return result
116
-
117
- @app.post("/api/damage_detection")
118
- async def damage_detection(image_request: ImageRequest):
119
- print(f"Received damage detection request: {image_request}")
120
- filename = image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
121
- filepath = os.path.join(UPLOAD_FOLDER, filename)
122
-
123
- if not download_file(image_request.publicId, filepath, image_request.fileType):
124
- raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")
125
-
126
- result = engine.predict_damage(filepath)
127
- os.remove(filepath)
128
- return result
129
-
130
- @app.post("/api/crop_type")
131
- async def crop_type(image_request: ImageRequest):
132
- filename = image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
133
- filepath = os.path.join(UPLOAD_FOLDER, filename)
134
-
135
- if not download_file(image_request.publicId, filepath, image_request.fileType):
136
- raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")
137
-
138
- result = engine.predict_crop(filepath)
139
- os.remove(filepath)
140
- return result
141
-
142
- @app.post("/predictForCrop")
143
- async def predict_crop_yield(data: CropYieldRequest):
144
- if not (-90 <= data.locationLat <= 90) or not (-180 <= data.locationLong <= 180):
145
- raise HTTPException(status_code=400, detail="Invalid latitude or longitude values")
146
-
147
- try:
148
- result = engine.predict_crop_yield_from_location(
149
- crop_input=data.cropName.upper(),
150
- lat=data.locationLat,
151
- lon=data.locationLong
152
- )
153
- return result
154
- except ValueError as e:
155
- raise HTTPException(status_code=400, detail=f"Invalid numeric input: {str(e)}")
156
- except Exception as e:
157
- raise HTTPException(status_code=500, detail=str(e))
158
-
159
-
160
- @app.post("/futureWeatherPrediction")
161
- async def future_weather_prediction(data: WeatherPredictionRequest):
162
- if not (-90 <= data.locationLat <= 90) or not (-180 <= data.locationLong <= 180):
163
- raise HTTPException(status_code=400, detail="Invalid latitude or longitude values")
164
-
165
- try:
166
- tom = futureWeather.fetch_tomorrow(data.locationLat, data.locationLong)
167
- if not tom or len(tom.get("timelines", {}).get("daily", [])) < 7:
168
- weather_data, source = futureWeather.fetch_open_meteo(data.locationLat, data.locationLong), "open-meteo"
169
- else:
170
- weather_data, source = tom, "tomorrow"
171
-
172
- summary, score, should_claim, flags = futureWeather.extract_and_calc(weather_data, source)
173
- ai_text = futureWeather.invoke_gemini(summary, score, should_claim, flags, data.language)
174
-
175
- return {
176
- "claim_recommendation": {
177
- "should_claim": should_claim,
178
- "weather_trend_risk_score": round(score, 2),
179
- "forecast_summary": summary,
180
- "language": data.language,
181
- "gemini_response": ai_text
182
- }
183
- }
184
- except ValueError as e:
185
- raise HTTPException(status_code=400, detail=f"Invalid numeric input: {str(e)}")
186
- except Exception as e:
187
- raise HTTPException(status_code=500, detail=str(e))
188
-
189
-
190
-
191
- ## MADE BY UDDALAK MUKHERJEE
192
- # Load and clean crop data once on startup
193
- CROP_DATA_PATH = "data/ICRISAT-District_Level_Data_30_Years.csv"
194
- df_crop = pd.read_csv(CROP_DATA_PATH)
195
- df_crop_clean = df_crop.drop(columns=['State Code', 'Year', 'State Name'], errors='ignore')
196
- mean_crop_by_district = df_crop_clean.groupby('Dist Name').mean(numeric_only=True)
197
-
198
- def get_district_from_coordinates(lat, lon):
199
- geolocator = Nominatim(user_agent="agrisure-ai")
200
- try:
201
- location = geolocator.reverse((lat, lon), exactly_one=True)
202
- except GeocoderTimedOut:
203
- raise Exception("Reverse geocoding service timed out.")
204
- except Exception as e:
205
- raise Exception(f"Geocoding error: {str(e)}")
206
-
207
- if not location:
208
- raise ValueError("Could not get district from coordinates.")
209
-
210
- # Handle potential async/coroutine response with type ignoring
211
- try:
212
- # Use type: ignore to suppress type checker warnings for geopy attributes
213
- address = location.raw.get('address', {}) # type: ignore
214
- except (AttributeError, TypeError):
215
- try:
216
- # Fallback: try to get address from location attributes
217
- addr_str = str(location.address) # type: ignore
218
- # Basic parsing fallback
219
- address = {'display_name': addr_str}
220
- except (AttributeError, TypeError):
221
- raise ValueError("Could not parse location data.")
222
-
223
- if not address:
224
- raise ValueError("Could not get district from coordinates.")
225
- district = (
226
- address.get('district') or
227
- address.get('state_district') or
228
- address.get('county')
229
- )
230
- if district and 'district' in district.lower():
231
- district = district.replace("District", "").strip()
232
- return district
233
-
234
- def clean_district_name(district):
235
- if not isinstance(district, str):
236
- return district
237
- district = re.sub(r"\s*[-\u2013]\s*(I{1,3}|IV|V|VI|VII|VIII|IX|X|\d+)$", "", district, flags=re.IGNORECASE)
238
- district = district.replace("District", "").strip()
239
- aliases = {
240
- "Purba Bardhaman": "Burdwan",
241
- "Paschim Bardhaman": "Burdwan",
242
- "Bardhaman": "Burdwan",
243
- "Kalna": "Burdwan",
244
- "Kalyani": "Nadia",
245
- "Raiganj": "Uttar Dinajpur",
246
- "Kolkata": "North 24 Parganas"
247
- }
248
- return aliases.get(district, district)
249
-
250
- @app.get("/top-crops")
251
- async def get_top_5_crops(
252
- lat: float = Query(..., description="Latitude of the location"),
253
- lon: float = Query(..., description="Longitude of the location")
254
- ):
255
- try:
256
- district_name = get_district_from_coordinates(lat, lon)
257
- if not district_name:
258
- return JSONResponse(status_code=404, content={"error": "Could not resolve district from coordinates."})
259
-
260
- district_name = clean_district_name(district_name)
261
-
262
- matched_district = None
263
- for dist in mean_crop_by_district.index:
264
- if dist.strip().lower() == district_name.lower():
265
- matched_district = dist
266
- break
267
-
268
- if not matched_district:
269
- return JSONResponse(status_code=404, content={"error": f"District '{district_name}' not found in dataset."})
270
-
271
- top_crops = mean_crop_by_district.loc[matched_district].sort_values(ascending=False).head(5)
272
-
273
- print(top_crops)
274
-
275
- return {
276
- "district": matched_district,
277
- "top_5_crops": [
278
- crop.replace(" (Kg per ha)", "").replace("YIELD", "").strip()
279
- for crop in top_crops.index
280
- ]
281
- }
282
-
283
- except Exception as e:
284
- return JSONResponse(status_code=500, content={"error": str(e)})
285
-
286
-
287
- if __name__ == "__main__":
288
- import uvicorn
289
- print("Starting FastAPI server...")
290
- print("Server will be available at:")
291
- print(" - http://localhost:5001")
292
- print("\nPress CTRL+C to stop the server")
293
- uvicorn.run("app:app", host="0.0.0.0", port=5001, reload=True)
 
1
+ from fastapi import FastAPI, HTTPException, Query
2
+ from fastapi.responses import JSONResponse
3
+ from pydantic import BaseModel
4
+ import os
5
+ import requests
6
+ import time
7
+ import cloudinary
8
+ import cloudinary.utils
9
+ import engine
10
+ import config
11
+ import futureWeather
12
+ import warnings
13
+ import re
14
+ from geopy.geocoders import Nominatim
15
+ from geopy.exc import GeocoderTimedOut
16
+ import pandas as pd
17
+
18
# Load environment variables from .env file
try:
    from dotenv import load_dotenv
    load_dotenv()
except ImportError:
    print("Warning: python-dotenv not installed. Using system environment variables only.")

warnings.filterwarnings("ignore")

app = FastAPI()

# Configure Cloudinary using environment variables
cloudinary_config = {
    'cloud_name': config.CLOUDINARY_CLOUD_NAME,
    'api_key': config.CLOUDINARY_API_KEY,
    'api_secret': config.CLOUDINARY_API_SECRET
}

# Validate that all required Cloudinary credentials are present.
# Warn (rather than abort) so the non-Cloudinary endpoints stay usable.
if not all(cloudinary_config.values()):
    print("Warning: Some Cloudinary environment variables are missing!")
    missing = [k for k, v in cloudinary_config.items() if not v]
    print(f"Missing: {missing}")

cloudinary.config(**cloudinary_config)

# Ensure upload directory exists.
# exist_ok=True replaces the check-then-create pattern, which was racy and
# would crash if the directory appeared between the check and the makedirs.
UPLOAD_FOLDER = 'Uploads'
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
48
+
49
# Pydantic models for request validation
class ImageRequest(BaseModel):
    """Payload identifying an image previously uploaded to Cloudinary."""
    # Cloudinary public ID of the asset (may contain '/'-separated folders).
    publicId: str
    # File/MIME type reported by the uploader, e.g. "image/jpeg" or "raw".
    fileType: str
    # Original client-side filename; used to name the temporary local copy.
    originalName: str | None = None
54
+
55
class CropYieldRequest(BaseModel):
    """Inputs for crop-yield prediction at a geographic point."""
    # Crop name; upper-cased by the endpoint before reaching the engine.
    cropName: str
    # Latitude in decimal degrees (endpoint validates [-90, 90]).
    locationLat: float
    # Longitude in decimal degrees (endpoint validates [-180, 180]).
    locationLong: float
59
+
60
class WeatherPredictionRequest(BaseModel):
    """Inputs for the weather-based claim-recommendation endpoint."""
    # Latitude in decimal degrees (endpoint validates [-90, 90]).
    locationLat: float
    # Longitude in decimal degrees (endpoint validates [-180, 180]).
    locationLong: float
    # Language identifier forwarded verbatim to the Gemini prompt.
    language: str
64
+
65
# Generate signed URL for Cloudinary
def get_signed_url(public_id: str, resource_type: str = 'image', expires_in: int = 300) -> str:
    """Return a short-lived signed Cloudinary delivery URL.

    Args:
        public_id: Cloudinary public ID of the asset.
        resource_type: Cloudinary resource type ('image' or 'raw').
        expires_in: URL lifetime in seconds (default 5 minutes).

    Returns:
        The signed URL string.
    """
    expires_at = int(time.time()) + expires_in
    # cloudinary_url returns (url, options); the options dict was previously
    # bound to an unused local — discard it explicitly.
    url, _ = cloudinary.utils.cloudinary_url(
        public_id,
        resource_type=resource_type,
        type="authenticated",
        sign_url=True,
        expires_at=expires_at
    )
    return url
76
+
77
# Download from Cloudinary and save to local file
def download_file(public_id: str, save_path: str, file_type: str = 'image/jpeg') -> bool:
    """Download a Cloudinary asset to *save_path* via a signed URL.

    Args:
        public_id: Cloudinary public ID of the asset.
        save_path: Local filesystem path to write the downloaded bytes to.
        file_type: Uploader-reported type; 'raw' selects Cloudinary's raw
            resource type, anything else is treated as an image.

    Returns:
        True if the download succeeded (HTTP 200), False otherwise.
    """
    resource_type = 'raw' if file_type == 'raw' else 'image'
    url = get_signed_url(public_id, resource_type=resource_type)
    # NOTE(review): a Content-Type header on a bodiless GET is almost
    # certainly ignored by the CDN; kept for request byte-compatibility.
    # The timeout is new: without it a stalled CDN connection would block
    # this worker indefinitely.
    response = requests.get(url, headers={'Content-Type': file_type}, timeout=30)
    if response.status_code == 200:
        with open(save_path, 'wb') as f:
            f.write(response.content)
        return True
    return False
87
+
88
# --- FastAPI Routes ---
@app.get("/")
async def root():
    """Health-check endpoint listing the available API routes."""
    endpoints = {
        "exif_metadata": "/api/exif_metadata",
        "damage_detection": "/api/damage_detection",
        "crop_type": "/api/crop_type",
        "crop_yield_prediction": "/predictForCrop",
        "weather_prediction": "/futureWeatherPrediction",
    }
    return {
        "message": "Agrosure API is running!",
        "status": "healthy",
        "endpoints": endpoints,
        "docs": "/docs",
        "redoc": "/redoc",
    }
104
+
105
@app.post("/api/exif_metadata")
async def exif_metadata(image_request: ImageRequest):
    """Extract EXIF metadata from an image stored on Cloudinary.

    Downloads the asset to a temporary local file, runs the EXIF engine on
    it, and removes the temporary file even if the engine raises.

    Raises:
        HTTPException: 500 when the Cloudinary download fails.
    """
    # basename() strips directory components from the client-supplied name
    # so a crafted originalName (e.g. "../../x") cannot escape UPLOAD_FOLDER.
    filename = os.path.basename(
        image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
    )
    filepath = os.path.join(UPLOAD_FOLDER, filename)

    if not download_file(image_request.publicId, filepath, image_request.fileType):
        raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")

    try:
        result = engine.get_exif_data(filepath)
    finally:
        # Previously the temp file leaked whenever the engine raised.
        os.remove(filepath)
    return result
116
+
117
@app.post("/api/damage_detection")
async def damage_detection(image_request: ImageRequest):
    """Run crop-damage detection on an image stored on Cloudinary.

    Downloads the asset to a temporary local file, runs the damage model,
    and removes the temporary file even if the model raises.

    Raises:
        HTTPException: 500 when the Cloudinary download fails.
    """
    print(f"Received damage detection request: {image_request}")
    # basename() strips directory components from the client-supplied name
    # so a crafted originalName (e.g. "../../x") cannot escape UPLOAD_FOLDER.
    filename = os.path.basename(
        image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
    )
    filepath = os.path.join(UPLOAD_FOLDER, filename)

    if not download_file(image_request.publicId, filepath, image_request.fileType):
        raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")

    try:
        result = engine.predict_damage(filepath)
    finally:
        # Previously the temp file leaked whenever the model raised.
        os.remove(filepath)
    return result
129
+
130
@app.post("/api/crop_type")
async def crop_type(image_request: ImageRequest):
    """Classify the crop type shown in an image stored on Cloudinary.

    Downloads the asset to a temporary local file, runs the crop classifier,
    and removes the temporary file even if the classifier raises.

    Raises:
        HTTPException: 500 when the Cloudinary download fails.
    """
    # basename() strips directory components from the client-supplied name
    # so a crafted originalName (e.g. "../../x") cannot escape UPLOAD_FOLDER.
    filename = os.path.basename(
        image_request.originalName or f"{image_request.publicId.split('/')[-1]}.jpg"
    )
    filepath = os.path.join(UPLOAD_FOLDER, filename)

    if not download_file(image_request.publicId, filepath, image_request.fileType):
        raise HTTPException(status_code=500, detail=f"Failed to download image from Cloudinary: {image_request.publicId}")

    try:
        result = engine.predict_crop(filepath)
    finally:
        # Previously the temp file leaked whenever the classifier raised.
        os.remove(filepath)
    return result
141
+
142
@app.post("/predictForCrop")
async def predict_crop_yield(data: CropYieldRequest):
    """Predict the yield for a named crop at the given coordinates.

    Raises:
        HTTPException: 400 for out-of-range coordinates or bad numeric
            input; 500 for any other engine failure.
    """
    lat, lon = data.locationLat, data.locationLong
    if not (-90 <= lat <= 90 and -180 <= lon <= 180):
        raise HTTPException(status_code=400, detail="Invalid latitude or longitude values")

    try:
        return engine.predict_crop_yield_from_location(
            crop_input=data.cropName.upper(),
            lat=lat,
            lon=lon,
        )
    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid numeric input: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
158
+
159
+
160
@app.post("/futureWeatherPrediction")
async def future_weather_prediction(data: WeatherPredictionRequest):
    """Produce a forecast-driven insurance-claim recommendation.

    Fetches a 7-day forecast (tomorrow.io first, Open-Meteo as fallback),
    scores the weather-trend risk, and asks Gemini for a narrative in the
    requested language.

    Raises:
        HTTPException: 400 for out-of-range coordinates or bad numeric
            input; 500 for any other failure.
    """
    lat, lon = data.locationLat, data.locationLong
    if not (-90 <= lat <= 90 and -180 <= lon <= 180):
        raise HTTPException(status_code=400, detail="Invalid latitude or longitude values")

    try:
        # Prefer tomorrow.io; fall back to Open-Meteo when its response is
        # missing or contains fewer than 7 daily entries.
        tom = futureWeather.fetch_tomorrow(lat, lon)
        daily = tom.get("timelines", {}).get("daily", []) if tom else []
        if len(daily) < 7:
            source = "open-meteo"
            weather_data = futureWeather.fetch_open_meteo(lat, lon)
        else:
            source = "tomorrow"
            weather_data = tom

        summary, score, should_claim, flags = futureWeather.extract_and_calc(weather_data, source)
        ai_text = futureWeather.invoke_gemini(summary, score, should_claim, flags, data.language)

        recommendation = {
            "should_claim": should_claim,
            "weather_trend_risk_score": round(score, 2),
            "forecast_summary": summary,
            "language": data.language,
            "gemini_response": ai_text,
        }
        return {"claim_recommendation": recommendation}
    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid numeric input: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
188
+
189
+
190
+
191
## MADE BY UDDALAK MUKHERJEE
# Load and clean crop data once on startup
CROP_DATA_PATH = "data/ICRISAT-District_Level_Data_30_Years.csv"
# NOTE(review): read at import time — a missing or unreadable CSV aborts
# application startup with an unhandled exception.
df_crop = pd.read_csv(CROP_DATA_PATH)
# Drop identifier columns so the per-district mean covers only crop metrics.
df_crop_clean = df_crop.drop(columns=['State Code', 'Year', 'State Name'], errors='ignore')
# Mean of every remaining numeric column, keyed by district name; used by
# the /top-crops endpoint for lookups.
mean_crop_by_district = df_crop_clean.groupby('Dist Name').mean(numeric_only=True)
197
+
198
def get_district_from_coordinates(lat, lon):
    """Reverse-geocode (lat, lon) to a district name via Nominatim.

    Args:
        lat: Latitude in decimal degrees.
        lon: Longitude in decimal degrees.

    Returns:
        The district name string, or None if the geocoder response has no
        district/state_district/county field.

    Raises:
        Exception: on geocoder timeout or any other geocoding failure.
        ValueError: when no location or address can be resolved.
    """
    geolocator = Nominatim(user_agent="agrisure-ai")
    try:
        # Explicit timeout: geopy's default (1s) is easy to exceed against
        # the public Nominatim service.
        location = geolocator.reverse((lat, lon), exactly_one=True, timeout=10)
    except GeocoderTimedOut:
        raise Exception("Reverse geocoding service timed out.")
    except Exception as e:
        raise Exception(f"Geocoding error: {str(e)}")

    if not location:
        raise ValueError("Could not get district from coordinates.")

    # Handle potential async/coroutine response with type ignoring
    try:
        # Use type: ignore to suppress type checker warnings for geopy attributes
        address = location.raw.get('address', {})  # type: ignore
    except (AttributeError, TypeError):
        try:
            # Fallback: flatten whatever address text geopy exposes.
            addr_str = str(location.address)  # type: ignore
            address = {'display_name': addr_str}
        except (AttributeError, TypeError):
            raise ValueError("Could not parse location data.")

    if not address:
        raise ValueError("Could not get district from coordinates.")
    district = (
        address.get('district') or
        address.get('state_district') or
        address.get('county')
    )
    # Strip the "district" label case-insensitively. The old code tested
    # district.lower() but only replaced the exact-case "District", so a
    # lower-case "... district" suffix was never removed.
    if district and 'district' in district.lower():
        district = re.sub(r'\bdistrict\b', '', district, flags=re.IGNORECASE).strip()
    return district
233
+
234
def clean_district_name(district):
    """Normalise a geocoded district name to the dataset's naming.

    Strips a trailing subdivision suffix (roman numeral or digits after a
    dash), removes a "District" label, then maps known alternate names onto
    the district names used by the ICRISAT dataset. Non-string inputs are
    returned unchanged.
    """
    if not isinstance(district, str):
        return district
    # Drop a trailing "- II" / "– 3" style subdivision marker.
    stripped = re.sub(
        r"\s*[-\u2013]\s*(I{1,3}|IV|V|VI|VII|VIII|IX|X|\d+)$",
        "",
        district,
        flags=re.IGNORECASE,
    )
    stripped = stripped.replace("District", "").strip()
    # Renamed districts and nearby towns mapped onto dataset names.
    alias_map = {
        "Purba Bardhaman": "Burdwan",
        "Paschim Bardhaman": "Burdwan",
        "Bardhaman": "Burdwan",
        "Kalna": "Burdwan",
        "Kalyani": "Nadia",
        "Raiganj": "Uttar Dinajpur",
        "Kolkata": "North 24 Parganas",
    }
    return alias_map.get(stripped, stripped)
249
+
250
@app.get("/top-crops")
async def get_top_5_crops(
    lat: float = Query(..., description="Latitude of the location"),
    lon: float = Query(..., description="Longitude of the location")
):
    """Return the 5 highest-mean crop columns for the district at (lat, lon).

    Resolves the district by reverse geocoding, matches it case-insensitively
    against the ICRISAT dataset index, and returns the cleaned names of the
    five columns with the highest district means.

    Returns:
        JSON with the matched district and top-5 crop names, or a 404/500
        JSONResponse on failure.
    """
    try:
        district_name = get_district_from_coordinates(lat, lon)
        if not district_name:
            return JSONResponse(status_code=404, content={"error": "Could not resolve district from coordinates."})

        district_name = clean_district_name(district_name)

        # Case-insensitive match against the dataset's district index;
        # the target is lowered once instead of on every iteration.
        matched_district = None
        target = district_name.lower()
        for dist in mean_crop_by_district.index:
            if dist.strip().lower() == target:
                matched_district = dist
                break

        if not matched_district:
            return JSONResponse(status_code=404, content={"error": f"District '{district_name}' not found in dataset."})

        # NOTE(review): this ranks ALL numeric columns by mean value, not
        # just "... YIELD (Kg per ha)" ones — AREA/PRODUCTION columns (in
        # different units) could leak into the top 5; confirm column schema.
        top_crops = mean_crop_by_district.loc[matched_district].sort_values(ascending=False).head(5)

        return {
            "district": matched_district,
            "top_5_crops": [
                crop.replace(" (Kg per ha)", "").replace("YIELD", "").strip()
                for crop in top_crops.index
            ]
        }

    except Exception as e:
        return JSONResponse(status_code=500, content={"error": str(e)})
285
+
286
+
287
if __name__ == "__main__":
    # Development entry point; production deployments should launch uvicorn
    # (or gunicorn+uvicorn workers) directly instead.
    import uvicorn
    print("Starting FastAPI server...")
    print("Server will be available at:")
    print(" - http://localhost:7860")
    print("\nPress CTRL+C to stop the server")
    # reload=True restarts the server on code changes (development only).
    # Port 7860 is presumably chosen to match the Hugging Face Spaces
    # convention — confirm against the deployment config.
    uvicorn.run("app:app", host="0.0.0.0", port=7860, reload=True)