| import json, os, re, requests, uvicorn, logging |
| from datetime import datetime, timedelta |
| from typing import Optional, List |
| from dotenv import load_dotenv |
| from fastapi import FastAPI, HTTPException, Body |
| from fastapi.middleware.cors import CORSMiddleware |
| from pydantic import BaseModel |
| from google import genai |
|
|
load_dotenv()
# Timestamped, level-tagged log lines for everything this app emits.
logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(name)s: %(message)s")
logger = logging.getLogger("PestAPI")


app = FastAPI(title="Pest Prediction API")
# NOTE(review): fully open CORS (any origin/method/header) — acceptable for
# development; tighten allow_origins before production deployment.
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"], allow_headers=["*"])


# Provider credentials, read from the environment populated by load_dotenv().
GEMINI_ID = os.getenv("GEMINI_API_KEY")  # Google Gemini — primary model backend
NV_KEY = os.getenv("NVIDIA_API_KEY")  # NVIDIA-hosted Llama — fallback backend
OWM_KEY = os.getenv("OPENWEATHERMAP_API_KEY")  # optional: overrides Open-Meteo readings
|
|
class PredictRequest(BaseModel):
    """Request body for POST /api/predict."""
    latitude: float   # WGS84 decimal degrees
    longitude: float  # WGS84 decimal degrees
    crop_type: str    # free-text crop name, interpolated into the LLM prompt
    season: str       # free-text season name, interpolated into the LLM prompt
    soil_type: Optional[str] = "Unknown"   # accepted but not used by the endpoint shown here
    language: Optional[str] = "English"    # accepted but not used by the endpoint shown here
|
|
def fetch_weather(lat, lon):
    """Return ``{"temp": <°C>, "humidity": <%>}`` for the given coordinates.

    Queries Open-Meteo for the current temperature and the mean of the next
    24 hourly relative-humidity readings; if an OpenWeatherMap key is
    configured, its current readings take precedence.

    Deliberately best-effort: any failure (network, bad payload, missing
    keys) is logged and replaced with safe defaults (25 °C / 60 %) so a
    weather outage never blocks a prediction.
    """
    try:
        # timeout= prevents a slow upstream from hanging the request thread
        # indefinitely (the original calls had no timeout at all).
        r = requests.get(
            "https://api.open-meteo.com/v1/forecast",
            params={
                "latitude": lat,
                "longitude": lon,
                "current_weather": True,
                "hourly": "relative_humidity_2m",
                "timezone": "auto",
            },
            timeout=10,
        ).json()
        res = {
            "temp": r["current_weather"]["temperature"],
            # Mean of the first 24 hourly humidity samples (~next day).
            "humidity": sum(r["hourly"]["relative_humidity_2m"][:24]) / 24,
        }
        if OWM_KEY:
            ow = requests.get(
                f"https://api.openweathermap.org/data/2.5/weather?lat={lat}&lon={lon}&appid={OWM_KEY}&units=metric",
                timeout=10,
            ).json()
            res.update({"temp": ow["main"]["temp"], "humidity": ow["main"]["humidity"]})
        return res
    except Exception:
        # Narrowed from a bare `except:`; log the cause instead of hiding it,
        # then keep the original fallback behavior.
        logger.exception("Weather lookup failed for (%s, %s); using defaults", lat, lon)
        return {"temp": 25, "humidity": 60}
|
|
@app.post("/api/predict")
def predict(payload: PredictRequest):
    """Generate a pest-prediction report for a crop at a location.

    Builds a prompt from the request fields plus current weather, asks
    Gemini first, then falls back to an NVIDIA-hosted Llama model, and
    extracts the first JSON object from whichever reply succeeds.

    Raises:
        HTTPException(500): if neither backend produced parseable JSON.
    """
    w = fetch_weather(payload.latitude, payload.longitude)
    # The JSON example uses double quotes: models tend to mimic the example,
    # and single-quoted "JSON" (as in the original prompt) is rejected by
    # json.loads, which silently killed otherwise-good responses.
    prompt = (
        f"Expert Entomologist: Crop {payload.crop_type}, Season {payload.season}, "
        f"Weather {w}. Output JSON: "
        '{"report_title": "...", "pest_prediction_table": '
        '[{"pest_name": "...", "severity": "..."}]}'
    )

    report = None
    if GEMINI_ID:
        try:
            client = genai.Client(api_key=GEMINI_ID)
            resp = client.models.generate_content(model="gemini-2.0-flash", contents=prompt)
            # Grab the first {...} span so surrounding prose/code fences don't matter.
            match = re.search(r"\{.*\}", resp.text, re.S)
            if match:
                report = json.loads(match.group())
        except Exception:
            # Was a silent bare `except: pass`; log so failures are diagnosable,
            # then fall through to the NVIDIA backend.
            logger.exception("Gemini backend failed")

    if not report and NV_KEY:
        try:
            # Local import: the OpenAI client is only needed on the fallback path.
            from openai import OpenAI
            nv = OpenAI(base_url="https://integrate.api.nvidia.com/v1", api_key=NV_KEY)
            resp = nv.chat.completions.create(
                model="meta/llama-3.1-70b-instruct",
                messages=[{"role": "user", "content": prompt}],
                max_tokens=1024,
            )
            match = re.search(r"\{.*\}", resp.choices[0].message.content, re.S)
            if match:
                report = json.loads(match.group())
        except Exception:
            logger.exception("NVIDIA backend failed")

    if not report:
        raise HTTPException(500, "AI models failed")
    # weather_profile stays an empty list for backward compatibility with
    # existing clients; the weather data itself only feeds the prompt.
    return {**report, "weather_profile": []}
|
|
@app.get("/health")
def health():
    """Liveness probe: always reports the service as up."""
    return {"status": "ok"}
|
|
if __name__ == "__main__":
    # Dev entry point: serve the app on all interfaces, port 8000.
    uvicorn.run(app, host="0.0.0.0", port=8000)