Wall06 committed on
Commit
4baffa4
·
verified ·
1 Parent(s): 823c89e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -24
app.py CHANGED
@@ -15,39 +15,40 @@ from groq import Groq
15
  import google.generativeai as genai
16
 
17
  # -------------------- ENVIRONMENT VARIABLES --------------------
18
- # We try to get keys, but we don't crash if they are missing
19
  HF_API_KEY = os.getenv("HF_API_KEY")
20
  GROQ_API_KEY = os.getenv("GROQ_API_KEY")
21
  GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
22
  SENTINEL_CLIENT_ID = os.getenv("SENTINEL_CLIENT_ID")
23
  SENTINEL_CLIENT_SECRET = os.getenv("SENTINEL_CLIENT_SECRET")
24
 
25
- # -------------------- SENTINEL HUB CONFIG --------------------
26
  config = SHConfig()
27
  if SENTINEL_CLIENT_ID and SENTINEL_CLIENT_SECRET:
28
  config.client_id = SENTINEL_CLIENT_ID
29
  config.client_secret = SENTINEL_CLIENT_SECRET
30
 
31
- # -------------------- AI SUMMARY FUNCTIONS --------------------
32
 
33
def gemini_summary(text):
    """Backup summarizer via Google Gemini.

    Returns a ``(summary, error)`` pair — exactly one element is
    non-None, matching the convention of the other ``*_summary`` helpers
    so ``smart_summary`` can chain them.
    """
    try:
        if not GEMINI_API_KEY:
            return None, "Missing Key"
        genai.configure(api_key=GEMINI_API_KEY)
        # 'gemini-pro' was retired from the Generative Language API;
        # use the current lightweight model instead.
        model = genai.GenerativeModel('gemini-1.5-flash')
        response = model.generate_content(text)
        return response.text, None
    except Exception as e:
        # Never raise: the caller aggregates these error strings into a
        # debug log instead of crashing the app.
        return None, str(e)
43
 
44
  def groq_summary(text):
45
- """Primary: Groq (Fastest)"""
46
  try:
47
  if not GROQ_API_KEY: return None, "Missing Key"
48
  client = Groq(api_key=GROQ_API_KEY)
49
  completion = client.chat.completions.create(
50
- model="mixtral-8x7b-32768",
 
51
  messages=[{"role": "user", "content": text}]
52
  )
53
  return completion.choices[0].message.content, None
@@ -55,18 +56,18 @@ def groq_summary(text):
55
  return None, str(e)
56
 
57
  def hf_summary(text):
58
- """Fallback: Hugging Face Free Inference"""
59
  try:
60
- # Switching to a more stable model for free tier
61
- url = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
62
  headers = {"Authorization": f"Bearer {HF_API_KEY}"}
63
  payload = {
64
- "inputs": f"[INST] {text} [/INST]",
65
  "parameters": {"max_new_tokens": 500}
66
  }
67
  r = requests.post(url, headers=headers, json=payload, timeout=25)
68
  if r.status_code == 200:
69
- return r.json()[0]["generated_text"].replace(payload["inputs"], ""), None
70
  else:
71
  return None, f"Status {r.status_code}: {r.text}"
72
  except Exception as e:
@@ -75,26 +76,25 @@ def hf_summary(text):
75
def smart_summary(text):
    """Summarize *text* via the first AI backend that responds.

    Order: Groq (fastest), then Gemini, then Hugging Face — the last is
    attempted only when HF_API_KEY is configured. If every backend
    fails, the collected error messages are returned so the user can see
    WHY instead of getting a silent failure.
    """
    failures = []

    # (label, callable, enabled) — each callable returns (result, error).
    backends = (
        ("Groq", groq_summary, True),
        ("Gemini", gemini_summary, True),
        ("HF", hf_summary, bool(HF_API_KEY)),
    )

    for label, backend, enabled in backends:
        if not enabled:
            continue
        result, err = backend(text)
        if result:
            return result
        failures.append(f"{label} Error: {err}")

    return "⚠ ALL AI MODELS FAILED. DEBUG LOG:\n" + "\n".join(failures)
96
 
97
- # -------------------- CALCULATION LOGIC --------------------
98
  def calculate_wqi(pH, do, nutrients):
99
  wqi = (7 - abs(7 - pH)) * 0.2 + (do/14) * 0.5 + (10 - nutrients) * 0.3
100
  wqi_score = max(0, min(100, int(wqi*10)))
@@ -122,7 +122,7 @@ def analyze_satellite_image(img):
122
  turbidity_score = int(np.mean(img_array)/2.55)
123
  return turbidity_score
124
 
125
- # -------------------- VISUALS & REPORTS --------------------
126
  def create_plots(wqi, hsi, erosion, turbidity):
127
  fig = go.Figure()
128
  colors = ['#1E90FF', '#32CD32', '#FF4500', '#FFA500']
@@ -157,7 +157,6 @@ def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
157
  pdf.cell(0, 10, "Comprehensive Analysis", ln=True)
158
  pdf.set_font("Arial", "", 11)
159
 
160
- # Handle Text Encoding (force cleanup of special characters)
161
  safe_summary = summary_text.encode('latin-1', 'replace').decode('latin-1')
162
  pdf.multi_cell(0, 6, safe_summary)
163
 
@@ -180,7 +179,6 @@ def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
180
  # -------------------- MAIN PROCESSOR --------------------
181
  def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
182
  try:
183
- # 1. Calculate Scores
184
  wqi = calculate_wqi(pH, do, nutrients)
185
  hsi = calculate_hsi(flow_rate, water_temp, sediment)
186
  erosion = calculate_erosion(sediment, construction)
@@ -188,7 +186,6 @@ def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrient
188
  stability = river_stability(wqi, hsi, erosion)
189
  potability = potability_status(wqi)
190
 
191
- # 2. Prompt
192
  prompt = f"""
193
  Act as an Environmental Scientist. Analyze this river data:
194
  - Water Quality (WQI): {wqi}/100 ({potability})
@@ -205,7 +202,6 @@ def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrient
205
 
206
  summary = smart_summary(prompt)
207
 
208
- # 3. Generate Outputs
209
  fig = create_plots(wqi, hsi, erosion, turbidity)
210
  pdf_bytes = generate_pdf(wqi, hsi, erosion, turbidity, summary)
211
 
@@ -263,7 +259,6 @@ with gr.Blocks(title="FlumenIntel") as demo:
263
  )
264
 
265
  with gr.Row():
266
- # LEFT COLUMN
267
  with gr.Column(scale=1):
268
  with gr.Group():
269
  gr.Markdown("### 1. Hydrological Data")
@@ -284,7 +279,6 @@ with gr.Blocks(title="FlumenIntel") as demo:
284
 
285
  analyze_btn = gr.Button("🚀 Run Analysis", variant="primary", size="lg")
286
 
287
- # RIGHT COLUMN
288
  with gr.Column(scale=2):
289
  status_box = gr.Textbox(label="Quick Status", interactive=False)
290
 
 
15
  import google.generativeai as genai
16
 
17
  # -------------------- ENVIRONMENT VARIABLES --------------------
 
18
  HF_API_KEY = os.getenv("HF_API_KEY")
19
  GROQ_API_KEY = os.getenv("GROQ_API_KEY")
20
  GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
21
  SENTINEL_CLIENT_ID = os.getenv("SENTINEL_CLIENT_ID")
22
  SENTINEL_CLIENT_SECRET = os.getenv("SENTINEL_CLIENT_SECRET")
23
 
24
+ # -------------------- SENTINEL CONFIG --------------------
25
  config = SHConfig()
26
  if SENTINEL_CLIENT_ID and SENTINEL_CLIENT_SECRET:
27
  config.client_id = SENTINEL_CLIENT_ID
28
  config.client_secret = SENTINEL_CLIENT_SECRET
29
 
30
+ # -------------------- AI FUNCTIONS (UPDATED MODELS) --------------------
31
 
32
def gemini_summary(text):
    """Backup summarizer: Google Gemini 1.5 Flash.

    Returns a ``(summary, error)`` pair — one element is always None,
    mirroring the other ``*_summary`` helpers in the fallback chain.
    """
    # Bail out early when the key is absent; this check cannot raise,
    # so it does not need to live inside the try block.
    if not GEMINI_API_KEY:
        return None, "Missing Key"
    try:
        genai.configure(api_key=GEMINI_API_KEY)
        # Current supported model (the older 'gemini-pro' alias is retired).
        reply = genai.GenerativeModel('gemini-1.5-flash').generate_content(text)
        return reply.text, None
    except Exception as exc:
        # Swallow and report: the caller logs the error string.
        return None, str(exc)
43
 
44
  def groq_summary(text):
45
+ """Primary: Groq (Llama 3.3)"""
46
  try:
47
  if not GROQ_API_KEY: return None, "Missing Key"
48
  client = Groq(api_key=GROQ_API_KEY)
49
  completion = client.chat.completions.create(
50
+ # UPDATED MODEL NAME - The old Mixtral one is retired
51
+ model="llama-3.3-70b-versatile",
52
  messages=[{"role": "user", "content": text}]
53
  )
54
  return completion.choices[0].message.content, None
 
56
  return None, str(e)
57
 
58
  def hf_summary(text):
59
+ """Fallback: Hugging Face (Zephyr)"""
60
  try:
61
+ # UPDATED URL AND MODEL
62
+ url = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
63
  headers = {"Authorization": f"Bearer {HF_API_KEY}"}
64
  payload = {
65
+ "inputs": f"<|system|>You are an environmental scientist.</s><|user|>{text}</s><|assistant|>",
66
  "parameters": {"max_new_tokens": 500}
67
  }
68
  r = requests.post(url, headers=headers, json=payload, timeout=25)
69
  if r.status_code == 200:
70
+ return r.json()[0]["generated_text"].split("<|assistant|>")[-1], None
71
  else:
72
  return None, f"Status {r.status_code}: {r.text}"
73
  except Exception as e:
 
76
def smart_summary(text):
    """Run the summarization fallback chain.

    Tries Groq first (fastest), then Gemini (most reliable), then
    Hugging Face (only when HF_API_KEY is set). Returns the first
    successful summary; otherwise a debug log listing every failure.
    """
    error_log = []

    def attempt(label, fn):
        # Each backend returns (result, error); record the error on failure.
        result, err = fn(text)
        if result:
            return result
        error_log.append(f"{label} Error: {err}")
        return None

    summary = attempt("Groq", groq_summary) or attempt("Gemini", gemini_summary)
    if not summary and HF_API_KEY:
        summary = attempt("HF", hf_summary)
    if summary:
        return summary

    return "⚠ ALL AI MODELS FAILED. DEBUG LOG:\n" + "\n".join(error_log)
96
 
97
+ # -------------------- MATH & LOGIC --------------------
98
  def calculate_wqi(pH, do, nutrients):
99
  wqi = (7 - abs(7 - pH)) * 0.2 + (do/14) * 0.5 + (10 - nutrients) * 0.3
100
  wqi_score = max(0, min(100, int(wqi*10)))
 
122
  turbidity_score = int(np.mean(img_array)/2.55)
123
  return turbidity_score
124
 
125
+ # -------------------- VISUALS --------------------
126
  def create_plots(wqi, hsi, erosion, turbidity):
127
  fig = go.Figure()
128
  colors = ['#1E90FF', '#32CD32', '#FF4500', '#FFA500']
 
157
  pdf.cell(0, 10, "Comprehensive Analysis", ln=True)
158
  pdf.set_font("Arial", "", 11)
159
 
 
160
  safe_summary = summary_text.encode('latin-1', 'replace').decode('latin-1')
161
  pdf.multi_cell(0, 6, safe_summary)
162
 
 
179
  # -------------------- MAIN PROCESSOR --------------------
180
  def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
181
  try:
 
182
  wqi = calculate_wqi(pH, do, nutrients)
183
  hsi = calculate_hsi(flow_rate, water_temp, sediment)
184
  erosion = calculate_erosion(sediment, construction)
 
186
  stability = river_stability(wqi, hsi, erosion)
187
  potability = potability_status(wqi)
188
 
 
189
  prompt = f"""
190
  Act as an Environmental Scientist. Analyze this river data:
191
  - Water Quality (WQI): {wqi}/100 ({potability})
 
202
 
203
  summary = smart_summary(prompt)
204
 
 
205
  fig = create_plots(wqi, hsi, erosion, turbidity)
206
  pdf_bytes = generate_pdf(wqi, hsi, erosion, turbidity, summary)
207
 
 
259
  )
260
 
261
  with gr.Row():
 
262
  with gr.Column(scale=1):
263
  with gr.Group():
264
  gr.Markdown("### 1. Hydrological Data")
 
279
 
280
  analyze_btn = gr.Button("🚀 Run Analysis", variant="primary", size="lg")
281
 
 
282
  with gr.Column(scale=2):
283
  status_box = gr.Textbox(label="Quick Status", interactive=False)
284