Wall06 committed on
Commit
e6a4e89
·
verified ·
1 Parent(s): 4baffa4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +91 -99
app.py CHANGED
@@ -1,18 +1,16 @@
1
  import os
2
  import gradio as gr
3
  import numpy as np
4
- import pandas as pd
5
- import matplotlib.pyplot as plt
6
  import plotly.graph_objects as go
7
  import requests
8
  import cv2
9
  from PIL import Image
10
  import qrcode
11
  from fpdf import FPDF
12
- from io import BytesIO
13
  from sentinelhub import SHConfig
14
  from groq import Groq
15
  import google.generativeai as genai
 
16
 
17
  # -------------------- ENVIRONMENT VARIABLES --------------------
18
  HF_API_KEY = os.getenv("HF_API_KEY")
@@ -27,14 +25,12 @@ if SENTINEL_CLIENT_ID and SENTINEL_CLIENT_SECRET:
27
  config.client_id = SENTINEL_CLIENT_ID
28
  config.client_secret = SENTINEL_CLIENT_SECRET
29
 
30
- # -------------------- AI FUNCTIONS (UPDATED MODELS) --------------------
31
-
32
  def gemini_summary(text):
33
  """Backup: Google Gemini 1.5 Flash"""
34
  try:
35
  if not GEMINI_API_KEY: return None, "Missing Key"
36
  genai.configure(api_key=GEMINI_API_KEY)
37
- # UPDATED MODEL NAME
38
  model = genai.GenerativeModel('gemini-1.5-flash')
39
  response = model.generate_content(text)
40
  return response.text, None
@@ -47,7 +43,6 @@ def groq_summary(text):
47
  if not GROQ_API_KEY: return None, "Missing Key"
48
  client = Groq(api_key=GROQ_API_KEY)
49
  completion = client.chat.completions.create(
50
- # UPDATED MODEL NAME - The old Mixtral one is retired
51
  model="llama-3.3-70b-versatile",
52
  messages=[{"role": "user", "content": text}]
53
  )
@@ -58,12 +53,11 @@ def groq_summary(text):
58
  def hf_summary(text):
59
  """Fallback: Hugging Face (Zephyr)"""
60
  try:
61
- # UPDATED URL AND MODEL
62
  url = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
63
  headers = {"Authorization": f"Bearer {HF_API_KEY}"}
64
  payload = {
65
- "inputs": f"<|system|>You are an environmental scientist.</s><|user|>{text}</s><|assistant|>",
66
- "parameters": {"max_new_tokens": 500}
67
  }
68
  r = requests.post(url, headers=headers, json=payload, timeout=25)
69
  if r.status_code == 200:
@@ -75,24 +69,21 @@ def hf_summary(text):
75
 
76
  def smart_summary(text):
77
  errors = []
78
-
79
- # 1. Try Groq (Fastest)
80
  out, err = groq_summary(text)
81
  if out: return out
82
- errors.append(f"Groq Error: {err}")
83
-
84
- # 2. Try Gemini (Most Reliable)
85
  out, err = gemini_summary(text)
86
  if out: return out
87
- errors.append(f"Gemini Error: {err}")
88
-
89
- # 3. Try Hugging Face (Backup)
90
  if HF_API_KEY:
91
  out, err = hf_summary(text)
92
  if out: return out
93
- errors.append(f"HF Error: {err}")
94
 
95
- return "⚠ ALL AI MODELS FAILED. DEBUG LOG:\n" + "\n".join(errors)
96
 
97
  # -------------------- MATH & LOGIC --------------------
98
  def calculate_wqi(pH, do, nutrients):
@@ -131,50 +122,63 @@ def create_plots(wqi, hsi, erosion, turbidity):
131
  fig.update_layout(title="River Health Metrics", yaxis=dict(range=[0,100]), template="plotly_white")
132
  return fig
133
 
 
134
  def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
135
  pdf = FPDF()
136
  pdf.add_page()
137
- pdf.set_font("Arial", "B", 20)
138
- pdf.set_text_color(30, 144, 255)
139
- pdf.cell(0, 15, "FlumenIntel Report", ln=True, align='C')
140
 
141
- pdf.set_font("Arial", "I", 10)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
142
  pdf.set_text_color(100, 100, 100)
143
- pdf.cell(0, 10, "Developed by Abdullah", ln=True, align='C')
144
  pdf.ln(10)
145
 
146
- # Metrics
147
- pdf.set_font("Arial", "B", 12)
148
  pdf.set_text_color(0, 0, 0)
149
- pdf.cell(0,10,f"Water Quality Index (WQI): {wqi}", ln=True)
150
- pdf.cell(0,10,f"Habitat Suitability (HSI): {hsi}", ln=True)
151
- pdf.cell(0,10,f"Erosion Risk: {erosion}", ln=True)
152
- pdf.cell(0,10,f"Turbidity Level: {turbidity}", ln=True)
153
- pdf.ln(5)
 
 
 
 
 
 
154
 
155
- # Summary
 
 
 
 
156
  pdf.set_font("Arial", "B", 14)
157
- pdf.cell(0, 10, "Comprehensive Analysis", ln=True)
158
  pdf.set_font("Arial", "", 11)
159
 
160
- safe_summary = summary_text.encode('latin-1', 'replace').decode('latin-1')
161
- pdf.multi_cell(0, 6, safe_summary)
162
-
163
- # QR Code
164
- import tempfile
165
- qr = qrcode.QRCode(box_size=3)
166
- qr.add_data("FlumenIntel - AI River Analysis")
167
- qr.make(fit=True)
168
- img = qr.make_image(fill_color="black", back_color="white")
169
 
170
- with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as tmp:
171
- img.save(tmp.name)
172
- pdf.image(tmp.name, x=170, y=10, w=25)
173
-
174
- try:
175
- return pdf.output(dest='S').encode('latin-1')
176
- except:
177
- return pdf.output(dest='S')
178
 
179
  # -------------------- MAIN PROCESSOR --------------------
180
  def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
@@ -186,18 +190,24 @@ def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrient
186
  stability = river_stability(wqi, hsi, erosion)
187
  potability = potability_status(wqi)
188
 
 
189
  prompt = f"""
190
- Act as an Environmental Scientist. Analyze this river data:
191
- - Water Quality (WQI): {wqi}/100 ({potability})
192
- - Biodiversity Habitat (HSI): {hsi}/100
193
- - Erosion Risk: {erosion}/100
194
- - Turbidity: {turbidity}/100
 
 
195
 
196
- Write a report (150+ words) with these sections:
197
- 1. **Executive Summary**
198
- 2. **Biodiversity Impact**
199
- 3. **Causes & Mitigation**
200
- 4. **Future Prediction**
 
 
 
201
  """
202
 
203
  summary = smart_summary(prompt)
@@ -205,58 +215,34 @@ def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrient
205
  fig = create_plots(wqi, hsi, erosion, turbidity)
206
  pdf_bytes = generate_pdf(wqi, hsi, erosion, turbidity, summary)
207
 
208
- import tempfile
209
  with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp_pdf:
210
  tmp_pdf.write(pdf_bytes)
211
  pdf_path = tmp_pdf.name
212
 
213
- status_text = f"Overall Stability: {stability}/100\nPotability: {potability}"
214
 
215
  return status_text, fig, summary, pdf_path
216
 
217
  except Exception as e:
218
- return str(e), None, f"System Error: {str(e)}", None
219
 
220
  # -------------------- UI DESIGN --------------------
221
  custom_css = """
222
  @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@400;600&display=swap');
223
-
224
  #title-box {
225
  text-align: center;
226
- background: linear-gradient(135deg, #0061ff 0%, #60efff 100%);
227
  padding: 25px;
228
- border-radius: 12px;
229
- box-shadow: 0 4px 15px rgba(0,0,0,0.1);
230
- color: white;
231
- margin-bottom: 20px;
232
- }
233
- #main-title {
234
- font-family: 'Poppins', sans-serif;
235
- font-size: 2.5rem;
236
- font-weight: 600;
237
- margin: 0;
238
- color: white;
239
- }
240
- #sub-title {
241
- font-family: 'Poppins', sans-serif;
242
- font-size: 1.1rem;
243
- font-weight: 400;
244
- opacity: 0.9;
245
- margin-top: 5px;
246
  color: white;
247
  }
248
  """
249
 
250
- with gr.Blocks(title="FlumenIntel") as demo:
251
  gr.Markdown(f"<style>{custom_css}</style>")
252
 
253
  with gr.Column(elem_id="title-box"):
254
- gr.Markdown(
255
- """
256
- <h1 id="main-title">FlumenIntel 🌊</h1>
257
- <div id="sub-title">AI River Health Analyzer | Developed by Abdullah</div>
258
- """
259
- )
260
 
261
  with gr.Row():
262
  with gr.Column(scale=1):
@@ -277,21 +263,27 @@ with gr.Blocks(title="FlumenIntel") as demo:
277
  gr.Markdown("### 3. Visual Analysis")
278
  sat_img = gr.Image(label="Satellite Image", sources=["upload", "clipboard"], type="pil")
279
 
280
- analyze_btn = gr.Button("🚀 Run Analysis", variant="primary", size="lg")
281
 
282
  with gr.Column(scale=2):
283
- status_box = gr.Textbox(label="Quick Status", interactive=False)
284
 
285
  with gr.Tabs():
286
  with gr.TabItem("📊 Visual Analytics"):
287
- plot_output = gr.Plot(label="Health Metrics")
288
 
289
- with gr.TabItem("🤖 AI Comprehensive Report"):
290
- ai_summary = gr.Textbox(label="Analysis", lines=12, interactive=False)
 
 
 
 
 
 
291
 
292
- with gr.TabItem("📄 Download Report"):
293
- gr.Markdown("### Export Data")
294
- pdf_output = gr.File(label="Download Official PDF Report")
295
 
296
  analyze_btn.click(
297
  process_data,
 
1
  import os
2
  import gradio as gr
3
  import numpy as np
 
 
4
  import plotly.graph_objects as go
5
  import requests
6
  import cv2
7
  from PIL import Image
8
  import qrcode
9
  from fpdf import FPDF
 
10
  from sentinelhub import SHConfig
11
  from groq import Groq
12
  import google.generativeai as genai
13
+ import tempfile
14
 
15
  # -------------------- ENVIRONMENT VARIABLES --------------------
16
  HF_API_KEY = os.getenv("HF_API_KEY")
 
25
  config.client_id = SENTINEL_CLIENT_ID
26
  config.client_secret = SENTINEL_CLIENT_SECRET
27
 
28
+ # -------------------- AI FUNCTIONS (UPDATED) --------------------
 
29
  def gemini_summary(text):
30
  """Backup: Google Gemini 1.5 Flash"""
31
  try:
32
  if not GEMINI_API_KEY: return None, "Missing Key"
33
  genai.configure(api_key=GEMINI_API_KEY)
 
34
  model = genai.GenerativeModel('gemini-1.5-flash')
35
  response = model.generate_content(text)
36
  return response.text, None
 
43
  if not GROQ_API_KEY: return None, "Missing Key"
44
  client = Groq(api_key=GROQ_API_KEY)
45
  completion = client.chat.completions.create(
 
46
  model="llama-3.3-70b-versatile",
47
  messages=[{"role": "user", "content": text}]
48
  )
 
53
  def hf_summary(text):
54
  """Fallback: Hugging Face (Zephyr)"""
55
  try:
 
56
  url = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
57
  headers = {"Authorization": f"Bearer {HF_API_KEY}"}
58
  payload = {
59
+ "inputs": f"<|system|>You are a scientist.</s><|user|>{text}</s><|assistant|>",
60
+ "parameters": {"max_new_tokens": 800}
61
  }
62
  r = requests.post(url, headers=headers, json=payload, timeout=25)
63
  if r.status_code == 200:
 
69
 
70
def smart_summary(text):
    """Return the first successful AI summary, trying providers in order.

    Order of preference: Groq (fastest), Gemini, then Hugging Face (only when
    an HF key is configured). If every provider fails, return a debug string
    listing each provider's error instead of raising.
    """
    providers = [("Groq", groq_summary), ("Gemini", gemini_summary)]
    if HF_API_KEY:
        providers.append(("HF", hf_summary))

    failures = []
    for label, provider in providers:
        result, error = provider(text)
        if result:
            return result
        failures.append(f"{label}: {error}")

    return "⚠ SYSTEM FAILURE. DEBUG LOG:\n" + "\n".join(failures)
87
 
88
  # -------------------- MATH & LOGIC --------------------
89
  def calculate_wqi(pH, do, nutrients):
 
122
  fig.update_layout(title="River Health Metrics", yaxis=dict(range=[0,100]), template="plotly_white")
123
  return fig
124
 
125
+ # -------------------- PDF ENGINE --------------------
126
def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
    """Render the analysis results into a branded one-page PDF report.

    Args:
        wqi: Water Quality Index score (shown as "<wqi>/100").
        hsi: Habitat Suitability Index score.
        erosion: Erosion risk score.
        turbidity: Turbidity score.
        summary_text: AI-generated analysis placed in the report body.

    Returns:
        bytes: the finished PDF document, ready to write to a binary file.
    """
    pdf = FPDF()
    pdf.add_page()

    # --- 1. QR CODE (TOP RIGHT) ---
    qr = qrcode.QRCode(box_size=3)
    qr.add_data("FlumenIntel Report Verified")
    qr.make(fit=True)
    img = qr.make_image(fill_color="black", back_color="white")

    # fpdf needs an image path, so round-trip the QR through a temp file;
    # remove it afterwards (the previous version leaked one PNG per report).
    with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as tmp:
        img.save(tmp.name)
        # x=165 pushes it to the right margin (A4 width is 210 mm)
        pdf.image(tmp.name, x=165, y=10, w=30)
    try:
        os.remove(tmp.name)
    except OSError:
        pass  # best-effort cleanup; a stale temp file is harmless

    # --- 2. HEADER ---
    pdf.set_y(15)  # Align with QR code
    pdf.set_font("Arial", "B", 24)
    pdf.set_text_color(0, 51, 102)  # Dark Blue
    pdf.cell(0, 10, "FlumenIntel", ln=True, align='L')

    pdf.set_font("Arial", "I", 12)
    pdf.set_text_color(100, 100, 100)
    pdf.cell(0, 10, "Professional River Health Assessment", ln=True, align='L')
    pdf.ln(10)

    # --- 3. METRICS TABLE ---
    pdf.set_font("Arial", "B", 14)
    pdf.set_text_color(0, 0, 0)
    pdf.cell(0, 10, "1. Key Environmental Metrics", ln=True)

    # Two-column bordered rows: label | "<score>/100".
    pdf.set_font("Arial", "", 12)
    for label, value in (
        ("Water Quality (WQI):", wqi),
        ("Habitat Score (HSI):", hsi),
        ("Erosion Risk:", erosion),
        ("Turbidity:", turbidity),
    ):
        pdf.cell(50, 10, label, border=1)
        pdf.cell(50, 10, f"{value}/100", border=1, ln=1)
    pdf.ln(10)

    # --- 4. PROFESSIONAL SUMMARY ---
    pdf.set_font("Arial", "B", 14)
    pdf.cell(0, 10, "2. Scientist's Analysis", ln=True)
    pdf.set_font("Arial", "", 11)

    # FPDF's core fonts only support latin-1; replace anything outside it
    # so multi_cell cannot raise on emoji/unicode in the AI summary.
    clean_summary = summary_text.encode('latin-1', 'replace').decode('latin-1')
    pdf.multi_cell(0, 6, clean_summary)

    # Output: classic PyFPDF returns a latin-1 str from output(dest='S'),
    # while fpdf2 returns a bytearray (and has no .encode). Normalise to
    # bytes so both library generations work — the bare
    # `pdf.output(dest='S').encode('latin-1')` crashes under fpdf2.
    raw = pdf.output(dest='S')
    if isinstance(raw, str):
        return raw.encode('latin-1')
    return bytes(raw)
 
 
 
 
 
 
182
 
183
  # -------------------- MAIN PROCESSOR --------------------
184
  def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
 
190
  stability = river_stability(wqi, hsi, erosion)
191
  potability = potability_status(wqi)
192
 
193
+ # --- PROFESSIONAL PROMPT ---
194
  prompt = f"""
195
+ ROLE: Senior Environmental Scientist.
196
+ TASK: Write a formal "River Health Assessment Report".
197
+ DATA:
198
+ - WQI: {wqi} (Potability: {potability})
199
+ - HSI: {hsi}
200
+ - Erosion: {erosion}
201
+ - Turbidity: {turbidity}
202
 
203
+ REQUIREMENTS:
204
+ - Tone: Professional, Objective, Scientific.
205
+ - No Markdown symbols (like ** or ##). Use plain text formatting.
206
+ - Structure:
207
+ 1. EXECUTIVE SUMMARY: High-level status.
208
+ 2. BIOLOGICAL IMPACT: Effect on local aquatic life.
209
+ 3. MITIGATION PLAN: 3 specific, actionable steps.
210
+ 4. FORECAST: Predicted outcome if untreated.
211
  """
212
 
213
  summary = smart_summary(prompt)
 
215
  fig = create_plots(wqi, hsi, erosion, turbidity)
216
  pdf_bytes = generate_pdf(wqi, hsi, erosion, turbidity, summary)
217
 
 
218
  with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp_pdf:
219
  tmp_pdf.write(pdf_bytes)
220
  pdf_path = tmp_pdf.name
221
 
222
+ status_text = f"Stability Index: {stability}/100\nStatus: {potability}"
223
 
224
  return status_text, fig, summary, pdf_path
225
 
226
  except Exception as e:
227
+ return str(e), None, f"Error: {str(e)}", None
228
 
229
  # -------------------- UI DESIGN --------------------
230
  custom_css = """
231
  @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@400;600&display=swap');
 
232
  #title-box {
233
  text-align: center;
234
+ background: linear-gradient(135deg, #0f2027, #203a43, #2c5364); /* Professional Dark Blue */
235
  padding: 25px;
236
+ border-radius: 8px;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
237
  color: white;
238
  }
239
  """
240
 
241
+ with gr.Blocks(title="FlumenIntel", css=custom_css) as demo:
242
  gr.Markdown(f"<style>{custom_css}</style>")
243
 
244
  with gr.Column(elem_id="title-box"):
245
+ gr.Markdown("<h1>FlumenIntel 🌊</h1><h3>Advanced River Health Analytics</h3>")
 
 
 
 
 
246
 
247
  with gr.Row():
248
  with gr.Column(scale=1):
 
263
  gr.Markdown("### 3. Visual Analysis")
264
  sat_img = gr.Image(label="Satellite Image", sources=["upload", "clipboard"], type="pil")
265
 
266
+ analyze_btn = gr.Button("GENERATE REPORT", variant="primary", size="lg")
267
 
268
  with gr.Column(scale=2):
269
+ status_box = gr.Textbox(label="System Status", interactive=False)
270
 
271
  with gr.Tabs():
272
  with gr.TabItem("📊 Visual Analytics"):
273
+ plot_output = gr.Plot(label="Metric Visualization")
274
 
275
+ with gr.TabItem("📄 Official Report"):
276
+ # lines=25 creates a large box. Gradio ADDS A SCROLLBAR automatically if text overflows.
277
+ ai_summary = gr.Textbox(
278
+ label="Scientist's Assessment",
279
+ lines=25,
280
+ show_copy_button=True,
281
+ interactive=False
282
+ )
283
 
284
+ with gr.TabItem("📥 Export"):
285
+ gr.Markdown("### Download Verified PDF")
286
+ pdf_output = gr.File(label="FlumenIntel Report.pdf")
287
 
288
  analyze_btn.click(
289
  process_data,