Wall06 committed on
Commit
6c411f0
Β·
verified Β·
1 Parent(s): 59c06a6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +227 -57
app.py CHANGED
@@ -11,100 +11,270 @@ from sentinelhub import SHConfig
11
  from groq import Groq
12
  import google.generativeai as genai
13
  import tempfile
14
- import pytz # Ensure pytz is available
15
 
16
- # -------------------- API KEYS --------------------
 
17
  GROQ_API_KEY = "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"
18
  GEMINI_API_KEY = "AIzaSyCqPnhDNwBP6Tsw1wkLGdXCIVDnNO44swY"
 
 
 
 
 
 
 
 
19
 
20
  # -------------------- AI FUNCTIONS --------------------
21
- def smart_summary(text):
 
 
 
 
 
 
 
 
 
 
22
  try:
 
23
  client = Groq(api_key=GROQ_API_KEY)
24
  completion = client.chat.completions.create(
25
  model="llama-3.3-70b-versatile",
26
  messages=[{"role": "user", "content": text}]
27
  )
28
- return completion.choices[0].message.content
29
- except:
30
- try:
31
- genai.configure(api_key=GEMINI_API_KEY)
32
- model = genai.GenerativeModel('gemini-1.5-flash')
33
- response = model.generate_content(text)
34
- return response.text
35
- except:
36
- return "⚠ AI Error: Check your internet and API keys."
37
-
38
- # -------------------- STABLE gTTS AUDIO --------------------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
  def generate_audio_report(text):
40
  try:
41
  from gtts import gTTS
42
  except ImportError:
43
- raise gr.Error("❌ Add 'gTTS' to requirements.txt")
44
 
45
- if not text or len(text.strip()) < 10:
46
- raise gr.Error("❌ Generate the report first!")
47
 
48
  try:
49
- # Generate speech using free Google engine
50
  tts = gTTS(text=text[:1500], lang='en')
51
 
52
  with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as f:
53
  tts.save(f.name)
54
  return f.name
55
  except Exception as e:
56
- # Handles the 429 Too Many Requests error
57
  raise gr.Error(f"Speech Generation Error: {str(e)}")
58
 
59
- # -------------------- CORE LOGIC --------------------
60
- def run_app(flow, temp, sed, const, ph, do, nutri, sat_img):
61
- wqi = max(0, min(100, int(((7 - abs(7 - ph)) * 0.2 + (do/14) * 0.5 + (10 - nutri) * 0.3) * 10)))
62
- hsi = max(0, min(100, int(100 - abs(flow-50)*0.5 - abs(temp-20)*2 - sed*1.5)))
63
- erosion = max(0, min(100, int(sed*1.5 + const*2)))
64
- turb = int(np.mean(np.array(sat_img.convert("L")))/2.55) if sat_img else 0
65
-
66
- prompt = f"Scientist Report. WQI {wqi}, HSI {hsi}, Erosion {erosion}, Turbidity {turb}."
67
- summary = smart_summary(prompt)
68
-
69
- fig = go.Figure(go.Bar(x=["WQI", "HSI", "Erosion", "Turbidity"], y=[wqi, hsi, erosion, turb], marker_color='#0061ff'))
70
- fig.update_layout(yaxis=dict(range=[0,100]), template="plotly_white")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
 
 
 
 
 
 
 
72
  pdf = FPDF()
73
  pdf.add_page()
74
- pdf.set_font("Arial", "B", 16)
75
- pdf.cell(0, 10, "FlumenIntel River Health Report", ln=True)
 
 
 
 
 
 
 
 
 
 
 
 
76
  pdf.set_font("Arial", "", 12)
77
- pdf.multi_cell(0, 10, summary.encode('latin-1', 'replace').decode('latin-1'))
 
 
 
 
78
 
79
  report_path = os.path.join(tempfile.gettempdir(), "FlumenIntel_Report.pdf")
80
  pdf.output(report_path)
81
- return f"River Status: {wqi}/100", fig, summary, report_path
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82
 
83
- # -------------------- UI --------------------
84
  with gr.Blocks(title="FlumenIntel") as demo:
85
- gr.Markdown("# FlumenIntel 🌊")
86
- with gr.Row():
87
- with gr.Column():
88
- flow = gr.Number(label="Flow", value=45)
89
- temp = gr.Number(label="Temp", value=18)
90
- sed = gr.Slider(0, 10, label="Sediment", value=2)
91
- const = gr.Slider(0, 10, label="Construction", value=0)
92
- ph = gr.Number(label="pH", value=7.2)
93
- do = gr.Number(label="Oxygen", value=9.5)
94
- nutri = gr.Slider(0, 10, label="Nutrients", value=1)
95
- sat_img = gr.Image(label="Satellite", type="pil")
96
- btn = gr.Button("GENERATE REPORT", variant="primary")
97
- with gr.Column():
98
- status = gr.Textbox(label="Status")
99
- plot = gr.Plot()
100
- ai_txt = gr.Textbox(label="Report", lines=10)
101
  with gr.Row():
102
- audio_btn = gr.Button("πŸ”Š Listen")
103
- audio_out = gr.Audio(label="Audio")
104
- pdf_out = gr.File(label="Download")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
105
 
106
- btn.click(run_app, [flow, temp, sed, const, ph, do, nutri, sat_img], [status, plot, ai_txt, pdf_out])
107
- audio_btn.click(generate_audio_report, ai_txt, audio_out)
 
 
 
 
108
 
109
  if __name__ == "__main__":
110
  demo.launch()
 
11
  from groq import Groq
12
  import google.generativeai as genai
13
  import tempfile
 
# -------------------- ENVIRONMENT VARIABLES --------------------
# SECURITY: all credentials come from the environment. Never hard-code API
# keys in source control — the previous revision committed live Groq and
# Gemini keys, which must be considered compromised and rotated.
HF_API_KEY = os.getenv("HF_API_KEY")
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
SENTINEL_CLIENT_ID = os.getenv("SENTINEL_CLIENT_ID")
SENTINEL_CLIENT_SECRET = os.getenv("SENTINEL_CLIENT_SECRET")

# -------------------- SENTINEL CONFIG --------------------
# Credentials are optional: the config object is created either way so the
# rest of the app can import it, but Sentinel Hub access only works when
# both halves of the OAuth pair are present.
config = SHConfig()
if SENTINEL_CLIENT_ID and SENTINEL_CLIENT_SECRET:
    config.client_id = SENTINEL_CLIENT_ID
    config.client_secret = SENTINEL_CLIENT_SECRET
28
# -------------------- AI FUNCTIONS --------------------
def gemini_summary(text):
    """Summarize *text* with Google Gemini (gemini-1.5-flash).

    Returns a ``(summary, error)`` pair — exactly one element is non-None —
    so callers can chain providers without exception handling.
    """
    if not GEMINI_API_KEY:
        return None, "Missing Key"
    try:
        genai.configure(api_key=GEMINI_API_KEY)
        reply = genai.GenerativeModel('gemini-1.5-flash').generate_content(text)
        return reply.text, None
    except Exception as exc:
        return None, str(exc)
+
def groq_summary(text):
    """Summarize *text* via Groq's hosted Llama 3.3 70B model.

    Returns a ``(summary, error)`` pair — exactly one element is non-None —
    mirroring the other provider wrappers so they can be chained.
    """
    if not GROQ_API_KEY:
        return None, "Missing Key"
    try:
        chat = Groq(api_key=GROQ_API_KEY).chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=[{"role": "user", "content": text}],
        )
        return chat.choices[0].message.content, None
    except Exception as exc:
        return None, str(exc)
def hf_summary(text):
    """Summarize *text* through the Hugging Face Inference API (Zephyr-7B).

    Returns a ``(summary, error)`` pair like the other provider wrappers.
    NOTE(review): relies on a module-level ``requests`` import that is not
    visible in this chunk — confirm it exists at the top of the file.
    """
    endpoint = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
    body = {
        "inputs": f"<|system|>You are a scientist.</s><|user|>{text}</s><|assistant|>",
        "parameters": {"max_new_tokens": 800},
    }
    try:
        resp = requests.post(
            endpoint,
            headers={"Authorization": f"Bearer {HF_API_KEY}"},
            json=body,
            timeout=25,
        )
        if resp.status_code != 200:
            return None, f"Status {resp.status_code}: {resp.text}"
        # The model echoes the prompt; keep only the assistant's turn.
        return resp.json()[0]["generated_text"].split("<|assistant|>")[-1], None
    except Exception as exc:
        return None, str(exc)
+
67
+ def smart_summary(text):
68
+ errors = []
69
+ out, err = groq_summary(text)
70
+ if out: return out
71
+ errors.append(f"Groq: {err}")
72
+ out, err = gemini_summary(text)
73
+ if out: return out
74
+ errors.append(f"Gemini: {err}")
75
+ if HF_API_KEY:
76
+ out, err = hf_summary(text)
77
+ if out: return out
78
+ errors.append(f"HF: {err}")
79
+ return "⚠ SYSTEM FAILURE. DEBUG LOG:\n" + "\n".join(errors)
80
+
# -------------------- AUDIO FUNCTION (STABLE gTTS) --------------------
def generate_audio_report(text):
    """Convert the report *text* to speech and return a temp .mp3 path.

    Raises gr.Error with a user-facing message when gTTS is not installed,
    when there is no valid report yet (including the smart_summary failure
    banner), or when synthesis itself fails.
    """
    try:
        from gtts import gTTS
    except ImportError:
        raise gr.Error("❌ Library Missing! Add 'gTTS' to requirements.txt")

    if not text or "SYSTEM FAILURE" in text:
        raise gr.Error("❌ No valid report text found. Generate report first!")

    try:
        # gTTS needs no API key; cap input length to keep the request fast.
        speech = gTTS(text=text[:1500], lang='en')
        with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as handle:
            speech.save(handle.name)
            return handle.name
    except Exception as exc:
        raise gr.Error(f"Speech Generation Error: {str(exc)}")
 
# -------------------- MATH & LOGIC --------------------
def calculate_wqi(pH, do, nutrients):
    """Water Quality Index on a 0-100 integer scale.

    Weighted blend of pH closeness to neutral (20%), dissolved oxygen as a
    fraction of 14 mg/L (50%), and inverse nutrient load out of 10 (30%),
    scaled by 10 and clamped to [0, 100].
    """
    raw = (7 - abs(7 - pH)) * 0.2 + (do / 14) * 0.5 + (10 - nutrients) * 0.3
    return min(100, max(0, int(raw * 10)))
+
def calculate_hsi(flow_rate, temp, sediment):
    """Habitat Suitability Index (0-100, integer).

    Starts from a perfect 100 and penalises deviation from an ideal flow
    of 50, an ideal temperature of 20, and any sediment load.
    """
    score = 100 - abs(flow_rate - 50) * 0.5 - abs(temp - 20) * 2 - sediment * 1.5
    return max(0, min(100, int(score)))
+
def calculate_erosion(sediment, construction):
    """Erosion risk (0-100): sediment weighted 1.5x, construction 2x, clamped."""
    return max(0, min(100, int(sediment * 1.5 + construction * 2)))
+
def potability_status(wqi):
    """Map a Water Quality Index to a human-readable potability label.

    >80 is drinkable, 51-80 needs boiling, 50 and below is unsafe.
    """
    if wqi > 80:
        return "Safe"
    if wqi > 50:
        return "Boil Required"
    return "Toxic"
+
def river_stability(wqi, hsi, erosion):
    """Composite stability index: 40% WQI + 40% HSI + 20% inverse erosion."""
    return int(wqi * 0.4 + hsi * 0.4 + (100 - erosion) * 0.2)
+
def analyze_satellite_image(img):
    """Estimate turbidity (0-100) from the mean brightness of a PIL image.

    The image is converted to grayscale and its mean pixel level (0-255)
    is rescaled to 0-100 by dividing by 2.55. Brightness as a turbidity
    proxy is a heuristic, not a calibrated measurement.
    Returns 0 when no image was uploaded.
    """
    if img is None:
        return 0
    grayscale = np.array(img.convert("L"))
    return int(np.mean(grayscale) / 2.55)
+
# -------------------- VISUALS & INSIGHTS --------------------
def create_plots(wqi, hsi, erosion, turbidity):
    """Build a single bar chart of the four river-health metrics (0-100)."""
    labels = ["WQI", "HSI", "Erosion", "Turbidity"]
    values = [wqi, hsi, erosion, turbidity]
    palette = ['#0061ff', '#60efff', '#ff4b4b', '#ffb347']

    fig = go.Figure()
    fig.add_trace(go.Bar(name="Metrics", x=labels, y=values, marker_color=palette))
    fig.update_layout(
        title="River Health Metrics",
        yaxis=dict(range=[0, 100]),
        template="plotly_white",
    )
    return fig
+
def generate_graph_insights(wqi, hsi, erosion, turbidity):
    """Render a short markdown commentary for the metrics chart.

    Only WQI and HSI drive the narrative today; erosion and turbidity are
    accepted to keep the signature parallel with create_plots.
    """
    if wqi > 70:
        water_line = f"🔵 **Water Quality:** {wqi}/100. Excellent condition.\n\n"
    elif wqi > 40:
        water_line = f"🔵 **Water Quality:** {wqi}/100. Moderate pollution.\n\n"
    else:
        water_line = f"🔵 **Water Quality:** {wqi}/100. **CRITICAL**.\n\n"

    if hsi > 70:
        habitat_line = f"🟢 **Habitat:** {hsi}/100. Good biodiversity.\n\n"
    else:
        habitat_line = f"🟢 **Habitat:** {hsi}/100. Poor conditions.\n\n"

    return "### 📉 Graph Analysis\n\n" + water_line + habitat_line
+
# -------------------- PDF ENGINE --------------------
def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
    """Write the branded PDF report to a temp directory and return its path.

    Embeds a QR code stamping the headline WQI in the top-right corner,
    then a blue header and the AI summary body. hsi/erosion/turbidity are
    currently unused but kept so the caller can pass the full metric set.
    """
    pdf = FPDF()
    pdf.add_page()

    # Verification QR code (top-right).
    qr = qrcode.QRCode(box_size=3)
    qr.add_data(f"Verified FlumenIntel Report | WQI: {wqi}")
    qr.make(fit=True)
    qr_img = qr.make_image(fill_color="black", back_color="white")
    with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as tmp:
        qr_img.save(tmp.name)
        pdf.image(tmp.name, x=165, y=10, w=30)

    # Branded header.
    pdf.set_y(15)
    pdf.set_font("Arial", "B", 24)
    pdf.set_text_color(0, 97, 255)
    pdf.cell(0, 10, "FlumenIntel", ln=True, align='L')
    pdf.ln(10)

    # Body text. FPDF's core fonts are latin-1 only, so any character
    # outside that range (emoji, smart quotes) is replaced rather than
    # crashing the export.
    pdf.set_font("Arial", "", 12)
    pdf.set_text_color(0, 0, 0)
    pdf.multi_cell(0, 6, summary_text.encode('latin-1', 'replace').decode('latin-1'))

    report_path = os.path.join(tempfile.gettempdir(), "FlumenIntel_Report.pdf")
    pdf.output(report_path)
    return report_path
+
# -------------------- MAIN PROCESSOR --------------------
def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
    """End-to-end pipeline: metrics -> AI summary -> chart -> PDF.

    Returns a 5-tuple matching the Gradio outputs:
    (status_text, figure, graph_markdown, summary_text, pdf_path).
    Any exception is reported through the status slot (with the other
    outputs blanked) so the UI degrades gracefully instead of crashing.
    """
    try:
        wqi = calculate_wqi(pH, do, nutrients)
        hsi = calculate_hsi(flow_rate, water_temp, sediment)
        erosion = calculate_erosion(sediment, construction)
        turbidity = analyze_satellite_image(sat_img)
        stability = river_stability(wqi, hsi, erosion)
        potability = potability_status(wqi)

        prompt = f"Write a professional health report for a river. WQI: {wqi}, HSI: {hsi}, Erosion: {erosion}, Turbidity: {turbidity}. Potability: {potability}."
        summary = smart_summary(prompt)

        fig = create_plots(wqi, hsi, erosion, turbidity)
        graph_text = generate_graph_insights(wqi, hsi, erosion, turbidity)

        # gr.File receives a filesystem path to enable the download link.
        pdf_path = generate_pdf(wqi, hsi, erosion, turbidity, summary)

        status_text = f"Stability Index: {stability}/100\nStatus: {potability}"
        return status_text, fig, graph_text, summary, pdf_path

    except Exception as exc:
        return str(exc), None, "", "", None
+
# Wrapper kept as a stable event hook for the Gradio button.
def run_app(flow, temp, sediment, construction, ph, do, nutrients, sat_img):
    """Delegate directly to process_data with the dashboard's input values."""
    return process_data(flow, temp, sediment, construction, ph, do, nutrients, sat_img)
+
# -------------------- UI DESIGN --------------------
custom_css = """
@import url('https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;600&display=swap');
* { font-family: 'Poppins', sans-serif !important; }
#title-box { background: linear-gradient(135deg, #0061ff 0%, #60efff 100%); color: white; padding: 20px; border-radius: 12px; text-align: center;}
#analyze-btn { background: #0061ff; color: white; border: none; font-weight: bold; cursor: pointer; border-radius: 8px;}
"""

with gr.Blocks(title="FlumenIntel") as demo:
    # Inject the custom font/colour theme.
    gr.HTML(f"<style>{custom_css}</style>")
    with gr.Column(elem_id="title-box"):
        gr.Markdown("# FlumenIntel 🌊\n### Advanced River Health Analytics")

    with gr.Tabs():
        # --- TAB 1: DASHBOARD ---
        with gr.TabItem("🚀 Dashboard"):
            with gr.Row():
                # Left column: hydrological, chemical and imagery inputs.
                with gr.Column(scale=1):
                    gr.Markdown("### 1. Hydrological Data")
                    flow = gr.Number(label="Flow Rate", value=45)
                    temp = gr.Number(label="Temperature", value=18)
                    sediment = gr.Slider(0, 10, label="Sediment", value=2)
                    construction = gr.Slider(0, 10, label="Construction", value=0)

                    gr.Markdown("### 2. Chemical Data")
                    ph = gr.Number(label="pH Level", value=7.2)
                    do = gr.Number(label="Dissolved Oxygen", value=9.5)
                    nutrients = gr.Slider(0, 10, label="Nutrient Load", value=1)

                    gr.Markdown("### 3. Visual Analysis")
                    sat_img = gr.Image(label="Satellite Image", type="pil")

                    analyze_btn = gr.Button("GENERATE REPORT", elem_id="analyze-btn")

                # Right column: status, charts, report text and export.
                with gr.Column(scale=2):
                    status_box = gr.Textbox(label="System Status", interactive=False)

                    with gr.Tabs():
                        with gr.TabItem("📊 Visual Analytics"):
                            plot_output = gr.Plot(label="Metric Visualization")
                            graph_summary_box = gr.Markdown("### Insights...")

                        with gr.TabItem("📄 Official Report"):
                            ai_summary = gr.Textbox(label="Scientist's Assessment", lines=15, interactive=False)

                            # --- AUDIO BUTTON ---
                            with gr.Row():
                                audio_btn = gr.Button("🔊 Listen to Report (gTTS)")
                                audio_out = gr.Audio(label="Player", type="filepath")

                            audio_btn.click(
                                fn=generate_audio_report,
                                inputs=ai_summary,
                                outputs=audio_out,
                            )

                        with gr.TabItem("📥 Export"):
                            # gr.File renders its own download affordance.
                            pdf_output = gr.File(label="Download Official FlumenIntel Report")

        # --- TAB 2: ABOUT ME ---
        with gr.TabItem("👤 About Me"):
            gr.Markdown("## Abdullah\nComputer Engineering Undergraduate | AI & Hardware Enthusiast")

    # Wire the dashboard button to the full analysis pipeline.
    analyze_btn.click(
        run_app,
        inputs=[flow, temp, sediment, construction, ph, do, nutrients, sat_img],
        outputs=[status_box, plot_output, graph_summary_box, ai_summary, pdf_output],
    )

if __name__ == "__main__":
    demo.launch()