Wall06 committed on
Commit
afb796a
·
verified ·
1 Parent(s): 1f07f73

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +118 -247
app.py CHANGED
@@ -12,296 +12,167 @@ from groq import Groq
12
  import google.generativeai as genai
13
  import tempfile
14
 
15
- # -------------------- ENVIRONMENT VARIABLES --------------------
16
  HF_API_KEY = os.getenv("HF_API_KEY")
17
  GROQ_API_KEY = os.getenv("GROQ_API_KEY")
18
  GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
19
- SENTINEL_CLIENT_ID = os.getenv("SENTINEL_CLIENT_ID")
20
- SENTINEL_CLIENT_SECRET = os.getenv("SENTINEL_CLIENT_SECRET")
21
  ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY")
22
 
23
- # -------------------- SENTINEL CONFIG --------------------
24
- config = SHConfig()
25
- if SENTINEL_CLIENT_ID and SENTINEL_CLIENT_SECRET:
26
- config.client_id = SENTINEL_CLIENT_ID
27
- config.client_secret = SENTINEL_CLIENT_SECRET
28
-
29
- # -------------------- AI FUNCTIONS --------------------
30
  def gemini_summary(text):
31
- try:
32
- if not GEMINI_API_KEY: return None, "Missing Key"
33
- genai.configure(api_key=GEMINI_API_KEY)
34
- model = genai.GenerativeModel('gemini-1.5-flash')
35
- response = model.generate_content(text)
36
- return response.text, None
37
- except Exception as e:
38
- return None, str(e)
39
 
40
  def groq_summary(text):
41
- try:
42
- if not GROQ_API_KEY: return None, "Missing Key"
43
- client = Groq(api_key=GROQ_API_KEY)
44
- completion = client.chat.completions.create(
45
- model="llama-3.3-70b-versatile",
46
- messages=[{"role": "user", "content": text}]
47
- )
48
- return completion.choices[0].message.content, None
49
- except Exception as e:
50
- return None, str(e)
51
 
52
  def hf_summary(text):
53
- try:
54
- url = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
55
- headers = {"Authorization": f"Bearer {HF_API_KEY}"}
56
- payload = {
57
- "inputs": f"<|system|>You are a scientist.</s><|user|>{text}</s><|assistant|>",
58
- "parameters": {"max_new_tokens": 800}
59
- }
60
- r = requests.post(url, headers=headers, json=payload, timeout=25)
61
- if r.status_code == 200:
62
- return r.json()[0]["generated_text"].split("<|assistant|>")[-1], None
63
- else:
64
- return None, f"Status {r.status_code}: {r.text}"
65
- except Exception as e:
66
- return None, str(e)
67
 
68
  def smart_summary(text):
69
- errors = []
70
- out, err = groq_summary(text)
71
- if out: return out
72
- errors.append(f"Groq: {err}")
73
- out, err = gemini_summary(text)
74
- if out: return out
75
- errors.append(f"Gemini: {err}")
76
- if HF_API_KEY:
77
- out, err = hf_summary(text)
78
- if out: return out
79
- errors.append(f"HF: {err}")
80
- return "⚠ SYSTEM FAILURE. DEBUG LOG:\n" + "\n".join(errors)
81
-
82
- # -------------------- AUDIO FUNCTION (UPDATED FOR V1.0) --------------------
83
  def generate_audio_report(text):
84
- # 1. Check Library
85
- try:
86
- from elevenlabs.client import ElevenLabs
87
- except ImportError:
88
- raise gr.Error("❌ Library Missing! Add 'elevenlabs' to requirements.txt")
89
 
90
- # 2. Check Text
91
- if not text:
92
- raise gr.Error("❌ No text to read!")
93
 
94
- # 3. Check Key
95
- api_key = os.getenv("ELEVENLABS_API_KEY")
96
- if not api_key:
97
- raise gr.Error("❌ API Key Missing! Check Settings > Secrets.")
98
 
99
- # 4. Generate with NEW Syntax
100
  try:
101
- client = ElevenLabs(api_key=api_key)
102
-
103
- # Use 'text_to_speech.convert' instead of 'generate'
104
- # Voice ID for "Brian": nPczCjzI2devNBz1zQrb
105
- audio_stream = client.text_to_speech.convert(
106
- text=text[:400],
107
- voice_id="nPczCjzI2devNBz1zQrb",
108
  model_id="eleven_multilingual_v2"
109
  )
110
-
111
  with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as f:
112
- for chunk in audio_stream:
113
  f.write(chunk)
114
  return f.name
115
 
116
- except Exception as e:
117
- error_msg = str(e)
118
- if "401" in error_msg:
119
- raise gr.Error("❌ 401 Unauthorized: API Key is wrong.")
120
- elif "quota" in error_msg.lower():
121
- raise gr.Error("❌ Quota Exceeded: No credits left.")
122
- else:
123
- raise gr.Error(f"❌ ElevenLabs Error: {error_msg}")
124
 
125
- # -------------------- MATH & LOGIC --------------------
126
  def calculate_wqi(pH, do, nutrients):
127
- wqi = (7 - abs(7 - pH)) * 0.2 + (do/14) * 0.5 + (10 - nutrients) * 0.3
128
- wqi_score = max(0, min(100, int(wqi*10)))
129
- return wqi_score
130
 
131
- def calculate_hsi(flow_rate, temp, sediment):
132
- hsi = 100 - abs(flow_rate-50)*0.5 - abs(temp-20)*2 - sediment*1.5
133
- return max(0, min(100, int(hsi)))
134
 
135
  def calculate_erosion(sediment, construction):
136
- score = sediment*1.5 + construction*2
137
- return max(0, min(100, int(score)))
138
-
139
- def potability_status(wqi):
140
- if wqi > 80: return "Safe"
141
- elif wqi > 50: return "Boil Required"
142
- else: return "Toxic"
143
-
144
- def river_stability(wqi, hsi, erosion):
145
- return int((wqi*0.4 + hsi*0.4 + (100-erosion)*0.2))
146
 
147
  def analyze_satellite_image(img):
148
- if img is None: return 0
149
- img_array = np.array(img.convert("L"))
150
- turbidity_score = int(np.mean(img_array)/2.55)
151
- return turbidity_score
152
 
153
- # -------------------- VISUALS & INSIGHTS --------------------
154
- def create_plots(wqi, hsi, erosion, turbidity):
155
  fig = go.Figure()
156
- colors = ['#0061ff', '#60efff', '#ff4b4b', '#ffb347']
157
- fig.add_trace(go.Bar(name="Metrics", x=["WQI", "HSI", "Erosion", "Turbidity"],
158
- y=[wqi, hsi, erosion, turbidity], marker_color=colors))
159
- fig.update_layout(title="River Health Metrics", yaxis=dict(range=[0,100]), template="plotly_white")
 
160
  return fig
161
 
162
- def generate_graph_insights(wqi, hsi, erosion, turbidity):
163
- text = "### 📉 Graph Analysis\n\n"
164
- if wqi > 70: text += f"🔵 **Water Quality:** {wqi}/100. Excellent condition.\n\n"
165
- elif wqi > 40: text += f"🔵 **Water Quality:** {wqi}/100. Moderate pollution.\n\n"
166
- else: text += f"🔵 **Water Quality:** {wqi}/100. **CRITICAL**.\n\n"
167
-
168
- if hsi > 70: text += f"🟢 **Habitat:** {hsi}/100. Good biodiversity.\n\n"
169
- else: text += f"🟢 **Habitat:** {hsi}/100. Poor conditions.\n\n"
170
- return text
171
-
172
- # -------------------- PDF ENGINE --------------------
173
- def generate_pdf(wqi, hsi, erosion, turbidity, summary_text):
174
  pdf = FPDF()
175
  pdf.add_page()
176
- qr = qrcode.QRCode(box_size=3)
177
- qr.add_data("FlumenIntel Report Verified")
178
- qr.make(fit=True)
179
- img = qr.make_image(fill_color="black", back_color="white")
180
- with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as tmp:
181
- img.save(tmp.name)
182
- pdf.image(tmp.name, x=165, y=10, w=30)
183
- pdf.set_y(15)
184
- pdf.set_font("Arial", "B", 24)
185
- pdf.set_text_color(0, 97, 255)
186
- pdf.cell(0, 10, "FlumenIntel", ln=True, align='L')
187
- pdf.ln(10)
188
- pdf.set_font("Arial", "", 12)
189
- pdf.set_text_color(0, 0, 0)
190
- pdf.multi_cell(0, 6, summary_text.encode('latin-1', 'replace').decode('latin-1'))
191
- try:
192
- return pdf.output(dest='S').encode('latin-1')
193
- except:
194
- return pdf.output(dest='S')
195
 
196
- # -------------------- MAIN PROCESSOR --------------------
197
- def process_data(flow_rate, water_temp, sediment, construction, pH, do, nutrients, sat_img):
198
- try:
199
- wqi = calculate_wqi(pH, do, nutrients)
200
- hsi = calculate_hsi(flow_rate, water_temp, sediment)
201
- erosion = calculate_erosion(sediment, construction)
202
- turbidity = analyze_satellite_image(sat_img)
203
- stability = river_stability(wqi, hsi, erosion)
204
- potability = potability_status(wqi)
205
-
206
- prompt = f"""
207
- ROLE: Senior Environmental Scientist.
208
- TASK: Write a formal River Health Report.
209
- DATA: WQI: {wqi}, HSI: {hsi}, Erosion: {erosion}, Turbidity: {turbidity}.
210
- REQUIREMENTS: Professional tone. 3 paragraphs max.
211
- """
212
-
213
- summary = smart_summary(prompt)
214
- fig = create_plots(wqi, hsi, erosion, turbidity)
215
- graph_text = generate_graph_insights(wqi, hsi, erosion, turbidity)
216
- pdf_bytes = generate_pdf(wqi, hsi, erosion, turbidity, summary)
217
-
218
- with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp_pdf:
219
- tmp_pdf.write(pdf_bytes)
220
- pdf_path = tmp_pdf.name
221
-
222
- status_text = f"Stability Index: {stability}/100\nStatus: {potability}"
223
-
224
- return status_text, fig, graph_text, summary, pdf_path
225
 
226
- except Exception as e:
227
- return str(e), None, "", "", None
 
228
 
229
- # Wrapper
230
- def run_app(flow, temp, sediment, construction, ph, do, nutrients, sat_img):
231
- return process_data(flow, temp, sediment, construction, ph, do, nutrients, sat_img)
 
 
232
 
233
- # -------------------- UI DESIGN --------------------
234
- custom_css = """
235
- @import url('https://fonts.googleapis.com/css2?family=Poppins:wght@300;400;600&display=swap');
236
- * { font-family: 'Poppins', sans-serif !important; }
237
- #title-box { background: linear-gradient(135deg, #0061ff 0%, #60efff 100%); color: white; padding: 20px; border-radius: 12px; text-align: center;}
238
- #analyze-btn { background: linear-gradient(90deg, #0061ff 0%, #60efff 100%); color: white; border: none; }
239
- """
240
 
241
- with gr.Blocks(title="FlumenIntel") as demo:
242
- gr.HTML(f"<style>{custom_css}</style>")
243
-
244
- with gr.Column(elem_id="title-box"):
245
- gr.Markdown("# FlumenIntel 🌊\n### Advanced River Health Analytics")
246
-
247
- with gr.Tabs():
248
- # --- TAB 1: DASHBOARD ---
249
- with gr.TabItem("🚀 Dashboard"):
250
- with gr.Row():
251
- # LEFT INPUTS
252
- with gr.Column(scale=1):
253
- gr.Markdown("### 1. Hydrological Data")
254
- flow = gr.Number(label="Flow Rate", value=45)
255
- temp = gr.Number(label="Temperature", value=18)
256
- sediment = gr.Slider(0, 10, label="Sediment", value=2)
257
- construction = gr.Slider(0, 10, label="Construction", value=0)
258
-
259
- gr.Markdown("### 2. Chemical Data")
260
- ph = gr.Number(label="pH Level", value=7.2)
261
- do = gr.Number(label="Dissolved Oxygen", value=9.5)
262
- nutrients = gr.Slider(0, 10, label="Nutrient Load", value=1)
263
-
264
- gr.Markdown("### 3. Visual Analysis")
265
- sat_img = gr.Image(label="Satellite Image", type="pil")
266
-
267
- analyze_btn = gr.Button("GENERATE REPORT", elem_id="analyze-btn")
268
-
269
- # RIGHT OUTPUTS
270
- with gr.Column(scale=2):
271
- status_box = gr.Textbox(label="System Status", interactive=False)
272
-
273
- with gr.Tabs():
274
- with gr.TabItem("📊 Visual Analytics"):
275
- plot_output = gr.Plot(label="Metric Visualization")
276
- graph_summary_box = gr.Markdown("### Insights...")
277
-
278
- with gr.TabItem("📄 Official Report"):
279
- ai_summary = gr.Textbox(label="Scientist's Assessment", lines=15, interactive=False)
280
-
281
- # --- AUDIO BUTTON ---
282
- with gr.Row():
283
- audio_btn = gr.Button("🔊 Listen to Report (ElevenLabs)")
284
- audio_out = gr.Audio(label="Player", type="filepath")
285
-
286
- audio_btn.click(
287
- fn=generate_audio_report,
288
- inputs=ai_summary,
289
- outputs=audio_out
290
- )
291
 
292
- with gr.TabItem("📥 Export"):
293
- pdf_output = gr.File(label="FlumenIntel Report.pdf")
294
 
295
- # --- TAB 2: ABOUT ME ---
296
- with gr.TabItem("👤 About Me"):
297
- gr.Markdown("## Abdullah\nComputer Engineering Undergraduate | AI & Hardware Enthusiast")
298
 
299
- # Events
300
- analyze_btn.click(
301
- run_app,
302
- inputs=[flow, temp, sediment, construction, ph, do, nutrients, sat_img],
303
- outputs=[status_box, plot_output, graph_summary_box, ai_summary, pdf_output]
304
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
305
 
306
  if __name__ == "__main__":
307
- demo.launch()
 
12
  import google.generativeai as genai
13
  import tempfile
14
 
15
# ================= ENV VARS =================
# API keys come from environment variables (e.g. HF Space "Secrets").
# Any of them may be None; each provider function checks before use.
HF_API_KEY = os.getenv("HF_API_KEY")
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
ELEVENLABS_API_KEY = os.getenv("ELEVENLABS_API_KEY")

# ================= AI =================
 
 
 
 
 
 
22
def gemini_summary(text):
    """Ask Gemini 1.5 Flash to respond to *text*.

    Returns the generated text, or None when GEMINI_API_KEY is unset.
    API errors propagate to the caller (smart_summary catches them).
    """
    if not GEMINI_API_KEY:
        return None
    genai.configure(api_key=GEMINI_API_KEY)
    return genai.GenerativeModel("gemini-1.5-flash").generate_content(text).text
 
 
 
28
 
29
def groq_summary(text):
    """Send *text* to Groq's llama-3.3-70b-versatile chat model.

    Returns the completion text, or None when GROQ_API_KEY is unset.
    API errors propagate (handled by smart_summary's fallback loop).
    """
    if not GROQ_API_KEY:
        return None
    messages = [{"role": "user", "content": text}]
    reply = Groq(api_key=GROQ_API_KEY).chat.completions.create(
        model="llama-3.3-70b-versatile",
        messages=messages,
    )
    return reply.choices[0].message.content
 
 
38
 
39
def hf_summary(text):
    """Generate text via the HF Inference API (zephyr-7b-beta).

    Returns the generated text, or None when HF_API_KEY is unset.
    Raises requests.HTTPError on a non-2xx response.

    Bug fix: the previous code indexed r.json()[0]["generated_text"]
    unconditionally; on error the API returns a dict payload, which
    raised KeyError/TypeError instead of a clear HTTP error.
    """
    if not HF_API_KEY:
        return None
    url = "https://api-inference.huggingface.co/models/HuggingFaceH4/zephyr-7b-beta"
    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
    payload = {"inputs": text, "parameters": {"max_new_tokens": 500}}
    r = requests.post(url, headers=headers, json=payload, timeout=20)
    # Surface HTTP failures clearly; smart_summary's fallback catches them.
    r.raise_for_status()
    return r.json()[0]["generated_text"]
 
 
 
 
 
 
 
47
 
48
def smart_summary(text):
    """Try each AI provider in order (Groq, Gemini, HF) and return the
    first non-empty result.

    Provider exceptions are swallowed so one outage does not break the
    pipeline; a fixed error string is returned when all providers fail.

    Bug fix: the previous bare `except:` also swallowed
    KeyboardInterrupt/SystemExit; narrowed to `except Exception`.
    """
    for provider in (groq_summary, gemini_summary, hf_summary):
        try:
            result = provider(text)
        except Exception:  # deliberate best-effort fallback
            continue
        if result:
            return result
    return "❌ All AI providers failed."
57
+
58
# ================= ELEVENLABS (FIXED) =================
def generate_audio_report(text):
    """Convert the first 400 chars of *text* to MP3 via ElevenLabs TTS.

    Returns the path of a temporary .mp3 file for gr.Audio.
    Raises gr.Error with a user-facing message on missing dependency,
    missing key, missing text, or API failure.
    """
    # Local import: 'elevenlabs' is an optional dependency.
    # Bug fix: an unguarded import raised a raw ImportError in the UI.
    try:
        from elevenlabs.client import ElevenLabs
    except ImportError:
        raise gr.Error("❌ Library missing: add 'elevenlabs' to requirements.txt")

    if not ELEVENLABS_API_KEY:
        raise gr.Error("❌ ELEVENLABS_API_KEY missing")

    if not text:
        raise gr.Error("❌ No report text")

    try:
        client = ElevenLabs(api_key=ELEVENLABS_API_KEY)
        audio = client.text_to_speech.convert(
            text=text[:400],
            voice_id="21m00Tcm4TlvDq8ikWAM",  # Rachel (safe default)
            model_id="eleven_multilingual_v2"
        )
        with tempfile.NamedTemporaryFile(delete=False, suffix=".mp3") as f:
            for chunk in audio:
                f.write(chunk)
        return f.name
    except gr.Error:
        raise  # don't re-wrap our own user-facing errors
    except Exception as e:
        # Bug fix: the previous handler reported "401 Unauthorized" for
        # EVERY failure; report the actual cause instead.
        msg = str(e)
        if "401" in msg:
            raise gr.Error("❌ ElevenLabs 401 Unauthorized → check key / plan")
        if "quota" in msg.lower():
            raise gr.Error("❌ ElevenLabs quota exceeded: no credits left")
        raise gr.Error(f"❌ ElevenLabs error: {msg}")
 
 
 
 
 
 
84
 
85
# ================= CALCULATIONS =================
def calculate_wqi(pH, do, nutrients):
    """Water Quality Index on a 0-100 scale.

    Weighted blend of pH closeness to neutral (20%), dissolved-oxygen
    fraction of 14 (50%), and inverse nutrient load (30%), scaled by
    10 and clamped to [0, 100].
    """
    ph_term = (7 - abs(7 - pH)) * 0.2
    do_term = (do / 14) * 0.5
    nutrient_term = (10 - nutrients) * 0.3
    raw = int((ph_term + do_term + nutrient_term) * 10)
    return max(0, min(100, raw))
 
 
88
 
89
def calculate_hsi(flow, temp, sediment):
    """Habitat Suitability Index, 0-100.

    Starts at 100 and subtracts penalties for deviation from ideal
    flow (50) and temperature (20), plus sediment load; clamped.
    """
    penalty = abs(flow - 50) * 0.5 + abs(temp - 20) * 2 + sediment * 1.5
    return max(0, min(100, int(100 - penalty)))
 
91
 
92
def calculate_erosion(sediment, construction):
    """Erosion risk score, 0-100: sediment weighted 1.5, construction 2."""
    risk = sediment * 1.5 + construction * 2
    return max(0, min(100, int(risk)))
 
 
 
 
 
 
 
 
 
94
 
95
def analyze_satellite_image(img):
    """Estimate turbidity (0-100) from a PIL image's mean grayscale level.

    Brighter images score higher; returns 0 when no image is supplied.
    """
    if img is None:
        return 0
    luminance = np.asarray(img.convert("L"))
    return int(np.mean(luminance) / 2.55)
100
 
101
# ================= VISUAL =================
def create_plot(wqi, hsi, erosion, turbidity):
    """Build a bar chart of the four health metrics on a fixed 0-100 axis."""
    metrics = {"WQI": wqi, "HSI": hsi, "Erosion": erosion, "Turbidity": turbidity}
    fig = go.Figure()
    fig.add_bar(x=list(metrics), y=list(metrics.values()))
    fig.update_layout(title="River Health Metrics", yaxis=dict(range=[0, 100]))
    return fig
110
 
111
# ================= PDF =================
def generate_pdf(summary):
    """Render *summary* into a single-page PDF and return it as bytes.

    Bug fix: fpdf2's output(dest="S") already returns a bytearray, so the
    previous unconditional .encode("latin-1") crashed under fpdf2; only
    legacy PyFPDF returns a str needing encoding. Both are handled now.
    """
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=12)
    # FPDF core fonts are latin-1 only; replace unmappable characters.
    pdf.multi_cell(0, 8, summary.encode("latin-1", "replace").decode("latin-1"))
    out = pdf.output(dest="S")
    return out.encode("latin-1") if isinstance(out, str) else bytes(out)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
118
 
119
# ================= PIPELINE =================
def run(flow, temp, sediment, construction, ph, do, nutrients, img):
    """End-to-end pipeline: metrics -> AI summary -> plot -> PDF.

    Returns (status_text, plotly_figure, summary_text, pdf_file_path),
    matching the four Gradio outputs wired to the GENERATE button.
    """
    # Derived indices; each helper clamps its result to 0-100.
    wqi = calculate_wqi(ph, do, nutrients)
    hsi = calculate_hsi(flow, temp, sediment)
    erosion = calculate_erosion(sediment, construction)
    turbidity = analyze_satellite_image(img)

    prompt = f"""
    You are a senior environmental scientist.
    Write a professional river health report.

    WQI: {wqi}
    HSI: {hsi}
    Erosion: {erosion}
    Turbidity: {turbidity}
    """

    summary = smart_summary(prompt)
    fig = create_plot(wqi, hsi, erosion, turbidity)
    pdf_bytes = generate_pdf(summary)

    # Persist the PDF to a temp file so gr.File can serve it by path.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as f:
        f.write(pdf_bytes)
        pdf_path = f.name

    # Unweighted average of the three indices; erosion is inverted
    # (lower erosion means a more stable river).
    status = f"Stability Index: {(wqi+hsi+(100-erosion))//3}/100"

    return status, fig, summary, pdf_path
 
 
147
 
148
# ================= UI =================
# Gradio layout: inputs in the left column, live outputs on the right.
with gr.Blocks(title="FlumenIntel") as demo:
    gr.Markdown("# 🌊 FlumenIntel — River Intelligence")

    with gr.Row():
        with gr.Column():
            # Hydrological and chemical inputs (defaults model a healthy river).
            flow = gr.Number(value=45, label="Flow Rate")
            temp = gr.Number(value=18, label="Temperature")
            sediment = gr.Slider(0,10,value=2,label="Sediment")
            construction = gr.Slider(0,10,value=0,label="Construction")
            ph = gr.Number(value=7.2,label="pH")
            do = gr.Number(value=9.5,label="Dissolved Oxygen")
            nutrients = gr.Slider(0,10,value=1,label="Nutrients")
            # Optional satellite photo; drives the turbidity score.
            img = gr.Image(type="pil")
            btn = gr.Button("GENERATE REPORT")

        with gr.Column():
            status = gr.Textbox(label="Status")
            plot = gr.Plot()
            report = gr.Textbox(lines=12,label="AI Report")
            audio_btn = gr.Button("🔊 Listen")
            audio = gr.Audio(type="filepath")
            pdf = gr.File()

    # Main pipeline: one click fills status, chart, report and PDF.
    btn.click(run, [flow,temp,sediment,construction,ph,do,nutrients,img],
              [status,plot,report,pdf])

    # Text-to-speech reads the generated report aloud.
    audio_btn.click(generate_audio_report, report, audio)
176
 
177
if __name__ == "__main__":
    # Launch the Gradio server only when executed directly (not on import).
    demo.launch()