Nuzwa commited on
Commit
6d09439
·
verified ·
1 Parent(s): a98720a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +109 -357
app.py CHANGED
@@ -1,20 +1,25 @@
1
  import os
2
  import re
3
  import gradio as gr
 
4
  from dataclasses import dataclass
5
- from typing import List, Dict, Tuple
6
 
 
 
 
 
7
  ARABIC_RE = re.compile(r"[\u0600-\u06FF]")
8
  def detect_lang(text: str) -> str:
9
  return "ur" if ARABIC_RE.search(text) else "en"
10
 
 
11
  @dataclass
12
  class Guardrails:
13
  refusal_msg_ur: str
14
  refusal_msg_en: str
15
  blocked_patterns: List[str]
16
  soft_patterns: List[str]
17
- helplines: List[Dict[str, str]]
18
 
19
  @classmethod
20
  def from_yaml(cls, path: str):
@@ -22,10 +27,10 @@ class Guardrails:
22
  with open(path, "r", encoding="utf-8") as f:
23
  data = yaml.safe_load(f)
24
  return cls(
25
- refusal_msg_ur=data["refusal_msg_ur"],
26
- refusal_msg_en=data["refusal_msg_en"],
27
- blocked_patterns=data["blocked_patterns"],
28
- soft_patterns=data["soft_patterns"],
29
  helplines=data.get("helplines", []),
30
  )
31
 
@@ -41,238 +46,54 @@ class Guardrails:
41
 
42
  GUARD = Guardrails.from_yaml("guardrails.yaml")
43
 
44
- from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
45
- import torch
46
-
47
  MODEL_ID = os.environ.get("SAFEPak_MODEL_ID", "Qwen/Qwen2-0.5B-Instruct")
48
  try:
49
- TOKENIZER = AutoTokenizer.from_pretrained(MODEL_ID)
50
- MODEL = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float32)
51
- GEN = pipeline(
52
- "text-generation",
53
- model=MODEL,
54
- tokenizer=TOKENIZER,
55
- device_map="auto" if torch.cuda.is_available() else None,
56
- )
57
- TINY_FALLBACK = False
58
  except Exception as e:
59
- print("LLM load failed, tiny fallback active:", e)
60
- TOKENIZER = None
61
- GEN = None
62
- TINY_FALLBACK = True
63
-
64
- def llm_reply(prompt: str, max_new_tokens: int = 120, temperature: float = 0.3) -> str:
65
- if TINY_FALLBACK:
66
- return ""
67
- out = GEN(
68
- prompt,
69
  max_new_tokens=max_new_tokens,
70
- do_sample=True,
71
  temperature=temperature,
72
- pad_token_id=TOKENIZER.eos_token_id,
73
- )[0]["generated_text"]
74
- return out[len(prompt):].strip()
75
-
76
- SAFEPAK_SYSTEM = """You are a multilingual emergency disaster response assistant named “SafePak – 1122 Guide”.
77
- Provide immediate, clear, practical survival and rescue instructions for disasters, accidents, conflict, or panic in Pakistan-like regions.
78
- No clarifying questions in normal mode. Use concise checklists. Begin with a calming line if user seems distressed.
79
- Trigger modules by keywords (fire, gas, earthquake, flood, storm, injury, trapped, explosion, conflict, child).
80
- Offer offline-safe advice. No personal data storage. No medical diagnoses, only first aid."""
81
 
82
- KW = {
83
- "fire": ["fire", "room on fire", "smoke", "جل", "آگ", "جلدی", "smelling smoke"],
84
- "gas": ["gas", "gas leak", "smell gas", "cylinder", "گیس", "لیک", "بو"],
85
- "earthquake": ["earthquake", "tremor", "shaking", "زلزلہ", "جھٹکا", "jhatka"],
86
- "flood": ["flood", "water entering", "overflow", "سیلاب", "بارش", "پانی آ گیا"],
87
- "storm": ["storm", "cyclone", "high winds", "طوفان", "آندھی"],
88
- "injury": ["injury", "bleeding", "cut", "fracture", "burn wound", "زخمی", "خون"],
89
- "explosion": ["explosion", "blast", "bomb blast", "دھماکا"],
90
- "conflict": ["gunfire", "firing", "crossfire", "terror", "bomb", "گولیاں"],
91
- "trapped": ["trapped", "stuck", "buried", "rubble", "پھنس"],
92
- "child": ["child", "baby", "بچہ", "رو رہا"],
93
- }
94
 
95
- def has_kw(text: str, key: str) -> bool:
96
- low = text.lower()
97
- for token in KW.get(key, []):
98
- if token in low:
99
- return True
100
- return False
101
 
102
- def module_response(user_msg: str, lang: str) -> Tuple[List[str], str]:
103
- bullets = []
104
- scenario = "general"
105
- if lang == "ur":
106
- calm = "🕊️ گھبرائیں نہیں — ایک گہری سانس لیں، میں آپ کے ساتھ ہوں۔"
107
- if has_kw(user_msg, "fire"):
108
- scenario = "fire"
109
- bullets = [
110
- "جھک جائیں — دھواں اوپر جاتا ہے۔",
111
- "ناک/منہ پر گیلا کپڑا رکھیں۔",
112
- "اگر محفوظ ہو تو بجلی/گیس مین بند کریں۔",
113
- "گرم دروازہ مت کھولیں؛ دوسرا راستہ لیں۔",
114
- "سیڑھیاں استعمال کریں — لفٹ ہرگز نہیں۔",
115
- "بچوں/بزرگوں کو ساتھ رکھیں، گنتی کریں۔",
116
- calm,
117
- ]
118
- elif has_kw(user_msg, "gas"):
119
- scenario = "gas"
120
- bullets = [
121
- "چنگاری نہ بنائیں (سوئچ/لائٹر/فون فلیش نہیں)۔",
122
- "کھڑکیاں/دروازے کھول کر ہوا دار کریں۔",
123
- "گیس کا مین والو بند کریں۔",
124
- "عمارت خالی کریں، محفوظ فاصلے پر جائیں۔",
125
- calm,
126
- ]
127
- elif has_kw(user_msg, "earthquake"):
128
- scenario = "earthquake"
129
- bullets = [
130
- "Drop, Cover, Hold — جھکیں، ڈھکیں، پکڑیں۔",
131
- "کھڑکیوں/بھاری اشیاء سے دور رہیں۔",
132
- "جھٹکے رکنے پر سیڑھیوں سے باہر نکلیں۔",
133
- "کھلی جگہ میں رکیں؛ لفٹ استعمال نہ کریں۔",
134
- calm,
135
- ]
136
- elif has_kw(user_msg, "flood") or has_kw(user_msg, "storm"):
137
- scenario = "flood"
138
- bullets = [
139
- "اگر محفوظ ہو تو بجلی مین سوئچ بند کریں۔",
140
- "اونچی جگہ/چھت پر منتقل ہوں۔",
141
- "پانی میں گاڑی نہ چلائیں۔",
142
- "اہم کاغذات اور دوائیں ساتھ رکھیں۔",
143
- calm,
144
- ]
145
- elif has_kw(user_msg, "injury"):
146
- scenario = "injury"
147
- bullets = [
148
- "خون بہہ رہا ہو تو مضبوط دباؤ دیں (صاف کپڑا)۔",
149
- "زخم صاف پانی سے دھوئیں؛ گہرا ہو تو مدد لیں۔",
150
- "گردن/کمر چوٹ پر مریض کو مت ہلائیں۔",
151
- calm,
152
- ]
153
- elif has_kw(user_msg, "explosion") or has_kw(user_msg, "conflict"):
154
- scenario = "conflict"
155
- bullets = [
156
- "زمین پر لیٹیں، مضبوط کور لیں، کھڑکیوں سے دور رہیں۔",
157
- "گولیاں مخالف سمت میں کور لیتے ہوئے حرکت کریں۔",
158
- "محفوظ راستہ دیکھ کر پرسکون انداز میں نکلیں۔",
159
- calm,
160
- ]
161
- elif has_kw(user_msg, "trapped") or has_kw(user_msg, "child"):
162
- scenario = "trapped"
163
- bullets = [
164
- "آہستہ سانس — توانائی بچائیں۔",
165
- "صرف جب مدد قریب لگے تب آواز/اشارہ کریں۔",
166
- "دھواں ہو تو نیچے رہیں اور تازہ ہوا کی سمت جائیں۔",
167
- calm,
168
- ]
169
- else:
170
- bullets = [
171
- "قریب ترین خطرات پہچانیں (بجلی، گیس، دھواں، پانی)۔",
172
- "محفوظ کور/خارج ہونے کا راستہ منتخب کریں۔",
173
- "بچوں/بزرگوں کو ساتھ رکھیں؛ لفٹ سے پرہیز کریں۔",
174
- calm,
175
- ]
176
- else:
177
- calm = "🕊️ Stay calm — take a deep breath; I’m with you."
178
- if has_kw(user_msg, "fire"):
179
- scenario = "fire"
180
- bullets = [
181
- "Stay low — smoke rises.",
182
- "Wet cloth over nose/mouth.",
183
- "Cut power/gas if safe.",
184
- "Do not open hot doors.",
185
- "Use stairs, never elevators.",
186
- "Keep kids/elderly with you; headcount.",
187
- calm,
188
- ]
189
- elif has_kw(user_msg, "gas"):
190
- scenario = "gas"
191
- bullets = [
192
- "No sparks (no switches/lighters/flash).",
193
- "Ventilate by opening windows/doors.",
194
- "Shut the main gas valve.",
195
- "Evacuate to a safe distance.",
196
- calm,
197
- ]
198
- elif has_kw(user_msg, "earthquake"):
199
- scenario = "earthquake"
200
- bullets = [
201
- "Drop, Cover, Hold On.",
202
- "Stay away from windows/heavy furniture.",
203
- "Exit via stairs after shaking stops.",
204
- "Wait in open safe area; no elevators.",
205
- calm,
206
- ]
207
- elif has_kw(user_msg, "flood") or has_kw(user_msg, "storm"):
208
- scenario = "flood"
209
- bullets = [
210
- "Turn off main power if safe.",
211
- "Move to higher ground/roof.",
212
- "Do not drive through water.",
213
- "Keep IDs/medicines with you.",
214
- calm,
215
- ]
216
- elif has_kw(user_msg, "injury"):
217
- scenario = "injury"
218
- bullets = [
219
- "Apply firm direct pressure to stop bleeding.",
220
- "Rinse with clean water; seek help if deep.",
221
- "Suspected spine injury — do not move person.",
222
- calm,
223
- ]
224
- elif has_kw(user_msg, "explosion") or has_kw(user_msg, "conflict"):
225
- scenario = "conflict"
226
- bullets = [
227
- "Get low, take solid cover, away from windows.",
228
- "Move opposite to gunfire using cover.",
229
- "Exit safely; avoid crowds and noise.",
230
- calm,
231
- ]
232
- elif has_kw(user_msg, "trapped") or has_kw(user_msg, "child"):
233
- scenario = "trapped"
234
- bullets = [
235
- "Breathe slowly — conserve energy.",
236
- "Signal only when rescuers seem near.",
237
- "Stay low if smoke; move toward fresh air/light.",
238
- calm,
239
- ]
240
- else:
241
- bullets = [
242
- "Identify immediate hazards (electricity, gas, smoke, water).",
243
- "Choose safe cover/exit route.",
244
- "Keep kids/elderly close; avoid elevators.",
245
- calm,
246
- ]
247
- return bullets, scenario
248
 
249
- def gobag_for(scenario: str, lang: str) -> List[str]:
250
- if lang == "ur":
251
- base = ["شناختی کارڈ/کاغذات", "نقدی", "موبائل + پاور بینک", "پینے کا پانی", "خشک کھانا", "دوائیں", "بچوں/بزرگوں کی ضروری اشیاء", "ہلکی چادر/کپڑے", "ٹارچ"]
252
- if scenario == "fire":
253
- return base + ["ماسک/گیلا کپڑا", "فائر سیفٹی آگاہی"]
254
- if scenario == "flood":
255
- return base + ["واٹر پروف بیگز", "چپل/بوٹس"]
256
- if scenario == "earthquake":
257
- return base + ["وسل", "چھوٹی فرسٹ ایڈ کٹ"]
258
- if scenario == "gas":
259
- return base + ["ریسپریٹری ماسک (اگر دستیاب)", "ماچس/لائٹر الگ بیگ میں"]
260
- if scenario == "conflict":
261
- return base + ["low-profile کپڑے", "ضروری نمبر نوٹ بک میں"]
262
- return base
263
- else:
264
- base = ["IDs/documents", "Cash", "Phone + power bank", "Drinking water", "Dry food", "Medicines", "Baby/elderly needs", "Light blanket/clothes", "Torch/flashlight"]
265
- if scenario == "fire":
266
- return base + ["Mask/wet cloth", "Basic fire safety info"]
267
- if scenario == "flood":
268
- return base + ["Waterproof pouches", "Sandals/boots"]
269
- if scenario == "earthquake":
270
- return base + ["Whistle", "Small first-aid kit"]
271
- if scenario == "gas":
272
- return base + ["Respirator mask if available", "Matches/lighter stored separately"]
273
- if scenario == "conflict":
274
- return base + ["Low-profile clothing", "Emergency numbers notebook"]
275
- return base
276
 
277
  def helplines_block(lang: str) -> str:
278
  if not GUARD.helplines:
@@ -283,156 +104,87 @@ def helplines_block(lang: str) -> str:
283
  nm = h.get("name", "").strip()
284
  ph = h.get("phone", "").strip()
285
  if nm and ph:
286
- lines.append(f"{nm}:\n{ph}")
287
- return "\n".join(lines)
288
-
289
- SIM_TRIGGER = re.compile(r"(practice with me|drill mode|drill|پریکٹس|ڈرل)", re.I)
290
-
291
- def start_sim_scenario(user_msg: str) -> Dict:
292
- if has_kw(user_msg, "fire"):
293
- scen = "fire"
294
- elif has_kw(user_msg, "flood"):
295
- scen = "flood"
296
- elif has_kw(user_msg, "earthquake"):
297
- scen = "earthquake"
298
- else:
299
- scen = "earthquake"
300
- return {"active": True, "step": 1, "scenario": scen}
301
-
302
- def sim_prompt(state: Dict, lang: str) -> str:
303
- scen = state.get("scenario", "earthquake")
304
- if lang == "ur":
305
- if state["step"] == 1:
306
- if scen == "fire":
307
- return "ڈرل: کمرے میں آگ اور دھواں ہے — سب سے پہلا قدم کیا ہوگا؟"
308
- if scen == "flood":
309
- return "ڈرل: پانی تیزی سے گھر میں داخل ہو رہا ہے — سب سے پہلا قدم کیا ہوگا؟"
310
- return "ڈرل: زلزلے کے جھٹکے محسوس ہو رہے ہیں — سب سے پہلا قدم کیا ہوگا؟"
311
- elif state["step"] == 2:
312
- return "اچھا—اب دوسرا قدم کیا ہوگا؟ (ایک لائن میں)"
313
- else:
314
- return "آخری حفاظتی قدم کیا ہوگا؟"
315
- else:
316
- if state["step"] == 1:
317
- if scen == "fire":
318
- return "Drill: Room fire with smoke — what is your FIRST action?"
319
- if scen == "flood":
320
- return "Drill: Water entering rapidly — what is your FIRST action?"
321
- return "Drill: Earthquake shaking — what is your FIRST action?"
322
- elif state["step"] == 2:
323
- return "Good — what is your SECOND step? (one short line)"
324
- else:
325
- return "What is the FINAL step to keep everyone safe?"
326
-
327
- def sim_feedback(state: Dict, user_msg: str, lang: str) -> str:
328
- scen = state.get("scenario", "earthquake")
329
- tips_ur = {
330
- "fire": ["جھک جائیں، دھوئیں سے نیچے رہیں", "گیلا کپڑا ناک/منہ پر", "لفٹ نہیں — سیڑھیاں", "گرم دروازہ مت کھولیں"],
331
- "flood": ["بجلی مین سوئچ بند", "اونچی جگہ/چھت پر جانا", "پانی میں گاڑی نہ چلائیں", "گو-بیگ/شناختی دستاویزات"],
332
- "earthquake": ["Drop, Cover, Hold", "کھڑکیوں/بھاری اشیاء سے دور", "جھٹکے رکنے پر سیڑھیوں سے باہر", "لفٹ نہیں"],
333
- }
334
- tips_en = {
335
- "fire": ["Stay low under smoke", "Wet cloth over nose/mouth", "Use stairs, not elevators", "Do not open hot doors"],
336
- "flood": ["Turn off main power", "Move to higher ground/roof", "Avoid driving through water", "Grab go-bag/IDs"],
337
- "earthquake": ["Drop, Cover, Hold", "Stay away from windows/heavy objects", "Exit via stairs after shaking", "No elevators"],
338
- }
339
- good = tips_ur if lang == "ur" else tips_en
340
- score = 0
341
- low = user_msg.lower()
342
- for hint in good[scen]:
343
- for tok in re.findall(r"\w+", hint.lower()):
344
- if tok in low:
345
- score += 1
346
- break
347
- if lang == "ur":
348
- fb = "درست سمت میں ہیں۔ " if score >= 1 else "بہتری کی ضرورت: "
349
- checklist = "\n- " + "\n- ".join(good[scen])
350
- return fb + "محفوظ قدم یہ ہیں:" + checklist
351
- else:
352
- fb = "Good direction. " if score >= 1 else "Improve: "
353
- checklist = "\n- " + "\n- ".join(good[scen])
354
- return fb + "Follow these safe steps:" + checklist
355
-
356
- def build_prompt(user_msg: str, lang: str) -> str:
357
- return f"<system>\n{SAFEPAK_SYSTEM}\n</system>\n<user>\n{user_msg}\n</user>"
358
 
359
- def chat_fn(user_msg: str, chat_history: List[Tuple[str, str]], sim_state: Dict):
 
360
  if not user_msg:
361
  return chat_history, sim_state
362
-
363
  lang = detect_lang(user_msg)
364
- status, _ = GUARD.check(user_msg)
365
-
366
- if SIM_TRIGGER.search(user_msg):
367
- sim_state = start_sim_scenario(user_msg)
368
- reply = sim_prompt(sim_state, lang)
369
- return chat_history + [(user_msg, reply)], sim_state
370
-
371
- if sim_state.get("active"):
372
- feedback = sim_feedback(sim_state, user_msg, lang)
373
- sim_state["step"] += 1
374
- if sim_state["step"] > 3:
375
- sim_state = {"active": False, "step": 0, "scenario": None}
376
- end_line = "ڈرل مکمل — شاباش! محفوظ رہیں۔" if lang == "ur" else "Drill complete — well done. Stay safe."
377
- return chat_history + [(user_msg, feedback + "\n\n" + end_line)], sim_state
378
- else:
379
- prompt_q = sim_prompt(sim_state, lang)
380
- return chat_history + [(user_msg, feedback + "\n\n" + prompt_q)], sim_state
381
 
 
382
  if status == "BLOCK":
383
  msg = GUARD.refusal_msg_ur if lang == "ur" else GUARD.refusal_msg_en
384
- helpb = helplines_block(lang)
385
- if helpb:
386
- msg += "\n\n" + helpb
387
  return chat_history + [(user_msg, msg)], sim_state
388
 
389
- bullets, scenario = module_response(user_msg, lang)
390
- bag = gobag_for(scenario, lang)
391
- if lang == "ur":
392
- base = "✅ ابھی یہ کریں:\n" + "\n".join(f"- {b}" for b in bullets if b) + "\n\n🎒 ساتھ کیا لیں (Go-Bag):\n" + "\n".join(f"- {i}" for i in bag)
 
 
 
 
 
 
 
 
 
393
  else:
394
- base = " Do this now:\n" + "\n".join(f"- {b}" for b in bullets if b) + "\n\n🎒 Take with you (Go-Bag):\n" + "\n".join(f"- {i}" for i in bag)
395
-
396
- if status == "SOFT":
397
- note = "احتیاط: یہ حساس موضوع ہے۔\n\n" if lang == "ur" else "Note: This is a sensitive topic.\n\n"
398
- base = note + base
399
-
400
- refined = ""
401
- if not TINY_FALLBACK:
402
- refine_prompt = (
403
- f"<system>\n{SAFEPAK_SYSTEM}\n"
404
- f"Return ONLY a medium-length emergency guide in the user's language: 5–7 action bullets, "
405
- f"then a Go-Bag sublist (4–8 items). No preface, no extra prose. Be clear, practical, life-saving.\n</system>\n"
406
- f"<user>\nOriginal message:\n{user_msg}\n\nBase content to refine:\n{base}\n</user>"
407
- )
408
- refined = llm_reply(refine_prompt, max_new_tokens=120, temperature=0.3)
409
 
410
- final = refined.strip() if refined else base
411
 
 
412
  if status == "SOFT":
413
- helpb = helplines_block(lang)
414
- if helpb:
415
- final += "\n\n" + helpb
 
416
 
 
417
  return chat_history + [(user_msg, final)], sim_state
418
 
 
419
  with gr.Blocks(title="SafePak – 1122 Guide") as demo:
420
  gr.Markdown("## SafePak – 1122 Guide\nMultilingual emergency assistant (CPU-only).")
421
 
422
- state = gr.State({"active": False, "step": 0, "scenario": None})
 
423
 
424
- chat = gr.Chatbot(height=520)
425
- msg = gr.Textbox(label="Message / پیغام", placeholder="Type here… / یہاں لکھیں…", lines=2)
 
426
  send = gr.Button("Send / بھیجیں")
427
  clear = gr.Button("Clear")
428
 
429
- def on_submit(u, h, s):
430
- new_h, new_s = chat_fn(u, h, s)
431
- return new_h, "", new_s
432
-
433
- msg.submit(on_submit, [msg, chat, state], [chat, msg, state])
434
- send.click(on_submit, [msg, chat, state], [chat, msg, state])
435
- clear.click(lambda: ([], {"active": False, "step": 0, "scenario": None}), [], [chat, state])
436
 
437
  if __name__ == "__main__":
438
- demo.launch()
 
 
 
 
 
1
  import os
2
  import re
3
  import gradio as gr
4
+ from typing import List, Tuple
5
  from dataclasses import dataclass
 
6
 
7
+ import torch
8
+ from transformers import AutoTokenizer, AutoModelForCausalLM
9
+
10
# ---------------- Language detection ----------------
# Any character in the Arabic Unicode block (U+0600–U+06FF) — covers Urdu script.
ARABIC_RE = re.compile(r"[\u0600-\u06FF]")

def detect_lang(text: str) -> str:
    """Return "ur" when *text* contains Arabic-script characters, else "en"."""
    if ARABIC_RE.search(text):
        return "ur"
    return "en"
14
 
15
+ # ---------------- Guardrails ----------------
16
  @dataclass
17
  class Guardrails:
18
  refusal_msg_ur: str
19
  refusal_msg_en: str
20
  blocked_patterns: List[str]
21
  soft_patterns: List[str]
22
+ helplines: List[dict]
23
 
24
  @classmethod
25
  def from_yaml(cls, path: str):
 
27
  with open(path, "r", encoding="utf-8") as f:
28
  data = yaml.safe_load(f)
29
  return cls(
30
+ refusal_msg_ur=data.get("refusal_msg_ur", ""),
31
+ refusal_msg_en=data.get("refusal_msg_en", ""),
32
+ blocked_patterns=data.get("blocked_patterns", []),
33
+ soft_patterns=data.get("soft_patterns", []),
34
  helplines=data.get("helplines", []),
35
  )
36
 
 
46
 
47
  GUARD = Guardrails.from_yaml("guardrails.yaml")
48
 
49
# ---------------- Model ----------------
# Model id is overridable via environment variable; defaults to a small
# CPU-friendly instruct model.
MODEL_ID = os.environ.get("SAFEPak_MODEL_ID", "Qwen/Qwen2-0.5B-Instruct")
try:
    tok = AutoTokenizer.from_pretrained(MODEL_ID)
    # float32 keeps inference CPU-compatible (no half-precision kernels needed).
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float32)
except Exception as e:
    # Degrade gracefully: llm_reply() returns a canned message when the
    # model/tokenizer could not be loaded (e.g. offline, out of memory).
    print("LLM load failed, fallback active:", e)
    tok = model = None
57
+
58
def llm_reply(prompt: str, max_new_tokens: int = 100, temperature: float = 0.3) -> str:
    """Generate a model continuation for *prompt*.

    Returns only the newly generated text; when the model failed to load,
    returns a static fallback message instead.

    :param prompt: full prompt (system + user) to continue
    :param max_new_tokens: generation length cap
    :param temperature: sampling temperature (sampling is always on)
    """
    if not (tok and model):
        # Fallback reply when the LLM could not be loaded.
        return "safePak: میں آپ کی مدد کے لیے حاضر ہوں۔ براہ کرم مزید وضاحت دیں۔"
    ids = tok.encode(prompt, return_tensors="pt")
    with torch.no_grad():  # inference only — avoids building autograd graphs
        out = model.generate(
            ids,
            max_new_tokens=max_new_tokens,
            temperature=temperature,
            do_sample=True,
            pad_token_id=tok.eos_token_id,
        )
    text = tok.decode(out[0], skip_special_tokens=True)
    # BUG FIX: decode() returns prompt + continuation; previously the whole
    # thing (including the system prompt) was shown to the user. Strip the
    # prompt echo when present.
    if text.startswith(prompt):
        return text[len(prompt):].strip()
    return text.strip()
 
 
 
 
 
71
 
72
+ # ---------------- Helpline rendering ----------------
73
+ def _clean_phone_for_links(p: str) -> str:
74
+ return "+" + "".join(ch for ch in p if ch.isdigit())
 
 
 
 
 
 
 
 
 
75
 
76
+ def phone_tel_link(p: str) -> str:
77
+ return f"tel:{_clean_phone_for_links(p)}"
 
 
 
 
78
 
79
+ def phone_wa_link(p: str) -> str:
80
+ num = _clean_phone_for_links(p).lstrip("+")
81
+ return f"https://wa.me/{num}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
82
 
83
def render_quick_helplines_md(lang: str = "ur") -> str:
    """Render the configured helplines as a Markdown panel with call/WhatsApp links.

    Returns an empty string when no helplines are configured. Entries missing
    a name or phone are skipped.
    """
    if not GUARD.helplines:
        return ""
    heading = "### فوری ہیلپ لائنز" if lang == "ur" else "### Quick Helplines"
    parts = [heading]
    for entry in GUARD.helplines:
        name = entry.get("name", "").strip()
        phone = entry.get("phone", "").strip()
        if name and phone:
            tel = phone_tel_link(phone)
            wa = phone_wa_link(phone)
            parts.append(
                f"**{name}** \n{phone}\n[📞 Call]({tel}) &nbsp;&nbsp;[💬 WhatsApp]({wa})"
            )
    return "\n\n".join(parts)
 
 
 
 
 
 
 
 
 
 
 
 
 
97
 
98
  def helplines_block(lang: str) -> str:
99
  if not GUARD.helplines:
 
104
  nm = h.get("name", "").strip()
105
  ph = h.get("phone", "").strip()
106
  if nm and ph:
107
+ tel = phone_tel_link(ph)
108
+ wa = phone_wa_link(ph)
109
+ lines.append(f"**{nm}**\n{ph}\n[📞 Call]({tel}) | [💬 WhatsApp]({wa})")
110
+ return "\n\n".join(lines)
111
+
112
# ---------------- Core prompts ----------------
SAFEPAK_SYSTEM = """
You are SafePak – 1122 Guide, a multilingual emergency disaster assistant.
Always reply in user's language.
Give medium length, action-oriented checklists (5–7 bullets) + Go-Bag tips if evacuation is implied.
Never panic. End with an emotional calming line.
"""

def format_response(base: str, user_msg: str, lang: str) -> str:
    """Refine the draft checklist *base* into a polished reply via the LLM.

    :param base: draft scenario text chosen by keyword matching
    :param user_msg: the user's original message
    :param lang: detected language code (currently unused here; the system
        prompt instructs the model to reply in the user's language)
    """
    prompt = (
        f"<system>\n{SAFEPAK_SYSTEM}\n</system>\n"
        f"<user>\n{user_msg}\n\nDraft response:\n{base}\n</user>"
    )
    return llm_reply(prompt, max_new_tokens=100, temperature=0.3)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
126
 
127
# ---------------- Chat logic ----------------
def chat_fn(user_msg: str, chat_history: List[Tuple[str, str]], sim_state: dict):
    """Handle one chat turn.

    Applies guardrails, picks a base checklist topic by keyword, refines it
    through the LLM, prepends a caution for soft-flagged topics and appends
    the helpline block.

    :param user_msg: raw user input (English or Urdu)
    :param chat_history: list of (user, bot) message pairs
    :param sim_state: drill-mode state dict (threaded through unchanged —
        the drill feature is not active in this version)
    :returns: (updated history, sim_state)
    """
    if not user_msg:
        return chat_history, sim_state

    lang = detect_lang(user_msg)
    status, _reason = GUARD.check(user_msg)

    # Hard-blocked content: reply with the configured refusal message only.
    if status == "BLOCK":
        msg = GUARD.refusal_msg_ur if lang == "ur" else GUARD.refusal_msg_en
        return chat_history + [(user_msg, msg)], sim_state

    # Base checklist topic by keyword (English + Urdu triggers).
    low = user_msg.lower()
    if any(k in low for k in ["fire", "آگ", "smoke"]):
        checklist = "Fire safety steps"
    elif any(k in low for k in ["زلزلہ", "earthquake", "tremor"]):
        checklist = "Earthquake safety steps"
    elif any(k in low for k in ["سیلاب", "flood", "overflow"]):
        checklist = "Flood survival steps"
    elif any(k in low for k in ["gas", "گیس", "leak"]):
        checklist = "Gas leak safety steps"
    elif any(k in low for k in ["injury", "زخمی", "bleeding"]):
        checklist = "First aid for injury"
    else:
        checklist = "General emergency guidance"

    final = format_response(checklist, user_msg, lang)

    # Soft-flagged content: keep the answer but prepend a caution note.
    if status == "SOFT":
        if lang == "ur":
            final = "⚠️ محتاط رہیں: یہ موضوع حساس ہے۔\n\n" + final
        else:
            final = "⚠️ Note: Sensitive topic.\n\n" + final

    # BUG FIX: only append the helpline block when it is non-empty; the
    # previous unconditional append left stray blank lines at the end of
    # every reply when no helplines were configured.
    helps = helplines_block(lang)
    if helps:
        final += "\n\n" + helps
    return chat_history + [(user_msg, final)], sim_state
166
 
167
# ---------------- UI ----------------
with gr.Blocks(title="SafePak – 1122 Guide") as demo:
    gr.Markdown("## SafePak – 1122 Guide\nMultilingual emergency assistant (CPU-only).")

    # Static helplines panel pinned above the chat (rendered once, in Urdu).
    gr.Markdown(render_quick_helplines_md("ur"))

    # Drill-mode state is threaded through chat_fn but not currently used.
    state = gr.State({"active": False, "step": 0, "scenario": None})
    chatbot = gr.Chatbot(height=420)
    msg = gr.Textbox(label="پیغام / Message", placeholder="یہاں لکھیں…", lines=2)
    send = gr.Button("Send / بھیجیں")
    clear = gr.Button("Clear")

    def _on_submit(u, h, s):
        new_h, new_s = chat_fn(u, h, s)
        # BUG FIX: clear the textbox after sending (this refactor had dropped
        # the "" output that reset the input field).
        return new_h, "", new_s

    msg.submit(_on_submit, [msg, chatbot, state], [chatbot, msg, state])
    send.click(_on_submit, [msg, chatbot, state], [chatbot, msg, state])
    clear.click(lambda: ([], {"active": False, "step": 0, "scenario": None}), [], [chatbot, state])
 
 
 
184
 
185
if __name__ == "__main__":
    # NOTE(review): Gradio 4.x removed `queue(concurrency_count=...)` in favor
    # of `default_concurrency_limit` — confirm the pinned Gradio version still
    # accepts this keyword.
    demo.queue(concurrency_count=1, max_size=32).launch(
        server_name="0.0.0.0",  # bind all interfaces (container/Spaces friendly)
        server_port=7860,
        share=False
    )