# Real_Time_Image_Captioning / tests / test_safety_classifier.py
# (Hugging Face Hub page residue, preserved as comments:)
# A7med-Ame3's picture
# Update tests/test_safety_classifier.py — c3f6bab verified
"""
tests/test_safety_classifier.py
──────────────────────────────
Unit tests for the regex-based SafetyClassifier.
Run with: python -m pytest tests/ -v
"""
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import pytest
from safety_classifier import SafetyClassifier, ClassificationResult
# Shared classifier instance — constructed once so every test exercises the
# same object (the classifier is stateless regex matching, so reuse is safe).
clf = SafetyClassifier()


# ── Helpers ───────────────────────────────────────────────────────────────────
def is_safe(text: str) -> bool:
    """Return True iff the classifier labels *text* as SAFE."""
    result: ClassificationResult = clf.classify(text)
    return result.label == "SAFE"


def is_dangerous(text: str) -> bool:
    """Return True iff the classifier labels *text* as DANGEROUS."""
    result: ClassificationResult = clf.classify(text)
    return result.label == "DANGEROUS"


def categories(text: str) -> list[str]:
    """Return the list of hazard category names detected in *text*."""
    result: ClassificationResult = clf.classify(text)
    return result.hazards
# ── SAFE cases ────────────────────────────────────────────────────────────────
class TestSafeCaptions:
    """Benign scene descriptions must come back labelled SAFE."""

    def test_normal_street(self):
        result = clf.classify("A quiet residential street with parked cars and green trees.")
        assert result.label == "SAFE"

    def test_indoor_office(self):
        result = clf.classify("An office with desks, computers, and people working quietly.")
        assert result.label == "SAFE"

    def test_park_scene(self):
        result = clf.classify("Children playing in a sunny park near a fountain.")
        assert result.label == "SAFE"

    def test_kitchen(self):
        result = clf.classify("A clean kitchen with a counter, sink, and hanging pots.")
        assert result.label == "SAFE"

    def test_empty_caption(self):
        # Degenerate input: no text means nothing hazardous to match.
        assert clf.classify("").label == "SAFE"

    def test_whitespace_only(self):
        # Whitespace-only input should behave like the empty caption.
        assert clf.classify("   ").label == "SAFE"
# ── DANGEROUS cases ───────────────────────────────────────────────────────────
class TestDangerousCaptions:
    """Hazard-describing captions must be labelled DANGEROUS, grouped by category."""

    # ── Fire ──
    def test_fire(self):
        assert clf.classify("There is a large fire burning near the building.").label == "DANGEROUS"
        assert "fire" in clf.classify("Flames are visible through the window.").hazards

    def test_smoke(self):
        assert clf.classify("The room is filled with thick smoke.").label == "DANGEROUS"

    # ── Flood ──
    def test_flooding(self):
        assert clf.classify("The street is flooding rapidly after heavy rain.").label == "DANGEROUS"
        assert "flood" in clf.classify("Water is flooding into the basement.").hazards

    # ── Weapons ──
    def test_gun(self):
        assert clf.classify("A man is holding a pistol near the entrance.").label == "DANGEROUS"
        assert "weapon" in clf.classify("A rifle is leaning against the wall.").hazards

    def test_knife(self):
        assert clf.classify("Someone is brandishing a knife in the crowd.").label == "DANGEROUS"

    # ── Vehicle crash ──
    def test_crash(self):
        assert clf.classify("Two cars have been involved in a collision on the road.").label == "DANGEROUS"
        assert "crash" in clf.classify("There is an overturned truck blocking the highway.").hazards

    # ── Oncoming traffic ──
    def test_oncoming_vehicle(self):
        assert clf.classify("An oncoming car is speeding directly toward the pedestrian.").label == "DANGEROUS"

    # ── Violence ──
    def test_fight(self):
        assert clf.classify("A brawl has broken out in front of the store.").label == "DANGEROUS"

    # ── Falls & heights ──
    def test_cliff(self):
        assert clf.classify("The person is standing at the edge of a cliff.").label == "DANGEROUS"
        assert "fall" in clf.classify("A steep drop is just ahead on the trail.").hazards

    # ── Injury / blood ──
    def test_blood(self):
        assert clf.classify("There is blood visible on the floor near the door.").label == "DANGEROUS"

    # ── Electrical ──
    def test_exposed_wire(self):
        assert clf.classify("An exposed wire is hanging from the ceiling.").label == "DANGEROUS"

    # ── Slip hazard ──
    def test_wet_floor(self):
        assert clf.classify("The wet floor near the entrance presents a slip hazard.").label == "DANGEROUS"

    # ── Storm ──
    def test_storm(self):
        assert clf.classify("A tornado is approaching from the west.").label == "DANGEROUS"

    # ── Chemical ──
    def test_chemical_spill(self):
        assert clf.classify("There is a chemical spill near the laboratory bench.").label == "DANGEROUS"

    # ── Generic danger keywords ──
    def test_generic_danger(self):
        assert clf.classify("This is a dangerous situation requiring immediate evacuation.").label == "DANGEROUS"
# ── Edge cases ────────────────────────────────────────────────────────────────
class TestEdgeCases:
    """Boundary behaviour: casing, multi-hazard captions, explain(), word limits."""

    def test_case_insensitive(self):
        # Matching must ignore letter case.
        assert clf.classify("FIRE is spreading across the building.").label == "DANGEROUS"
        assert clf.classify("There is a KNIFE on the ground.").label == "DANGEROUS"

    def test_multiple_hazards(self):
        result = clf.classify(
            "Flooding and fire are both present, and there is broken glass from the crash."
        )
        assert result.is_dangerous
        # Several distinct hazard categories should be reported, not just one.
        assert len(result.hazards) >= 2

    def test_explain_method(self):
        report = clf.explain("A fire is burning and an exposed wire is visible.")
        assert "fire" in report
        assert "electrical" in report

    def test_partial_word_no_match(self):
        # "firing" a weapon is different from "fire" — ensure no false positive
        # (pattern uses \b word boundaries): "firing" must NOT match \bfire\b.
        assert is_safe("The CEO is firing employees in the boardroom.")