"""Contract tests for the AvigilanceEnv FastAPI app.

NOTE(review): this module arrived collapsed onto a single physical line;
conventional formatting has been restored. The final assertion of
``test_root_serves_space_frontend`` was truncated mid-string in the source
seen here and could not be recovered — see the note in that test.
"""

import pytest
from fastapi.testclient import TestClient

from app import app
from environment.models import AvigilanceReward, REWARD_FLOAT_FIELDS

# Shared in-process test client for the FastAPI application.
client = TestClient(app)


def _assert_reward_fields_within_open_interval(reward: dict) -> None:
    """Assert every declared reward float field lies strictly inside (0, 1).

    Args:
        reward: A reward payload (dict) containing every key listed in
            ``REWARD_FLOAT_FIELDS``.

    Raises:
        AssertionError: If any field value is <= 0 or >= 1.
    """
    for field_name in REWARD_FLOAT_FIELDS:
        value = reward[field_name]
        assert 0 < value < 1, f"{field_name} escaped the open interval: {value}"


def _build_action(task_id: str, observation: dict) -> dict:
    """Build a schema-valid action payload for the given task.

    Args:
        task_id: One of ``"task1"``, ``"task2"``, or any other value, which
            falls through to the task-3 resource-allocation shape.
        observation: The environment observation; task 2 reads
            ``observation["incident_batch"]``, task 3 reads
            ``observation["incident_queue"]`` and
            ``observation["fto_audit_queue"]``.

    Returns:
        A dict matching the action schema for the selected task.
    """
    if task_id == "task1":
        # Fixed FTO-grade action; no observation fields are consulted.
        return {
            "task_id": "task1",
            "fto_grade_action": {
                "grade": "B",
                "total_score": 70,
                "risk_flags": [],
                "recommended_action": "self_assessment_required",
                "justification": "Contract test action from pytest.",
            },
        }
    if task_id == "task2":
        # Rank the batch in observed order; defer the tail, escalate the head.
        incident_ids = [item["incident_id"] for item in observation["incident_batch"]]
        return {
            "task_id": "task2",
            "incident_priority_action": {
                "priority_ranking": incident_ids,
                "top_3_rationale": "Contract test rationale from pytest.",
                "defer_list": incident_ids[3:],
                "escalate_immediately": incident_ids[:2],
                "pattern_detected": False,
                "pattern_description": None,
            },
        }
    # Default (task 3): split the combined incident + FTO queue between two
    # inspectors, two items each, and defer everything after the fourth item.
    incident_ids = [item["incident_id"] for item in observation["incident_queue"]]
    fto_ids = [item["fto_id"] for item in observation["fto_audit_queue"]]
    assignments = {
        "inspector_1": (incident_ids + fto_ids)[:2],
        "inspector_2": (incident_ids + fto_ids)[2:4],
    }
    return {
        "task_id": "task3",
        "resource_allocation_action": {
            "inspector_assignments": assignments,
            "deferred_items": (incident_ids + fto_ids)[4:],
            "priority_rationale": "Contract test allocation rationale from pytest.",
            "predicted_risk_reduction": 0.55,
            "abstain": False,
            "abstain_reason": None,
        },
    }


def test_root_serves_space_frontend() -> None:
    """The root route serves the HTML mission-console frontend."""
    response = client.get("/")
    assert response.status_code == 200
    assert "text/html" in response.headers["content-type"]
    assert "AvigilanceEnv" in response.text
    assert "Reset Episode" in response.text
    assert "Avigilance Mission Console" in response.text
    assert "task-card" in response.text
    # NOTE(review): one further assertion was truncated mid-string in the
    # source reviewed here (it began `assert "` and was cut off); it has been
    # omitted rather than guessed at — restore it from version control.