# Smoke tests for the wildfire-prediction FastAPI app (commit 3054db6, ~2 KB).
import sys
import os
from fastapi.testclient import TestClient
# Add project root to path
# NOTE: this must run BEFORE `from app.main import ...` below — the statement
# order here is load-bearing, since `app` lives one directory above this file.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from app.main import app, load_models
# 1. FORCE LOAD MODELS MANUALLY
# This ensures they are ready before tests run
# (so /health can report "healthy" even if startup hooks did not fire).
print("โ๏ธ Forcing Model Load for Testing...")
load_models()
# Shared in-process client used by every test function in this module.
client = TestClient(app)
def test_health_check():
    """Test if the API is alive and models are loaded.

    Hits GET /health and asserts an HTTP 200 whose JSON body reports
    ``status == "healthy"`` (i.e. `load_models()` succeeded).
    """
    print("\n๐ Testing Health Endpoint...")
    response = client.get("/health")
    # Debugging print
    print(f" Response: {response.json()}")
    assert response.status_code == 200
    data = response.json()
    # Crucial check: Status MUST be healthy now
    assert data["status"] == "healthy"
    # Fix: this literal was split across two lines in the source
    # (unterminated string -> SyntaxError); rejoined on one line.
    print("✅ Health Check Passed!")
def test_prediction_endpoint():
    """Test the End-to-End Prediction pipeline with valid data.

    Posts a representative weather/location payload to /predict and asserts
    an HTTP 200 whose body contains the three expected prediction keys.
    """
    print("\n๐ Testing Prediction Endpoint...")
    # Representative Southern-California observation: temps in Kelvin-like
    # units, humidities in %, wind speed, precipitation, energy release component.
    payload = {
        "tmmn": 290.0, "tmmx": 305.0, "rmin": 15.0, "rmax": 45.0,
        "vs": 6.5, "pr": 0.0, "erc": 50.0,
        "latitude": 34.0, "longitude": -118.0
    }
    response = client.post("/predict", json=payload)
    # If this fails, print the error detail
    if response.status_code != 200:
        print(f"โ API Error: {response.json()}")
    assert response.status_code == 200
    data = response.json()
    # Verify Content
    assert "burning_index_prediction" in data
    assert "risk_level_prediction" in data
    assert "cluster_zone" in data
    # Fix: this literal was split across two lines in the source
    # (unterminated string -> SyntaxError); rejoined on one line.
    print("✅ Prediction Logic Passed!")
    print(f" ๐ฅ Predicted BI: {data['burning_index_prediction']}")
    print(f" โ ๏ธ Risk Level: {data['risk_level_prediction']}")
if __name__ == "__main__":
try:
test_health_check()
test_prediction_endpoint()
print("\n๐ ALL API TESTS PASSED SUCCESSFULLY.")
except AssertionError as e:
print(f"\nโ TEST FAILED: Assertion Error")
except Exception as e:
print(f"\nโ CRITICAL ERROR: {e}") |