|
|
|
|
|
|
|
|
import os
|
|
|
import sys
|
|
|
import json
|
|
|
|
|
|
def verify_deployment_structure():
    """Verify that all required files exist for deployment.

    Checks for the app/docs files and the serialized model artifacts,
    validates key content in app.py and requirements.txt, and tries to
    parse models/model_info.json.  All results are printed; nothing is
    raised for missing files.

    Returns:
        bool: True when every required file (and the models/ directory)
        is present, False otherwise.

    NOTE(review): the original extraction garbled the emoji in the
    status strings (and split several f-strings across lines, which was
    a syntax error); they have been repaired with consistent markers.
    """
    print("🔍 Verifying Hugging Face Spaces deployment structure...")

    # Static app/documentation files expected at the repo root.
    required_files = [
        "app.py",
        "requirements.txt",
        "README.md",
        "DEPLOYMENT_GUIDE.md",
    ]

    # Serialized model artifacts expected under models/.
    model_files = [
        "models/production_model.pkl",
        "models/smoteenn_preprocessor.pkl",
        "models/model_info.json",
    ]

    all_files = required_files + model_files

    print("\n📄 Checking required files:")
    missing_files = []

    for file_path in all_files:
        if os.path.exists(file_path):
            size = os.path.getsize(file_path)
            print(f"   ✅ {file_path}: {size:,} bytes")
        else:
            print(f"   ❌ {file_path}: Missing")
            missing_files.append(file_path)

    print("\n📁 Checking directory structure:")
    if os.path.exists("models"):
        print("   ✅ models/ directory exists")
    else:
        print("   ❌ models/ directory missing")
        missing_files.append("models/")

    print("\n🔍 Validating file content:")

    # Substring checks only — cheap sanity tests, not a real parse.
    if os.path.exists("app.py"):
        with open("app.py", "r") as f:
            content = f.read()
        if "HospitalReadmissionPredictor" in content:
            print("   ✅ app.py contains main predictor class")
        else:
            print("   ❌ app.py missing predictor class")

        if "gradio" in content:
            print("   ✅ app.py imports Gradio")
        else:
            print("   ❌ app.py missing Gradio import")

    if os.path.exists("requirements.txt"):
        with open("requirements.txt", "r") as f:
            content = f.read()
        required_packages = ["scikit-learn", "joblib", "pandas", "numpy", "gradio"]
        for package in required_packages:
            if package in content:
                print(f"   ✅ requirements.txt includes {package}")
            else:
                print(f"   ❌ requirements.txt missing {package}")

    if os.path.exists("models/model_info.json"):
        try:
            with open("models/model_info.json", "r") as f:
                model_info = json.load(f)
            print(f"   ✅ Model accuracy: {model_info.get('accuracy', 'N/A')}")
            print(f"   ✅ Model type: {model_info.get('model_type', 'N/A')}")
        except Exception as e:
            # Unreadable/invalid JSON is reported but does not block the
            # existence check above.
            print(f"   ❌ Error reading model_info.json: {e}")

    print("\n🎯 Deployment Status:")
    if not missing_files:
        print("   ✅ All files present - Ready for deployment!")
        return True
    else:
        print(f"   ❌ Missing {len(missing_files)} files:")
        for file in missing_files:
            print(f"      - {file}")
        return False
|
|
|
|
|
|
def test_local_import():
    """Test whether the app's runtime dependencies can be imported locally.

    Attempts to import pandas, numpy, gradio, joblib, and scikit-learn,
    printing a success or failure line for each.  Import failures are
    reported, never raised.  Returns None.

    The original had five copy-pasted try/except blocks (several with
    string literals split across lines — a syntax error); they are now
    driven by a single (display name, module name) table.
    """
    import importlib

    print("\n🧪 Testing local imports...")

    # (display name, importable module name) for each dependency.
    packages = [
        ("pandas", "pandas"),
        ("numpy", "numpy"),
        ("gradio", "gradio"),
        ("joblib", "joblib"),
        ("scikit-learn", "sklearn"),
    ]

    for display, module_name in packages:
        try:
            module = importlib.import_module(module_name)
        except ImportError as e:
            print(f"   ❌ {display} import failed: {e}")
        else:
            # Only scikit-learn's version was reported in the original.
            if display == "scikit-learn":
                print(f"   ✅ {display} imported successfully "
                      f"(version: {module.__version__})")
            else:
                print(f"   ✅ {display} imported successfully")
|
|
|
|
|
|
def generate_test_data():
    """Generate test data for validation"""

    print("\nπ Generating test data...")

    # Elderly patient with a long stay, many medications, repeated
    # emergency/inpatient visits, and abnormal labs.
    high_risk_case = {
        "name": "High Risk Patient",
        "data": {
            "age": 75,
            "time_in_hospital": 8,
            "n_lab_procedures": 55,
            "n_procedures": 4,
            "n_medications": 18,
            "n_outpatient": 3,
            "n_inpatient": 2,
            "n_emergency": 2,
            "medical_specialty": "Cardiology",
            "primary_diagnosis": "Circulatory",
            "admission_type": "Emergency",
            "discharge_disposition": "Home Health Service",
            "glucose_test": ">200",
            "a1c_test": ">8",
            "diabetes_med": "Yes",
            "change_diabetes_med": "Up",
            "insulin": "Steady",
            "hemoglobin": 9.5,
            "sodium": 130,
        },
        "expected_risk": "High",
    }

    # Middle-aged elective admission with a short stay and normal labs.
    low_risk_case = {
        "name": "Low Risk Patient",
        "data": {
            "age": 45,
            "time_in_hospital": 2,
            "n_lab_procedures": 15,
            "n_procedures": 1,
            "n_medications": 5,
            "n_outpatient": 1,
            "n_inpatient": 0,
            "n_emergency": 0,
            "medical_specialty": "Family/GeneralPractice",
            "primary_diagnosis": "Other",
            "admission_type": "Elective",
            "discharge_disposition": "Home",
            "glucose_test": "Norm",
            "a1c_test": "Norm",
            "diabetes_med": "No",
            "change_diabetes_med": "No",
            "insulin": "No",
            "hemoglobin": 13.5,
            "sodium": 142,
        },
        "expected_risk": "Low",
    }

    test_cases = [high_risk_case, low_risk_case]

    # Echo the headline features of each case for manual inspection.
    for index, case in enumerate(test_cases, start=1):
        details = case["data"]
        print(f"\n Test Case {index}: {case['name']}")
        print(f" Expected Risk: {case['expected_risk']}")
        print(f" Key features:")
        print(f" Age: {details['age']} years")
        print(f" Length of Stay: {details['time_in_hospital']} days")
        print(f" Medications: {details['n_medications']}")
        print(f" Emergency Visits: {details['n_emergency']}")
        print(f" Hemoglobin: {details['hemoglobin']} g/dL")

    return test_cases
|
|
|
|
|
|
def create_deployment_summary():
    """Create and persist a deployment summary.

    Re-runs verify_deployment_structure() to record readiness, then
    writes the summary to deployment_summary.json in the current
    directory (overwriting any existing file).

    Returns:
        dict: the summary that was written.
    """
    # Local import keeps the fix self-contained; timestamp was
    # previously hard-coded to "2025-09-10", which went stale the day
    # after it was written.
    from datetime import date

    print("\n📋 Creating deployment summary...")

    summary = {
        "deployment_type": "Hugging Face Spaces",
        "model_type": "Hospital Readmission Risk Predictor",
        "framework": "Gradio",
        "files_checked": True,
        # NOTE: this triggers a second full structure check (the main
        # script also calls it); kept for backward compatibility.
        "ready_for_deployment": verify_deployment_structure(),
        "timestamp": date.today().isoformat(),
        "instructions": "Follow DEPLOYMENT_GUIDE.md for step-by-step deployment",
    }

    with open("deployment_summary.json", "w") as f:
        json.dump(summary, f, indent=2)

    print(" ✅ Deployment summary saved to deployment_summary.json")

    return summary
|
|
|
|
|
|
if __name__ == "__main__":
    # Entry point: run all verification steps and print a final verdict.
    # (Garbled emoji and a string literal split across lines were
    # repaired; logic is unchanged.)
    print("🏥 Hospital Readmission Predictor - Deployment Verification")
    print("=" * 60)

    structure_ok = verify_deployment_structure()
    test_local_import()
    test_cases = generate_test_data()
    summary = create_deployment_summary()

    print("\n" + "=" * 60)
    print("🎯 FINAL STATUS:")

    if structure_ok:
        print("✅ READY FOR HUGGING FACE SPACES DEPLOYMENT!")
        print("\n📝 Next Steps:")
        print("1. Follow DEPLOYMENT_GUIDE.md")
        print("2. Create Hugging Face Space")
        print("3. Upload files using Git or web interface")
        print("4. Test deployment with provided test cases")
    else:
        print("❌ DEPLOYMENT NOT READY")
        print("\n📝 Required Actions:")
        print("1. Fix missing files listed above")
        print("2. Run copy_models.py to copy model files")
        print("3. Re-run this verification script")

    print(f"\n📊 Test cases ready: {len(test_cases)}")
    print("📖 Deployment guide: DEPLOYMENT_GUIDE.md")
    print("📄 Summary: deployment_summary.json")
|
|
|
|