Spaces:
Sleeping
Sleeping
| from fastapi import FastAPI | |
| from pydantic import BaseModel | |
| import joblib | |
| import pandas as pd | |
| import json | |
# --- Model artifacts -------------------------------------------------------
# Loaded once at import time so every request reuses the same fitted objects.
scaler = joblib.load("scaler.joblib")    # fitted feature scaler (exposes mean_/scale_)
gmm = joblib.load("gmm_model.joblib")    # Gaussian mixture used for cluster assignment
pca = joblib.load("pca.joblib")          # PCA applied after scaling, before the GMM
# Mapping from cluster id (JSON string key) to a numeric fatigue level.
with open("cluster_fatigue_map.json") as f:
    cluster_fatigue_map = json.load(f)
# Trained regressor that predicts a fatigue level directly from raw features.
model = joblib.load("fatigue_model.joblib")
# Column order the scaler/regressor were fitted on; incoming payloads are
# reindexed to this order before any transform/predict call.
feature_cols = [
    "AVRR", "SDNN", "RMSSD", "PNN50", "Coefficient_of_Variation",
    "Age", "Weight", "Height"
]
# Define input schema
class FatigueInput(BaseModel):
    """Request body for fatigue prediction.

    Field names match ``feature_cols`` exactly, so the payload can be
    turned into a one-row DataFrame and reindexed without renaming.
    """

    # Heart-rate-variability metrics (units not shown in this file —
    # confirm against the training pipeline).
    AVRR: float
    SDNN: float
    RMSSD: float
    PNN50: float
    Coefficient_of_Variation: float
    # Subject attributes.
    Age: int
    Weight: float
    Height: float
# Create the FastAPI application instance.
# NOTE(review): no @app.post/@app.get decorators are visible in this chunk,
# so the predict functions below are plain callables unless registered
# elsewhere — verify the route wiring.
app = FastAPI()
def debug_predict(input_data: FatigueInput):
    """Expose every intermediate transform for one payload, for debugging.

    Returns the raw payload, the scaled feature row, its PCA projection,
    and the scaler's fitted mean/scale so train/serve mismatches can be
    spotted quickly.
    """
    payload = input_data.dict()
    frame = pd.DataFrame([payload])[feature_cols]
    scaled = scaler.transform(frame)
    projected = pca.transform(scaled)
    return {
        "raw_input": payload,
        "scaled_input": scaled.tolist(),
        "pca_input": projected.tolist(),
        "scaler_mean": scaler.mean_.tolist(),
        "scaler_scale": scaler.scale_.tolist(),
    }
def predict_cluster(input_data: FatigueInput):
    """Cluster-based fatigue estimate: scale -> PCA -> GMM -> fatigue map.

    Renamed from ``predict``: the file defined ``predict`` twice, so this
    earlier definition was silently shadowed by the regressor-based one
    below and unreachable. The rename restores access without changing the
    surviving ``predict``.

    Returns a dict with the hard cluster assignment, its mapped fatigue
    level, per-cluster posterior probabilities, and a probability-weighted
    fatigue score; on any failure returns ``{"error": ...}`` instead of
    raising.
    """
    try:
        input_dict = input_data.dict()
        print("INPUT:", input_dict)
        input_df = pd.DataFrame([input_dict])[feature_cols]
        scaled_input = scaler.transform(input_df)
        print("SCALED:", scaled_input)
        pca_input = pca.transform(scaled_input)
        print("PCA:", pca_input)
        cluster = gmm.predict(pca_input)[0]
        probs = gmm.predict_proba(pca_input)[0].astype(float)
        print("CLUSTER:", cluster)
        print("PROBS:", probs)
        # cluster_fatigue_map came from JSON, so its keys are strings.
        fatigue_level = cluster_fatigue_map[str(cluster)]
        return {
            "cluster": int(cluster),
            "fatigue_level": fatigue_level,
            "cluster_probabilities": {str(i): float(p) for i, p in enumerate(probs)},
            # Expected fatigue under the posterior: sum_i P(cluster=i) * level_i.
            "weighted_fatigue_score": sum(
                float(probs[i]) * cluster_fatigue_map[str(i)] for i in range(len(probs))
            ),
        }
    except Exception as e:
        # Best-effort endpoint: surface the error as JSON, never a raw 500.
        return {"error": str(e)}


def predict(input_data: FatigueInput):
    """Regressor-based fatigue prediction from the raw feature row.

    Returns both the raw float prediction and a rounded integer level;
    on any failure returns ``{"error": ...}`` instead of raising.
    """
    try:
        input_df = pd.DataFrame([input_data.dict()])[feature_cols]
        # Predict fatigue level with the trained regressor.
        predicted_fatigue = model.predict(input_df)[0]
        return {
            "predicted_fatigue_level": float(predicted_fatigue),
            "fatigueLevel": int(round(predicted_fatigue)),
        }
    except Exception as e:
        # Best-effort endpoint: surface the error as JSON, never a raw 500.
        return {"error": str(e)}