"""Evaluate the trained classifier on the cached test split and print metrics."""
import os
import sys  # NOTE(review): appears unused in this module — confirm before removing
import numpy as np
import pickle
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, accuracy_score, f1_score
from tensorflow.keras.models import load_model
from tensorflow.keras.utils import to_categorical
# Data Paths — resolved relative to this file so the script works from any CWD.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))  # This is backend/
ROOT_DIR = os.path.dirname(BASE_DIR)  # This is project root
DATA_DIR = os.path.join(ROOT_DIR, "data sets")
MODEL_PATH = os.path.join(ROOT_DIR, "model.h5")  # saved Keras model
ENCODER_PATH = os.path.join(ROOT_DIR, "encoder.pkl")  # pickled LabelEncoder from training
FEATURES_PATH = os.path.join(DATA_DIR, "features_cache.npy")  # cached feature matrix written by training
LABELS_PATH = os.path.join(DATA_DIR, "labels_cache.npy")  # cached label array written by training
def evaluate():
    """Evaluate the saved model on the held-out test split and print metrics.

    Loads the cached feature/label arrays, applies the persisted label
    encoder, re-creates the exact train/test split used during training
    (same random_state and stratification), then prints accuracy, weighted
    F1, and a per-class classification report. Returns None; prints an
    error and returns early if the feature cache is missing.
    """
    print("Loading data from cache...")
    if not os.path.exists(FEATURES_PATH) or not os.path.exists(LABELS_PATH):
        print("Error: Cached features not found. Please train the model first.")
        return
    X = np.load(FEATURES_PATH)
    y = np.load(LABELS_PATH)
    print(f"Loaded {len(X)} samples.")

    print("Loading Label Encoder...")
    # NOTE: pickle.load is only safe because encoder.pkl is produced locally
    # by our own training script — never load untrusted pickles.
    with open(ENCODER_PATH, 'rb') as f:
        le = pickle.load(f)

    # BUG FIX: use transform(), not fit_transform(). Re-fitting the persisted
    # encoder rebuilds the class mapping from the cached labels, which can
    # silently disagree with the mapping the model was trained against
    # (e.g. if the cache contains a different label subset). The whole point
    # of loading encoder.pkl is to reuse the training-time mapping.
    y_encoded = to_categorical(le.transform(y))

    # Split with the same random_state/stratify as training so the test set
    # here is exactly the samples the model never saw during training.
    print("Splitting data (random_state=42)...")
    X_train, X_test, y_train, y_test = train_test_split(
        X, y_encoded, test_size=0.2, random_state=42, stratify=y
    )
    print(f"Test Set Size: {len(X_test)}")

    print("Loading Model...")
    model = load_model(MODEL_PATH)

    print("Evaluating...")
    # Collapse softmax probabilities to class indices for metric computation.
    y_pred_prob = model.predict(X_test, verbose=0)
    y_pred = np.argmax(y_pred_prob, axis=1)
    y_true = np.argmax(y_test, axis=1)

    # Summary metrics
    accuracy = accuracy_score(y_true, y_pred)
    f1 = f1_score(y_true, y_pred, average='weighted')
    print("\n" + "="*30)
    print(f"Accuracy: {accuracy:.4f}")
    print(f"F1 Score (Weighted): {f1:.4f}")
    print("="*30 + "\n")

    # Detailed per-class report, labelled with the encoder's class names.
    target_names = le.classes_
    print("Classification Report:")
    print(classification_report(y_true, y_pred, target_names=target_names))
| if __name__ == "__main__": | |
| evaluate() | |