Spaces:
Sleeping
Sleeping
Commit Β·
d029190
1
Parent(s): 8b5f728
change
Browse files
app/Hackathon_setup/face_recognition.py
CHANGED
|
@@ -43,53 +43,14 @@ trnscm = transforms.Compose([
|
|
| 43 |
transforms.Resize((100, 100)),
|
| 44 |
transforms.ToTensor()
|
| 45 |
])
|
| 46 |
-
CLASS_NAMES = ['Person1', 'Person2'] # Only 2 persons
|
| 47 |
|
| 48 |
# --- Model Filenames ---
|
| 49 |
SIAMESE_MODEL_PATH = current_path + '/siamese_model.t7'
|
| 50 |
DECISION_TREE_MODEL_PATH = current_path + '/decision_tree_model.sav'
|
| 51 |
SCALER_PATH = current_path + '/decision_scaler.sav'
|
| 52 |
|
| 53 |
-
|
| 54 |
-
"""Create a fallback Decision Tree model for Hugging Face deployment"""
|
| 55 |
-
try:
|
| 56 |
-
from sklearn.tree import DecisionTreeClassifier
|
| 57 |
-
from sklearn.preprocessing import StandardScaler
|
| 58 |
-
|
| 59 |
-
print("Creating fallback Decision Tree model...")
|
| 60 |
-
|
| 61 |
-
# Create dummy training data - use 5 features based on debug output
|
| 62 |
-
n_features = 5 # Based on debug output showing 5 features
|
| 63 |
-
n_samples = 300
|
| 64 |
-
|
| 65 |
-
# Generate training data
|
| 66 |
-
np.random.seed(42)
|
| 67 |
-
X_train = np.random.randn(n_samples, n_features)
|
| 68 |
-
|
| 69 |
-
# Create balanced labels (0, 1 for Person1, Person2)
|
| 70 |
-
y_train = np.array([0] * 150 + [1] * 150)
|
| 71 |
-
np.random.shuffle(y_train)
|
| 72 |
-
|
| 73 |
-
# Create and fit scaler
|
| 74 |
-
scaler = StandardScaler()
|
| 75 |
-
X_train_scaled = scaler.fit_transform(X_train)
|
| 76 |
-
|
| 77 |
-
# Create and fit Decision Tree
|
| 78 |
-
classifier = DecisionTreeClassifier(
|
| 79 |
-
criterion='gini',
|
| 80 |
-
max_depth=10,
|
| 81 |
-
random_state=42
|
| 82 |
-
)
|
| 83 |
-
classifier.fit(X_train_scaled, y_train)
|
| 84 |
-
|
| 85 |
-
print(f"β Fallback model created with classes: {classifier.classes_}")
|
| 86 |
-
print(f"β Fallback model features: {scaler.n_features_in_}")
|
| 87 |
-
print(f"β Model trained for 2 persons: Person1 and Person2")
|
| 88 |
-
return scaler, classifier
|
| 89 |
-
|
| 90 |
-
except Exception as e:
|
| 91 |
-
print(f"Failed to create fallback model: {e}")
|
| 92 |
-
return None, None
|
| 93 |
|
| 94 |
def safe_cosine_similarity(embed1, embed2):
|
| 95 |
"""Calculate cosine similarity with fallback methods"""
|
|
@@ -265,17 +226,18 @@ def get_similarity(img1, img2):
|
|
| 265 |
return -1.0
|
| 266 |
|
| 267 |
def get_face_class(img1):
|
| 268 |
-
"""Get face class for a single image"""
|
| 269 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
| 270 |
|
| 271 |
try:
|
|
|
|
| 272 |
det_img1 = detected_face(img1)
|
| 273 |
if det_img1 is None:
|
| 274 |
det_img1 = Image.fromarray(cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY))
|
| 275 |
-
|
| 276 |
face1_tensor = trnscm(det_img1).unsqueeze(0).to(device)
|
| 277 |
|
| 278 |
-
# Load Siamese Network
|
| 279 |
siamese_net = Siamese().to(device)
|
| 280 |
model_data = torch.load(SIAMESE_MODEL_PATH, map_location=device)
|
| 281 |
if isinstance(model_data, dict) and 'net_dict' in model_data:
|
|
@@ -290,106 +252,58 @@ def get_face_class(img1):
|
|
| 290 |
if embedding_np.ndim == 1:
|
| 291 |
embedding_np = embedding_np.reshape(1, -1)
|
| 292 |
|
| 293 |
-
#
|
| 294 |
scaler = None
|
| 295 |
classifier = None
|
| 296 |
|
| 297 |
-
# Load models with comprehensive error handling
|
| 298 |
try:
|
|
|
|
| 299 |
scaler = safe_load_model(SCALER_PATH, "joblib")
|
| 300 |
classifier = safe_load_model(DECISION_TREE_MODEL_PATH, "joblib")
|
| 301 |
-
print("β
|
| 302 |
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
raise ValueError("Corrupted classifier model")
|
| 307 |
|
| 308 |
-
# Validate the models
|
| 309 |
-
if not hasattr(classifier, 'predict'):
|
| 310 |
-
print("β Classifier doesn't have predict method")
|
| 311 |
-
raise ValueError("Invalid classifier")
|
| 312 |
-
|
| 313 |
-
if not hasattr(scaler, 'transform'):
|
| 314 |
-
print("β Scaler doesn't have transform method")
|
| 315 |
-
raise ValueError("Invalid scaler")
|
| 316 |
-
|
| 317 |
-
# Check if classifier has classes_ attribute (indicates it's trained)
|
| 318 |
-
if not hasattr(classifier, 'classes_'):
|
| 319 |
-
print("β Classifier doesn't have classes_ attribute (not trained)")
|
| 320 |
-
raise ValueError("Untrained classifier")
|
| 321 |
-
|
| 322 |
-
|
| 323 |
except Exception as e:
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
print("Attempting to create fallback model...")
|
| 327 |
try:
|
| 328 |
-
scaler
|
| 329 |
-
|
| 330 |
-
|
| 331 |
-
except Exception as
|
| 332 |
-
print(f"
|
| 333 |
-
|
| 334 |
-
|
| 335 |
-
|
| 336 |
-
|
|
|
|
|
|
|
| 337 |
return "UNKNOWN_CLASS"
|
| 338 |
-
|
| 339 |
-
#
|
| 340 |
embedding_scaled = scaler.transform(embedding_np)
|
| 341 |
|
| 342 |
-
#
|
| 343 |
-
|
| 344 |
-
|
| 345 |
-
|
| 346 |
-
|
| 347 |
-
if embedding_scaled.ndim == 1:
|
| 348 |
-
embedding_scaled = embedding_scaled.reshape(1, -1)
|
| 349 |
-
predicted_label_index = classifier.predict(embedding_scaled)[0]
|
| 350 |
-
else:
|
| 351 |
-
print("Classifier doesn't have predict method")
|
| 352 |
-
return "UNKNOWN_CLASS"
|
| 353 |
-
except Exception as predict_error:
|
| 354 |
-
print(f"Prediction error: {predict_error}")
|
| 355 |
-
# Try alternative prediction method
|
| 356 |
-
try:
|
| 357 |
-
# Try with explicit X parameter
|
| 358 |
-
predicted_label_index = classifier.predict(X=embedding_scaled)[0]
|
| 359 |
-
except Exception as e2:
|
| 360 |
-
print(f"Alternative prediction also failed: {e2}")
|
| 361 |
-
return "UNKNOWN_CLASS"
|
| 362 |
|
| 363 |
-
# Map to class name -
|
| 364 |
-
# Convert prediction to int if it's not already
|
| 365 |
try:
|
| 366 |
predicted_label_index = int(predicted_label_index)
|
| 367 |
-
|
| 368 |
-
|
| 369 |
-
|
| 370 |
-
# Method 1: Check if the prediction is valid using classifier classes
|
| 371 |
-
if hasattr(classifier, 'classes_') and predicted_label_index in classifier.classes_:
|
| 372 |
-
# Find the index of the predicted class in the classifier's classes
|
| 373 |
-
class_index = list(classifier.classes_).index(predicted_label_index)
|
| 374 |
-
if class_index < len(CLASS_NAMES):
|
| 375 |
-
class_name = CLASS_NAMES[class_index]
|
| 376 |
return class_name
|
| 377 |
else:
|
| 378 |
return "UNKNOWN_CLASS"
|
| 379 |
-
|
| 380 |
-
|
| 381 |
-
if 0 <= predicted_label_index < len(CLASS_NAMES):
|
| 382 |
-
class_name = CLASS_NAMES[predicted_label_index]
|
| 383 |
-
return class_name
|
| 384 |
-
|
| 385 |
-
# Method 3: Handle edge cases - return a default class based on prediction modulo
|
| 386 |
-
if isinstance(predicted_label_index, (int, np.integer)):
|
| 387 |
-
default_index = predicted_label_index % len(CLASS_NAMES)
|
| 388 |
-
class_name = CLASS_NAMES[default_index]
|
| 389 |
-
return class_name
|
| 390 |
-
|
| 391 |
-
return "UNKNOWN_CLASS"
|
| 392 |
|
| 393 |
except Exception as e:
|
| 394 |
-
print(f"Error in get_face_class: {e}")
|
| 395 |
return f"Error: {str(e)}"
|
|
|
|
| 43 |
transforms.Resize((100, 100)),
|
| 44 |
transforms.ToTensor()
|
| 45 |
])
|
| 46 |
+
CLASS_NAMES = ['Person1', 'Person2', 'Person3', 'Person4'] # Four persons
|
| 47 |
|
| 48 |
# --- Model Filenames ---
|
| 49 |
SIAMESE_MODEL_PATH = current_path + '/siamese_model.t7'
|
| 50 |
DECISION_TREE_MODEL_PATH = current_path + '/decision_tree_model.sav'
|
| 51 |
SCALER_PATH = current_path + '/decision_scaler.sav'
|
| 52 |
|
| 53 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
|
| 55 |
def safe_cosine_similarity(embed1, embed2):
|
| 56 |
"""Calculate cosine similarity with fallback methods"""
|
|
|
|
| 226 |
return -1.0
|
| 227 |
|
| 228 |
def get_face_class(img1):
|
| 229 |
+
"""Get face class for a single image, prioritizing pre-trained models."""
|
| 230 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
| 231 |
|
| 232 |
try:
|
| 233 |
+
# --- (Face Detection and Siamese Feature Extraction remain the same) ---
|
| 234 |
det_img1 = detected_face(img1)
|
| 235 |
if det_img1 is None:
|
| 236 |
det_img1 = Image.fromarray(cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY))
|
| 237 |
+
|
| 238 |
face1_tensor = trnscm(det_img1).unsqueeze(0).to(device)
|
| 239 |
|
| 240 |
+
# Load Siamese Network (assuming this works)
|
| 241 |
siamese_net = Siamese().to(device)
|
| 242 |
model_data = torch.load(SIAMESE_MODEL_PATH, map_location=device)
|
| 243 |
if isinstance(model_data, dict) and 'net_dict' in model_data:
|
|
|
|
| 252 |
if embedding_np.ndim == 1:
|
| 253 |
embedding_np = embedding_np.reshape(1, -1)
|
| 254 |
|
| 255 |
+
# --- CRITICAL LOADING BLOCK: Focus on your .sav files ---
|
| 256 |
scaler = None
|
| 257 |
classifier = None
|
| 258 |
|
|
|
|
| 259 |
try:
|
| 260 |
+
# 1. Try joblib/sav loading (most likely format)
|
| 261 |
scaler = safe_load_model(SCALER_PATH, "joblib")
|
| 262 |
classifier = safe_load_model(DECISION_TREE_MODEL_PATH, "joblib")
|
| 263 |
+
print("✅ Successfully loaded pre-trained models via joblib.")
|
| 264 |
|
| 265 |
+
except FileNotFoundError as fnfe:
|
| 266 |
+
print(f"❌ Model/Scaler file not found: {fnfe}")
|
| 267 |
+
return "UNKNOWN_CLASS" # Stop if files aren't found
|
|
|
|
| 268 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 269 |
except Exception as e:
|
| 270 |
+
# 2. Try pickle loading (fallback for serialization issues)
|
| 271 |
+
print(f"⚠️ Joblib/SAV loading failed: {e}. Trying raw pickle...")
|
|
|
|
| 272 |
try:
|
| 273 |
+
scaler = safe_load_model(SCALER_PATH, "pickle")
|
| 274 |
+
classifier = safe_load_model(DECISION_TREE_MODEL_PATH, "pickle")
|
| 275 |
+
print("✅ Successfully loaded pre-trained models via raw pickle.")
|
| 276 |
+
except Exception as e_pickle:
|
| 277 |
+
print(f"❌ Raw pickle loading failed: {e_pickle}")
|
| 278 |
+
# Use UNKNOWN_CLASS instead of generating a new model
|
| 279 |
+
return "UNKNOWN_CLASS"
|
| 280 |
+
|
| 281 |
+
# --- Validation (Ensure loaded object has required methods) ---
|
| 282 |
+
if not hasattr(classifier, 'predict') or not hasattr(scaler, 'transform'):
|
| 283 |
+
print("❌ Loaded model/scaler object is corrupted or invalid.")
|
| 284 |
return "UNKNOWN_CLASS"
|
| 285 |
+
|
| 286 |
+
# --- Prediction (Standard Code) ---
|
| 287 |
embedding_scaled = scaler.transform(embedding_np)
|
| 288 |
|
| 289 |
+
# Ensure 2D input for prediction
|
| 290 |
+
if embedding_scaled.ndim == 1:
|
| 291 |
+
embedding_scaled = embedding_scaled.reshape(1, -1)
|
| 292 |
+
|
| 293 |
+
predicted_label_index = classifier.predict(embedding_scaled)[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 294 |
|
| 295 |
+
# --- Map to class name ---
|
|
|
|
| 296 |
try:
|
| 297 |
predicted_label_index = int(predicted_label_index)
|
| 298 |
+
if 0 <= predicted_label_index < len(CLASS_NAMES):
|
| 299 |
+
class_name = CLASS_NAMES[predicted_label_index]
|
| 300 |
+
print(f"Prediction: Index {predicted_label_index} -> {class_name}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 301 |
return class_name
|
| 302 |
else:
|
| 303 |
return "UNKNOWN_CLASS"
|
| 304 |
+
except (ValueError, TypeError):
|
| 305 |
+
return "UNKNOWN_CLASS"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 306 |
|
| 307 |
except Exception as e:
|
| 308 |
+
print(f"Error in get_face_class (Main): {e}")
|
| 309 |
return f"Error: {str(e)}"
|
app/Hackathon_setup/face_recognition_model.py
CHANGED
|
@@ -78,4 +78,4 @@ except Exception as e:
|
|
| 78 |
classifier = None
|
| 79 |
|
| 80 |
# Definition of classes as dictionary
|
| 81 |
-
classes = ['Person1','Person2']
|
|
|
|
| 78 |
classifier = None
|
| 79 |
|
| 80 |
# Definition of classes as dictionary
|
| 81 |
+
classes = ['Person1','Person2','Person3','Person4']
|