File size: 2,314 Bytes
4b4f0b9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import numpy as np
from PIL import Image
import tensorflow as tf # Or import your PyTorch modules

# --- Load your trained HTCNN model ---
# Loaded once at import time; on failure the module stays importable and
# htcnn_model is left as None so callers can degrade gracefully.
try:
    htcnn_model = tf.keras.models.load_model('htcnn_model.h5') # Replace with your model path
except Exception as e:
    # Broad catch is intentional at this boundary: any load failure
    # (missing file, version mismatch, corrupt weights) is reported and
    # downstream functions check for None before using the model.
    print(f"Error loading HTCNN model: {e}")
    htcnn_model = None

def extract_embedding(face_image):
    """
    Extracts the feature embedding from a face image using the HTCNN model.

    Args:
        face_image (numpy.ndarray): The cropped face image as an (H, W, C)
            pixel array. Presumably uint8 in [0, 255] — TODO confirm with
            the upstream face-detection step.

    Returns:
        numpy.ndarray: The feature embedding, or None if an error occurs.
    """
    if htcnn_model is None:
        print("HTCNN model not loaded. Cannot extract embedding.")
        return None

    # Bug fix: the original called cv2.resize(), but cv2 is never imported
    # in this module, so every call raised NameError. Resize via PIL instead,
    # which is already imported at the top of the file.
    resized_face = np.asarray(
        Image.fromarray(face_image).resize((160, 160))  # model input size, adjust as needed
    )
    normalized_face = resized_face / 255.0  # scale pixel values to [0, 1]

    # The model expects a batch axis: (1, 160, 160, C). Take element [0]
    # to return a single embedding vector rather than a batch of one.
    embedding = htcnn_model.predict(np.expand_dims(normalized_face, axis=0))[0]
    return embedding

def recognize_face(face_image, face_embeddings_db, threshold=0.6):
    """
    Recognizes a face by comparing its embedding with the embeddings in the database.

    Args:
        face_image (numpy.ndarray): The cropped face image.
        face_embeddings_db (dict): Dictionary of known face embeddings (name: embedding).
        threshold (float): The similarity threshold for recognition.

    Returns:
        str or None: The name of the recognized person, or None if no match is found.
    """
    embedding = extract_embedding(face_image)
    if embedding is None:
        return None

    # Score every known identity by Euclidean distance to the query embedding.
    scored = [
        (np.linalg.norm(embedding - stored), name)
        for name, stored in face_embeddings_db.items()
    ]
    if not scored:
        # Empty database: nothing can possibly match.
        return None

    # min() returns the first minimal element, matching the original
    # strict-less-than tie-breaking (earlier entries win on equal distance).
    best_distance, best_name = min(scored, key=lambda pair: pair[0])

    # Accept the closest identity only if it falls inside the threshold.
    return best_name if best_distance < threshold else None

if __name__ == '__main__':
    # Script entry point is intentionally a no-op placeholder:
    # Example usage (requires having a trained HTCNN model and a database)
    pass