# emolips/code/config.py
# Uploaded via huggingface_hub (revision 4682251, verified)
"""
EMOLIPS Configuration
"""
# Emotion categories
EMOTIONS = ["neutral", "happy", "sad", "angry", "fear", "surprise", "disgust"]
NUM_EMOTIONS = 7
# 3DMM coefficients
EXPRESSION_DIM = 64
POSE_DIM = 6
IDENTITY_DIM = 80
# Emotion encoder
EMOTION_EMBEDDING_DIM = 128
# FiLM layers
FILM_HIDDEN_DIM = 256
# Audio features
AUDIO_SAMPLE_RATE = 16000
MEL_BINS = 80
MFCC_COEFFS = 13
# Video settings
VIDEO_FPS = 25
FACE_SIZE = 256
FACE_SIZE_HD = 512
# Landmark settings
NUM_LANDMARKS_DLIB = 68
NUM_LANDMARKS_MEDIAPIPE = 468
NUM_LIP_LANDMARKS = 20
# Action Units (17 core)
AU_LIST = [1, 2, 4, 6, 7, 9, 10, 12, 14, 15, 17, 20, 23, 24, 25, 26, 28]
NUM_AUS = 17
AU_INTENSITY_SCALE = 5 # 0-5
# Training (if fine-tuning)
BATCH_SIZE = 16
LEARNING_RATE = 1e-4
NUM_EPOCHS = 50
WARMUP_STEPS = 1000
# Loss weights
LAMBDA_SYNC = 1.0 # Lip-sync loss
LAMBDA_EMOTION = 0.5 # Emotion classification loss
LAMBDA_CONSISTENCY = 0.3 # Cross-emotion consistency loss
LAMBDA_RECON = 0.8 # Reconstruction loss
LAMBDA_AU = 0.2 # AU prediction loss
# Evaluation
EVAL_SAMPLE_RATE = 5 # Evaluate every N-th frame
SYNCNET_THRESHOLD = 3.0 # LSE-D threshold for "good" sync
# Paths (adjust for your setup)
SADTALKER_DIR = "./SadTalker"
CHECKPOINT_DIR = "./SadTalker/checkpoints"
OUTPUT_DIR = "./outputs"
RESULTS_DIR = "./results"