José Eliel Camargo Molina
commited on
Commit
·
084a06d
1
Parent(s):
660ea47
fixed issues with blurr deblurr not triggering properly.
Browse files- __pycache__/app.cpython-39.pyc +0 -0
- app.py +180 -328
- emotion_responses.csv +69 -22
- stimuli_metadata.csv +7 -0
__pycache__/app.cpython-39.pyc
CHANGED
|
Binary files a/__pycache__/app.cpython-39.pyc and b/__pycache__/app.cpython-39.pyc differ
|
|
|
app.py
CHANGED
|
@@ -13,91 +13,66 @@ AI_FOLDER = "./AI"
|
|
| 13 |
HUMAN_FOLDER = "./Human"
|
| 14 |
CSV_FILE = "emotion_responses.csv"
|
| 15 |
METADATA_FILE = "stimuli_metadata.csv"
|
| 16 |
-
DEBLUR_DURATION_S =
|
| 17 |
|
| 18 |
-
#
|
| 19 |
URL_PARAM_PARTICIPANT_ID = "pid"
|
| 20 |
-
# Randomize emotion choice order per trial (can be overridden by URL param).
|
| 21 |
RANDOMIZE_EMOTION_ORDER_DEFAULT = True
|
| 22 |
RANDOMIZE_EMOTION_ORDER_PARAM = "randomize"
|
| 23 |
-
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
UNKNOWN_LABEL = "unknown"
|
| 26 |
UNKNOWN_CODE = 0
|
| 27 |
-
|
| 28 |
-
# Filename parsing order from the RIGHT side. Extend if you encode more fields in filenames.
|
| 29 |
-
# Example filename if you extend: "subject_happy_female_asian_front-left.png"
|
| 30 |
FILENAME_FIELD_ORDER = ["emotion"]
|
| 31 |
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
"surprised": 4,
|
| 38 |
-
"disgusted": 5,
|
| 39 |
-
"fearful": 6,
|
| 40 |
-
"neutral": 7,
|
| 41 |
-
"unknown": 0,
|
| 42 |
-
}
|
| 43 |
-
SEX_CODE_MAP = {
|
| 44 |
-
"male": 1,
|
| 45 |
-
"female": 2,
|
| 46 |
-
"other": 3,
|
| 47 |
-
"unknown": 0,
|
| 48 |
-
}
|
| 49 |
-
ETHNICITY_CODE_MAP = {
|
| 50 |
-
"caucasian": 1,
|
| 51 |
-
"black": 2,
|
| 52 |
-
"asian": 3,
|
| 53 |
-
"latino": 4,
|
| 54 |
-
"middle-eastern": 5,
|
| 55 |
-
"indigenous": 6,
|
| 56 |
-
"other": 7,
|
| 57 |
-
"unknown": 0,
|
| 58 |
-
}
|
| 59 |
-
ANGLE_CODE_MAP = {
|
| 60 |
-
"forward": 1,
|
| 61 |
-
"front-left": 2,
|
| 62 |
-
"front-right": 3,
|
| 63 |
-
"left": 4,
|
| 64 |
-
"right": 5,
|
| 65 |
-
"up": 6,
|
| 66 |
-
"down": 7,
|
| 67 |
-
"unknown": 0,
|
| 68 |
-
}
|
| 69 |
-
TYPE_CODE_MAP = {
|
| 70 |
-
"human": 1,
|
| 71 |
-
"ai": 2,
|
| 72 |
-
"unknown": 0,
|
| 73 |
-
}
|
| 74 |
|
| 75 |
CSV_HEADERS = [
|
| 76 |
-
"participant_id",
|
| 77 |
-
"
|
| 78 |
-
"
|
| 79 |
-
"
|
| 80 |
-
"face_type",
|
| 81 |
-
"face_type_code",
|
| 82 |
-
"correct_emotion",
|
| 83 |
-
"correct_emotion_code",
|
| 84 |
-
"face_sex",
|
| 85 |
-
"face_sex_code",
|
| 86 |
-
"face_ethnicity",
|
| 87 |
-
"face_ethnicity_code",
|
| 88 |
-
"face_angle",
|
| 89 |
-
"face_angle_code",
|
| 90 |
-
"selected_emotion",
|
| 91 |
-
"selected_emotion_code",
|
| 92 |
-
"accuracy",
|
| 93 |
-
"response_time_ms",
|
| 94 |
-
"button_order",
|
| 95 |
-
"timestamp",
|
| 96 |
]
|
| 97 |
|
| 98 |
# --- Data Structure ---
|
| 99 |
class ImageData:
|
| 100 |
-
"""A simple class to hold information about each image."""
|
| 101 |
def __init__(self, path, source, emotion, sex=UNKNOWN_LABEL, ethnicity=UNKNOWN_LABEL, angle=UNKNOWN_LABEL, face_type=UNKNOWN_LABEL):
|
| 102 |
self.path = path
|
| 103 |
self.source = source
|
|
@@ -109,30 +84,21 @@ class ImageData:
|
|
| 109 |
self.name = os.path.basename(path)
|
| 110 |
|
| 111 |
# --- Helper Functions ---
|
| 112 |
-
|
| 113 |
def normalize_label(value):
|
| 114 |
-
if value is None:
|
| 115 |
-
|
| 116 |
-
value = str(value).strip().lower()
|
| 117 |
-
value = value.replace(" ", "-")
|
| 118 |
-
return value
|
| 119 |
|
| 120 |
def get_code(code_map, label):
|
| 121 |
-
|
| 122 |
-
if not label:
|
| 123 |
-
return UNKNOWN_CODE
|
| 124 |
-
return code_map.get(label, UNKNOWN_CODE)
|
| 125 |
|
| 126 |
def load_metadata(metadata_path):
|
| 127 |
-
if not os.path.exists(metadata_path):
|
| 128 |
-
return {}
|
| 129 |
metadata = {}
|
| 130 |
with open(metadata_path, newline='') as f:
|
| 131 |
reader = csv.DictReader(f)
|
| 132 |
for row in reader:
|
| 133 |
name = row.get("image_name") or row.get("filename") or row.get("image")
|
| 134 |
-
if not name:
|
| 135 |
-
continue
|
| 136 |
key = name.strip().lower()
|
| 137 |
entry = {
|
| 138 |
"emotion": normalize_label(row.get("emotion")),
|
|
@@ -149,28 +115,21 @@ def load_metadata(metadata_path):
|
|
| 149 |
def parse_filename_fields(image_path):
|
| 150 |
base_name = os.path.splitext(os.path.basename(image_path))[0]
|
| 151 |
parts = base_name.split('_')
|
| 152 |
-
if len(parts) < 2:
|
| 153 |
-
return {}
|
| 154 |
fields = {}
|
| 155 |
for field in FILENAME_FIELD_ORDER:
|
| 156 |
-
if not parts:
|
| 157 |
-
break
|
| 158 |
fields[field] = normalize_label(parts.pop())
|
| 159 |
return fields
|
| 160 |
|
| 161 |
def resolve_field(metadata, filename_fields, key, default=UNKNOWN_LABEL):
|
| 162 |
value = ""
|
| 163 |
-
if metadata:
|
| 164 |
-
|
| 165 |
-
if not value:
|
| 166 |
-
value = filename_fields.get(key, "")
|
| 167 |
return value or default
|
| 168 |
|
| 169 |
def resolve_face_type(metadata, source):
|
| 170 |
-
if metadata:
|
| 171 |
-
face_type = metadata.get("face_type")
|
| 172 |
-
if face_type:
|
| 173 |
-
return normalize_label(face_type)
|
| 174 |
return normalize_label(source)
|
| 175 |
|
| 176 |
def ensure_csv_file():
|
|
@@ -179,7 +138,7 @@ def ensure_csv_file():
|
|
| 179 |
writer = csv.writer(f)
|
| 180 |
writer.writerow(CSV_HEADERS)
|
| 181 |
return CSV_FILE, ""
|
| 182 |
-
|
| 183 |
with open(CSV_FILE, newline='') as f:
|
| 184 |
reader = csv.reader(f)
|
| 185 |
existing_header = next(reader, None)
|
|
@@ -191,26 +150,12 @@ def ensure_csv_file():
|
|
| 191 |
writer = csv.writer(f)
|
| 192 |
writer.writerow(CSV_HEADERS)
|
| 193 |
return new_file, f"Using new results file: {new_file}"
|
| 194 |
-
|
| 195 |
return CSV_FILE, ""
|
| 196 |
|
| 197 |
-
def parse_randomize_param(value):
|
| 198 |
-
if value is None:
|
| 199 |
-
return None
|
| 200 |
-
value = str(value).strip().lower()
|
| 201 |
-
if value in ("0", "false", "no", "off"):
|
| 202 |
-
return False
|
| 203 |
-
if value in ("1", "true", "yes", "on"):
|
| 204 |
-
return True
|
| 205 |
-
return None
|
| 206 |
-
|
| 207 |
def get_participant_id(request):
|
| 208 |
-
if request is None:
|
| 209 |
-
|
| 210 |
-
|
| 211 |
-
if participant_id is None:
|
| 212 |
-
return ""
|
| 213 |
-
return str(participant_id).strip()
|
| 214 |
|
| 215 |
def scan_images():
|
| 216 |
images = []
|
|
@@ -219,11 +164,9 @@ def scan_images():
|
|
| 219 |
skipped = []
|
| 220 |
|
| 221 |
for folder, source in [(AI_FOLDER, "AI"), (HUMAN_FOLDER, "Human")]:
|
| 222 |
-
if not os.path.exists(folder):
|
| 223 |
-
continue
|
| 224 |
for filename in os.listdir(folder):
|
| 225 |
-
if not filename.lower().endswith(('.jpg', '.jpeg', '.png')):
|
| 226 |
-
continue
|
| 227 |
path = os.path.join(folder, filename)
|
| 228 |
meta_key = filename.lower()
|
| 229 |
meta = metadata.get(meta_key) or metadata.get(os.path.splitext(meta_key)[0]) or {}
|
|
@@ -241,49 +184,29 @@ def scan_images():
|
|
| 241 |
|
| 242 |
emotions.add(emotion)
|
| 243 |
images.append(ImageData(path, source, emotion, sex=sex, ethnicity=ethnicity, angle=angle, face_type=face_type))
|
| 244 |
-
|
| 245 |
-
if skipped:
|
| 246 |
-
print(f"[DEBUG] Skipped {len(skipped)} images without an emotion label.")
|
| 247 |
-
|
| 248 |
return images, emotions
|
| 249 |
|
| 250 |
-
# --- Backend Functions ---
|
| 251 |
-
|
| 252 |
def crop_face(image_path, target_size=512):
|
| 253 |
-
|
| 254 |
-
Crops the image to the largest detected face, then resizes and pads it
|
| 255 |
-
to a fixed square size. Returns original if no face is found.
|
| 256 |
-
"""
|
| 257 |
-
if not os.path.exists(image_path):
|
| 258 |
-
return None
|
| 259 |
img = cv2.imread(image_path)
|
| 260 |
-
if img is None:
|
| 261 |
-
return None
|
| 262 |
|
| 263 |
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
| 264 |
-
|
| 265 |
cascade_path = cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
|
| 266 |
-
|
| 267 |
-
|
| 268 |
-
|
| 269 |
-
cropped = img
|
| 270 |
-
else:
|
| 271 |
face_cascade = cv2.CascadeClassifier(cascade_path)
|
| 272 |
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
|
| 273 |
-
|
| 274 |
-
if len(faces) == 0:
|
| 275 |
-
# If no face is detected, use the whole image
|
| 276 |
-
cropped = img
|
| 277 |
-
else:
|
| 278 |
-
# Get the largest face and add padding
|
| 279 |
x, y, w, h = max(faces, key=lambda f: f[2] * f[3])
|
| 280 |
padding = int(0.3 * w)
|
| 281 |
x, y = max(0, x - padding), max(0, y - padding)
|
| 282 |
w, h = min(img.shape[1] - x, w + 2 * padding), min(img.shape[0] - y, h + 2 * padding)
|
| 283 |
cropped = img[y:y+h, x:x+w]
|
| 284 |
|
| 285 |
-
# --- NEW RESIZING AND PADDING LOGIC ---
|
| 286 |
-
# 1. Resize the image to fit within the target size while maintaining aspect ratio
|
| 287 |
h, w, _ = cropped.shape
|
| 288 |
if h > w:
|
| 289 |
new_h = target_size
|
|
@@ -293,278 +216,207 @@ def crop_face(image_path, target_size=512):
|
|
| 293 |
new_h = int(h * (target_size / w))
|
| 294 |
|
| 295 |
resized_img = cv2.resize(cropped, (new_w, new_h), interpolation=cv2.INTER_AREA)
|
| 296 |
-
|
| 297 |
-
# 2. Create a black square canvas
|
| 298 |
canvas = np.zeros((target_size, target_size, 3), dtype=np.uint8)
|
| 299 |
-
|
| 300 |
-
# 3. Paste the resized image onto the center of the canvas
|
| 301 |
y_offset = (target_size - new_h) // 2
|
| 302 |
x_offset = (target_size - new_w) // 2
|
| 303 |
canvas[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = resized_img
|
| 304 |
|
| 305 |
-
# 4. Convert to RGB for Gradio display
|
| 306 |
return cv2.cvtColor(canvas, cv2.COLOR_BGR2RGB)
|
| 307 |
|
|
|
|
|
|
|
| 308 |
def initialize_experiment(request: gr.Request):
|
| 309 |
-
"""Scans folders for images and prepares the experiment state."""
|
| 310 |
os.makedirs(AI_FOLDER, exist_ok=True)
|
| 311 |
os.makedirs(HUMAN_FOLDER, exist_ok=True)
|
| 312 |
-
|
| 313 |
images, emotions = scan_images()
|
|
|
|
| 314 |
if not images:
|
| 315 |
-
return None, "Error: No images found.
|
| 316 |
-
|
| 317 |
-
sorted_emotions = sorted(list(emotions))
|
| 318 |
-
if not sorted_emotions:
|
| 319 |
-
return None, "Error: No valid emotion labels found in image names or metadata.", gr.update(interactive=False)
|
| 320 |
|
| 321 |
session_id = str(uuid.uuid4())
|
| 322 |
participant_id = get_participant_id(request)
|
| 323 |
if not participant_id:
|
| 324 |
participant_id = f"anon-{session_id}"
|
| 325 |
-
|
| 326 |
else:
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
randomize_emotions = RANDOMIZE_EMOTION_ORDER_DEFAULT
|
| 330 |
-
if request is not None:
|
| 331 |
-
override = parse_randomize_param(request.query_params.get(RANDOMIZE_EMOTION_ORDER_PARAM))
|
| 332 |
-
if override is not None:
|
| 333 |
-
randomize_emotions = override
|
| 334 |
|
| 335 |
csv_file, csv_status = ensure_csv_file()
|
| 336 |
-
|
| 337 |
-
if csv_status:
|
| 338 |
-
status_lines.append(csv_status)
|
| 339 |
-
|
| 340 |
random.shuffle(images)
|
| 341 |
initial_state = {
|
| 342 |
"participant_id": participant_id,
|
| 343 |
"session_id": session_id,
|
| 344 |
"csv_file": csv_file,
|
| 345 |
"all_images": images,
|
| 346 |
-
"emotions":
|
| 347 |
"current_index": -1,
|
| 348 |
"current_choices": [],
|
| 349 |
-
"randomize_emotions":
|
| 350 |
"start_time": None,
|
| 351 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 352 |
|
| 353 |
-
return initial_state, "\n
|
| 354 |
|
| 355 |
def start_interface(state):
|
| 356 |
-
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
gr.update(visible=True), # instructions_section
|
| 360 |
-
gr.update(visible=True), # start_btn
|
| 361 |
-
gr.update(visible=False), # main_section
|
| 362 |
-
)
|
| 363 |
-
return (
|
| 364 |
-
gr.update(visible=False), # instructions_section
|
| 365 |
-
gr.update(visible=False), # start_btn
|
| 366 |
-
gr.update(visible=True), # main_section
|
| 367 |
-
)
|
| 368 |
|
| 369 |
def show_next_image(state):
|
| 370 |
-
|
| 371 |
-
if not state:
|
| 372 |
-
return (
|
| 373 |
-
state,
|
| 374 |
-
None,
|
| 375 |
-
"No experiment state available.",
|
| 376 |
-
gr.update(visible=False),
|
| 377 |
-
gr.update(visible=False),
|
| 378 |
-
)
|
| 379 |
|
| 380 |
state["current_index"] += 1
|
| 381 |
index = state["current_index"]
|
| 382 |
|
| 383 |
if index >= len(state["all_images"]):
|
| 384 |
-
return (
|
| 385 |
-
state,
|
| 386 |
-
None,
|
| 387 |
-
"Experiment complete! Thank you for participating.",
|
| 388 |
-
gr.update(visible=False), # next_image_btn
|
| 389 |
-
gr.update(visible=False), # emotion_choice
|
| 390 |
-
)
|
| 391 |
|
| 392 |
image_data = state["all_images"][index]
|
| 393 |
cropped_image = crop_face(image_data.path)
|
| 394 |
|
| 395 |
if cropped_image is None:
|
| 396 |
-
|
| 397 |
-
|
| 398 |
-
None,
|
| 399 |
-
f"Error loading image: {image_data.name}",
|
| 400 |
-
gr.update(visible=True), # show Next so user can skip the broken one
|
| 401 |
-
gr.update(visible=False), # emotion_choice
|
| 402 |
-
)
|
| 403 |
|
| 404 |
state["start_time"] = time.monotonic()
|
| 405 |
-
|
| 406 |
-
|
| 407 |
choices = list(state["emotions"])
|
| 408 |
if state.get("randomize_emotions"):
|
| 409 |
choices = random.sample(choices, k=len(choices))
|
| 410 |
state["current_choices"] = choices
|
|
|
|
| 411 |
|
| 412 |
return (
|
| 413 |
state,
|
| 414 |
-
cropped_image,
|
|
|
|
| 415 |
f"Image {index + 1} of {len(state['all_images'])}",
|
| 416 |
-
gr.update(visible=False), #
|
| 417 |
-
gr.update(
|
|
|
|
| 418 |
)
|
| 419 |
|
| 420 |
def on_emotion_select(state, selected_emotion):
|
| 421 |
-
|
| 422 |
-
if not state or not selected_emotion:
|
| 423 |
-
|
| 424 |
-
|
| 425 |
-
|
| 426 |
-
# Try to save; don't let errors block UI updates
|
| 427 |
try:
|
| 428 |
start_time = state.get("start_time") or time.monotonic()
|
| 429 |
response_time_ms = int(round((time.monotonic() - start_time) * 1000))
|
| 430 |
image_data = state["all_images"][state["current_index"]]
|
| 431 |
-
|
|
|
|
|
|
|
| 432 |
with open(state["csv_file"], 'a', newline='') as f:
|
| 433 |
writer = csv.writer(f)
|
| 434 |
writer.writerow([
|
| 435 |
-
state["participant_id"],
|
| 436 |
-
|
| 437 |
-
image_data.
|
| 438 |
-
image_data.
|
| 439 |
-
image_data.
|
| 440 |
-
get_code(
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
image_data.sex,
|
| 444 |
-
get_code(SEX_CODE_MAP, image_data.sex),
|
| 445 |
-
image_data.ethnicity,
|
| 446 |
-
get_code(ETHNICITY_CODE_MAP, image_data.ethnicity),
|
| 447 |
-
image_data.angle,
|
| 448 |
-
get_code(ANGLE_CODE_MAP, image_data.angle),
|
| 449 |
-
selected_emotion,
|
| 450 |
-
get_code(EMOTION_CODE_MAP, selected_emotion),
|
| 451 |
-
accuracy,
|
| 452 |
-
response_time_ms,
|
| 453 |
-
"|".join(state.get("current_choices", [])),
|
| 454 |
datetime.now().isoformat(),
|
| 455 |
])
|
| 456 |
-
print(f"[DEBUG]
|
| 457 |
except Exception as e:
|
| 458 |
-
print("
|
| 459 |
-
|
| 460 |
-
|
| 461 |
-
|
| 462 |
-
|
| 463 |
-
|
| 464 |
-
|
| 465 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 466 |
|
| 467 |
-
# --- Gradio
|
| 468 |
-
with gr.Blocks(theme=gr.themes.Soft()) as app:
|
| 469 |
state = gr.State()
|
| 470 |
-
|
| 471 |
gr.Markdown("# Face Emotion Recognition Study")
|
| 472 |
|
|
|
|
| 473 |
with gr.Column(visible=True) as instructions_section:
|
| 474 |
-
gr.Markdown(
|
| 475 |
-
"""
|
| 476 |
-
## Instructions
|
| 477 |
-
1. An image of a face will appear. It will start very blurry.
|
| 478 |
-
2. The image will gradually become clear over 10 seconds.
|
| 479 |
-
3. As soon as you recognize the emotion, select the corresponding option below.
|
| 480 |
-
4. The image will become fully clear, and a "Next Image" button will appear.
|
| 481 |
-
5. Click "Next Image" to continue the study.
|
| 482 |
-
|
| 483 |
-
**Please respond as quickly and accurately as you can. Your response time is being measured.**
|
| 484 |
-
"""
|
| 485 |
-
)
|
| 486 |
start_btn = gr.Button("START STUDY", variant="primary")
|
| 487 |
status_text = gr.Markdown("")
|
| 488 |
|
|
|
|
| 489 |
with gr.Column(visible=False) as main_section:
|
| 490 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 491 |
progress_text = gr.Markdown("")
|
| 492 |
-
|
|
|
|
|
|
|
|
|
|
| 493 |
|
| 494 |
-
|
| 495 |
|
| 496 |
-
#
|
| 497 |
-
app.load(
|
| 498 |
-
fn=initialize_experiment,
|
| 499 |
-
outputs=[state, status_text, start_btn]
|
| 500 |
-
).then(
|
| 501 |
-
fn=None,
|
| 502 |
-
js=f"""() => {{
|
| 503 |
-
// define animation helpers once per session
|
| 504 |
-
window.animationFrameId = null;
|
| 505 |
-
window.deblurImage = function() {{
|
| 506 |
-
const img = document.querySelector("#image_display img");
|
| 507 |
-
if (!img) return;
|
| 508 |
-
const duration = {DEBLUR_DURATION_S * 1000};
|
| 509 |
-
const initialBlur = 20;
|
| 510 |
-
let startTime = null;
|
| 511 |
-
function animate(currentTime) {{
|
| 512 |
-
if (!startTime) startTime = currentTime;
|
| 513 |
-
const elapsedTime = currentTime - startTime;
|
| 514 |
-
const progress = Math.min(elapsedTime / duration, 1);
|
| 515 |
-
const currentBlur = initialBlur * (1 - progress);
|
| 516 |
-
img.style.filter = 'blur(' + currentBlur + 'px)';
|
| 517 |
-
if (progress < 1) {{
|
| 518 |
-
window.animationFrameId = requestAnimationFrame(animate);
|
| 519 |
-
}}
|
| 520 |
-
}}
|
| 521 |
-
cancelAnimationFrame(window.animationFrameId);
|
| 522 |
-
const img2 = document.querySelector("#image_display img");
|
| 523 |
-
if (img2) img2.style.filter = 'blur(' + initialBlur + 'px)';
|
| 524 |
-
window.animationFrameId = requestAnimationFrame(animate);
|
| 525 |
-
}};
|
| 526 |
-
window.unblurImmediately = function() {{
|
| 527 |
-
cancelAnimationFrame(window.animationFrameId);
|
| 528 |
-
const img = document.querySelector("#image_display img");
|
| 529 |
-
if (img) img.style.filter = 'blur(0px)';
|
| 530 |
-
}};
|
| 531 |
-
}}"""
|
| 532 |
-
)
|
| 533 |
|
|
|
|
| 534 |
start_btn.click(
|
| 535 |
-
fn=start_interface,
|
| 536 |
-
inputs=[state],
|
| 537 |
-
outputs=[instructions_section, start_btn, main_section]
|
| 538 |
).then(
|
| 539 |
-
fn=show_next_image,
|
| 540 |
-
inputs=[state],
|
| 541 |
-
outputs=[state,
|
| 542 |
).then(
|
| 543 |
-
fn=None,
|
| 544 |
-
js="() => window.deblurImage()"
|
| 545 |
)
|
| 546 |
|
| 547 |
-
#
|
| 548 |
emotion_choice.change(
|
| 549 |
-
fn=on_emotion_select,
|
| 550 |
-
inputs=[state, emotion_choice],
|
| 551 |
-
outputs=[emotion_choice, next_image_btn]
|
| 552 |
-
js="() => window.unblurImmediately()"
|
| 553 |
)
|
| 554 |
|
|
|
|
| 555 |
next_image_btn.click(
|
| 556 |
-
fn=show_next_image,
|
| 557 |
-
inputs=[state],
|
| 558 |
-
outputs=[state,
|
| 559 |
).then(
|
| 560 |
-
fn=None,
|
| 561 |
-
js="() => window.deblurImage()"
|
| 562 |
)
|
| 563 |
|
| 564 |
if __name__ == "__main__":
|
| 565 |
-
|
| 566 |
-
print("Please create two folders: './AI' and './Human'")
|
| 567 |
-
print("Place images in them named like 'any_name_happy.jpg', 'some_face_sad.png', etc.")
|
| 568 |
-
print(f"Optional metadata file: '{METADATA_FILE}' with columns image_name, emotion, sex, ethnicity, angle, face_type.")
|
| 569 |
-
print(f"Participant ID via URL param '?{URL_PARAM_PARTICIPANT_ID}=...'")
|
| 570 |
-
app.launch()
|
|
|
|
| 13 |
HUMAN_FOLDER = "./Human"
|
| 14 |
CSV_FILE = "emotion_responses.csv"
|
| 15 |
METADATA_FILE = "stimuli_metadata.csv"
|
| 16 |
+
DEBLUR_DURATION_S = 5 # Seconds to go from Blur -> Clear
|
| 17 |
|
| 18 |
+
# --- Advanced Features Config ---
|
| 19 |
URL_PARAM_PARTICIPANT_ID = "pid"
|
|
|
|
| 20 |
RANDOMIZE_EMOTION_ORDER_DEFAULT = True
|
| 21 |
RANDOMIZE_EMOTION_ORDER_PARAM = "randomize"
|
| 22 |
+
CHOICE_PLACEHOLDER = "Select an emotion..."
|
| 23 |
+
|
| 24 |
+
# --- CSS STYLES ---
|
| 25 |
+
APP_CSS = f"""
|
| 26 |
+
#emotion_choice, #emotion_choice .wrap {{ max-height: 260px; overflow-y: auto; }}
|
| 27 |
+
#next_btn {{ margin: 8px 0 12px 0; }}
|
| 28 |
+
|
| 29 |
+
@media (max-width: 640px) {{
|
| 30 |
+
#img_anim img, #img_static img {{ max-height: 280px; object-fit: contain; }}
|
| 31 |
+
}}
|
| 32 |
+
|
| 33 |
+
/* --- ANIMATED IMAGE (The Test) --- */
|
| 34 |
+
/* 1. Start HEAVILY BLURRED by default */
|
| 35 |
+
#img_anim img {{
|
| 36 |
+
filter: blur(50px);
|
| 37 |
+
display: block;
|
| 38 |
+
transform: scale(1.0);
|
| 39 |
+
}}
|
| 40 |
+
|
| 41 |
+
/* 2. The JS adds this class to animate it to clear */
|
| 42 |
+
.image-clear {{
|
| 43 |
+
transition: filter {DEBLUR_DURATION_S}s linear !important;
|
| 44 |
+
filter: blur(0px) !important;
|
| 45 |
+
}}
|
| 46 |
+
|
| 47 |
+
/* --- STATIC IMAGE (The Result) --- */
|
| 48 |
+
/* No special CSS needed. It will just be a normal, clear image.
|
| 49 |
+
We ensure it aligns perfectly with the animated one. */
|
| 50 |
+
#img_static img {{
|
| 51 |
+
display: block;
|
| 52 |
+
filter: blur(0px);
|
| 53 |
+
}}
|
| 54 |
+
"""
|
| 55 |
+
|
| 56 |
+
# --- Constants & Mappings ---
|
| 57 |
UNKNOWN_LABEL = "unknown"
|
| 58 |
UNKNOWN_CODE = 0
|
|
|
|
|
|
|
|
|
|
| 59 |
FILENAME_FIELD_ORDER = ["emotion"]
|
| 60 |
|
| 61 |
+
EMOTION_CODE_MAP = {"happy": 1, "sad": 2, "angry": 3, "surprised": 4, "disgusted": 5, "fearful": 6, "neutral": 7, "unknown": 0}
|
| 62 |
+
SEX_CODE_MAP = {"male": 1, "female": 2, "other": 3, "unknown": 0}
|
| 63 |
+
ETHNICITY_CODE_MAP = {"caucasian": 1, "black": 2, "asian": 3, "latino": 4, "middle-eastern": 5, "indigenous": 6, "other": 7, "unknown": 0}
|
| 64 |
+
ANGLE_CODE_MAP = {"forward": 1, "front-left": 2, "front-right": 3, "left": 4, "right": 5, "up": 6, "down": 7, "unknown": 0}
|
| 65 |
+
TYPE_CODE_MAP = {"human": 1, "ai": 2, "unknown": 0}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 66 |
|
| 67 |
CSV_HEADERS = [
|
| 68 |
+
"participant_id", "session_id", "image_name", "image_source", "face_type", "face_type_code",
|
| 69 |
+
"correct_emotion", "correct_emotion_code", "face_sex", "face_sex_code", "face_ethnicity", "face_ethnicity_code",
|
| 70 |
+
"face_angle", "face_angle_code", "selected_emotion", "selected_emotion_code", "accuracy",
|
| 71 |
+
"response_time_ms", "button_order", "timestamp",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 72 |
]
|
| 73 |
|
| 74 |
# --- Data Structure ---
|
| 75 |
class ImageData:
|
|
|
|
| 76 |
def __init__(self, path, source, emotion, sex=UNKNOWN_LABEL, ethnicity=UNKNOWN_LABEL, angle=UNKNOWN_LABEL, face_type=UNKNOWN_LABEL):
|
| 77 |
self.path = path
|
| 78 |
self.source = source
|
|
|
|
| 84 |
self.name = os.path.basename(path)
|
| 85 |
|
| 86 |
# --- Helper Functions ---
|
|
|
|
| 87 |
def normalize_label(value):
|
| 88 |
+
if value is None: return ""
|
| 89 |
+
return str(value).strip().lower().replace(" ", "-")
|
|
|
|
|
|
|
|
|
|
| 90 |
|
| 91 |
def get_code(code_map, label):
|
| 92 |
+
return code_map.get(normalize_label(label), UNKNOWN_CODE)
|
|
|
|
|
|
|
|
|
|
| 93 |
|
| 94 |
def load_metadata(metadata_path):
|
| 95 |
+
if not os.path.exists(metadata_path): return {}
|
|
|
|
| 96 |
metadata = {}
|
| 97 |
with open(metadata_path, newline='') as f:
|
| 98 |
reader = csv.DictReader(f)
|
| 99 |
for row in reader:
|
| 100 |
name = row.get("image_name") or row.get("filename") or row.get("image")
|
| 101 |
+
if not name: continue
|
|
|
|
| 102 |
key = name.strip().lower()
|
| 103 |
entry = {
|
| 104 |
"emotion": normalize_label(row.get("emotion")),
|
|
|
|
| 115 |
def parse_filename_fields(image_path):
|
| 116 |
base_name = os.path.splitext(os.path.basename(image_path))[0]
|
| 117 |
parts = base_name.split('_')
|
| 118 |
+
if len(parts) < 2: return {}
|
|
|
|
| 119 |
fields = {}
|
| 120 |
for field in FILENAME_FIELD_ORDER:
|
| 121 |
+
if not parts: break
|
|
|
|
| 122 |
fields[field] = normalize_label(parts.pop())
|
| 123 |
return fields
|
| 124 |
|
| 125 |
def resolve_field(metadata, filename_fields, key, default=UNKNOWN_LABEL):
|
| 126 |
value = ""
|
| 127 |
+
if metadata: value = normalize_label(metadata.get(key))
|
| 128 |
+
if not value: value = filename_fields.get(key, "")
|
|
|
|
|
|
|
| 129 |
return value or default
|
| 130 |
|
| 131 |
def resolve_face_type(metadata, source):
|
| 132 |
+
if metadata and metadata.get("face_type"): return normalize_label(metadata.get("face_type"))
|
|
|
|
|
|
|
|
|
|
| 133 |
return normalize_label(source)
|
| 134 |
|
| 135 |
def ensure_csv_file():
|
|
|
|
| 138 |
writer = csv.writer(f)
|
| 139 |
writer.writerow(CSV_HEADERS)
|
| 140 |
return CSV_FILE, ""
|
| 141 |
+
|
| 142 |
with open(CSV_FILE, newline='') as f:
|
| 143 |
reader = csv.reader(f)
|
| 144 |
existing_header = next(reader, None)
|
|
|
|
| 150 |
writer = csv.writer(f)
|
| 151 |
writer.writerow(CSV_HEADERS)
|
| 152 |
return new_file, f"Using new results file: {new_file}"
|
|
|
|
| 153 |
return CSV_FILE, ""
|
| 154 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 155 |
def get_participant_id(request):
|
| 156 |
+
if request is None: return ""
|
| 157 |
+
pid = request.query_params.get(URL_PARAM_PARTICIPANT_ID)
|
| 158 |
+
return str(pid).strip() if pid else ""
|
|
|
|
|
|
|
|
|
|
| 159 |
|
| 160 |
def scan_images():
|
| 161 |
images = []
|
|
|
|
| 164 |
skipped = []
|
| 165 |
|
| 166 |
for folder, source in [(AI_FOLDER, "AI"), (HUMAN_FOLDER, "Human")]:
|
| 167 |
+
if not os.path.exists(folder): continue
|
|
|
|
| 168 |
for filename in os.listdir(folder):
|
| 169 |
+
if not filename.lower().endswith(('.jpg', '.jpeg', '.png')): continue
|
|
|
|
| 170 |
path = os.path.join(folder, filename)
|
| 171 |
meta_key = filename.lower()
|
| 172 |
meta = metadata.get(meta_key) or metadata.get(os.path.splitext(meta_key)[0]) or {}
|
|
|
|
| 184 |
|
| 185 |
emotions.add(emotion)
|
| 186 |
images.append(ImageData(path, source, emotion, sex=sex, ethnicity=ethnicity, angle=angle, face_type=face_type))
|
| 187 |
+
|
| 188 |
+
if skipped: print(f"[DEBUG] Skipped {len(skipped)} images without emotion label.")
|
|
|
|
|
|
|
| 189 |
return images, emotions
|
| 190 |
|
|
|
|
|
|
|
| 191 |
def crop_face(image_path, target_size=512):
|
| 192 |
+
if not os.path.exists(image_path): return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 193 |
img = cv2.imread(image_path)
|
| 194 |
+
if img is None: return None
|
|
|
|
| 195 |
|
| 196 |
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
|
|
|
| 197 |
cascade_path = cv2.data.haarcascades + 'haarcascade_frontalface_default.xml'
|
| 198 |
+
cropped = img
|
| 199 |
+
|
| 200 |
+
if os.path.exists(cascade_path):
|
|
|
|
|
|
|
| 201 |
face_cascade = cv2.CascadeClassifier(cascade_path)
|
| 202 |
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
|
| 203 |
+
if len(faces) > 0:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 204 |
x, y, w, h = max(faces, key=lambda f: f[2] * f[3])
|
| 205 |
padding = int(0.3 * w)
|
| 206 |
x, y = max(0, x - padding), max(0, y - padding)
|
| 207 |
w, h = min(img.shape[1] - x, w + 2 * padding), min(img.shape[0] - y, h + 2 * padding)
|
| 208 |
cropped = img[y:y+h, x:x+w]
|
| 209 |
|
|
|
|
|
|
|
| 210 |
h, w, _ = cropped.shape
|
| 211 |
if h > w:
|
| 212 |
new_h = target_size
|
|
|
|
| 216 |
new_h = int(h * (target_size / w))
|
| 217 |
|
| 218 |
resized_img = cv2.resize(cropped, (new_w, new_h), interpolation=cv2.INTER_AREA)
|
|
|
|
|
|
|
| 219 |
canvas = np.zeros((target_size, target_size, 3), dtype=np.uint8)
|
|
|
|
|
|
|
| 220 |
y_offset = (target_size - new_h) // 2
|
| 221 |
x_offset = (target_size - new_w) // 2
|
| 222 |
canvas[y_offset:y_offset+new_h, x_offset:x_offset+new_w] = resized_img
|
| 223 |
|
|
|
|
| 224 |
return cv2.cvtColor(canvas, cv2.COLOR_BGR2RGB)
|
| 225 |
|
| 226 |
+
# --- Backend Logic ---
|
| 227 |
+
|
| 228 |
def initialize_experiment(request: gr.Request):
|
|
|
|
| 229 |
os.makedirs(AI_FOLDER, exist_ok=True)
|
| 230 |
os.makedirs(HUMAN_FOLDER, exist_ok=True)
|
|
|
|
| 231 |
images, emotions = scan_images()
|
| 232 |
+
|
| 233 |
if not images:
|
| 234 |
+
return None, "Error: No images found.", gr.update(interactive=False)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 235 |
|
| 236 |
session_id = str(uuid.uuid4())
|
| 237 |
participant_id = get_participant_id(request)
|
| 238 |
if not participant_id:
|
| 239 |
participant_id = f"anon-{session_id}"
|
| 240 |
+
msg = f"Participant ID: {participant_id}"
|
| 241 |
else:
|
| 242 |
+
msg = f"Participant ID: {participant_id}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 243 |
|
| 244 |
csv_file, csv_status = ensure_csv_file()
|
| 245 |
+
|
|
|
|
|
|
|
|
|
|
| 246 |
random.shuffle(images)
|
| 247 |
initial_state = {
|
| 248 |
"participant_id": participant_id,
|
| 249 |
"session_id": session_id,
|
| 250 |
"csv_file": csv_file,
|
| 251 |
"all_images": images,
|
| 252 |
+
"emotions": sorted(list(emotions)),
|
| 253 |
"current_index": -1,
|
| 254 |
"current_choices": [],
|
| 255 |
+
"randomize_emotions": RANDOMIZE_EMOTION_ORDER_DEFAULT,
|
| 256 |
"start_time": None,
|
| 257 |
}
|
| 258 |
+
|
| 259 |
+
if request:
|
| 260 |
+
val = request.query_params.get(RANDOMIZE_EMOTION_ORDER_PARAM)
|
| 261 |
+
if val and val.lower() in ['0','false','no']:
|
| 262 |
+
initial_state["randomize_emotions"] = False
|
| 263 |
|
| 264 |
+
return initial_state, f"{msg}\n{csv_status}", gr.update(interactive=True)
|
| 265 |
|
| 266 |
def start_interface(state):
|
| 267 |
+
if not state:
|
| 268 |
+
return gr.update(visible=True), gr.update(visible=True), gr.update(visible=False)
|
| 269 |
+
return gr.update(visible=False), gr.update(visible=False), gr.update(visible=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 270 |
|
| 271 |
def show_next_image(state):
|
| 272 |
+
# Returns: [state, img_anim, img_static, progress_text, anim_visible, static_visible, choices_update]
|
| 273 |
+
if not state:
|
| 274 |
+
return state, None, None, "Error", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 275 |
|
| 276 |
state["current_index"] += 1
|
| 277 |
index = state["current_index"]
|
| 278 |
|
| 279 |
if index >= len(state["all_images"]):
|
| 280 |
+
return state, None, None, "Experiment complete!", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 281 |
|
| 282 |
image_data = state["all_images"][index]
|
| 283 |
cropped_image = crop_face(image_data.path)
|
| 284 |
|
| 285 |
if cropped_image is None:
|
| 286 |
+
# Recursive skip if image fails to load
|
| 287 |
+
return show_next_image(state)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 288 |
|
| 289 |
state["start_time"] = time.monotonic()
|
| 290 |
+
|
|
|
|
| 291 |
choices = list(state["emotions"])
|
| 292 |
if state.get("randomize_emotions"):
|
| 293 |
choices = random.sample(choices, k=len(choices))
|
| 294 |
state["current_choices"] = choices
|
| 295 |
+
choices_with_placeholder = [CHOICE_PLACEHOLDER] + choices
|
| 296 |
|
| 297 |
return (
|
| 298 |
state,
|
| 299 |
+
cropped_image, # For Animated Component
|
| 300 |
+
cropped_image, # For Static Component
|
| 301 |
f"Image {index + 1} of {len(state['all_images'])}",
|
| 302 |
+
gr.update(visible=True, interactive=False), # Show Animated
|
| 303 |
+
gr.update(visible=False), # Hide Static
|
| 304 |
+
gr.update(choices=choices_with_placeholder, value=CHOICE_PLACEHOLDER, visible=True, interactive=True),
|
| 305 |
)
|
| 306 |
|
| 307 |
def on_emotion_select(state, selected_emotion):
    """Record the participant's answer, then snap the image to its clear version.

    Returns: [anim_visible, static_visible, choices_interactive, next_btn_interactive]
    """
    if not state or not selected_emotion or normalize_label(selected_emotion) == normalize_label(CHOICE_PLACEHOLDER):
        # Do nothing if placeholder selected
        return gr.update(), gr.update(), gr.update(), gr.update()

    try:
        start_time = state.get("start_time") or time.monotonic()
        response_time_ms = int(round((time.monotonic() - start_time) * 1000))

        index = state["current_index"]
        if not (0 <= index < len(state["all_images"])):
            # No active trial (e.g. experiment already finished): ignore the event
            # instead of letting an IndexError fall into the except below.
            return gr.update(), gr.update(), gr.update(), gr.update()
        image_data = state["all_images"][index]

        normalized_sel = normalize_label(selected_emotion)
        accuracy = "correct" if normalized_sel == image_data.emotion else "incorrect"

        # Encoding pinned so the response log is portable across locales.
        with open(state["csv_file"], 'a', newline='', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow([
                state["participant_id"], state["session_id"], image_data.name, image_data.source,
                image_data.face_type, get_code(TYPE_CODE_MAP, image_data.face_type),
                image_data.emotion, get_code(EMOTION_CODE_MAP, image_data.emotion),
                image_data.sex, get_code(SEX_CODE_MAP, image_data.sex),
                image_data.ethnicity, get_code(ETHNICITY_CODE_MAP, image_data.ethnicity),
                image_data.angle, get_code(ANGLE_CODE_MAP, image_data.angle),
                normalized_sel, get_code(EMOTION_CODE_MAP, normalized_sel),
                accuracy, response_time_ms, "|".join(state.get("current_choices", [])),
                datetime.now().isoformat(),
            ])
        print(f"[DEBUG] Saved {normalized_sel} ({response_time_ms}ms)")
    except Exception as e:
        # Best effort: never crash the UI over a logging failure.
        print(f"Error saving CSV: {e}")

    # Hide Animated, Show Static (Snap), Disable Dropdown, Enable Next
    return gr.update(visible=False), gr.update(visible=True), gr.update(interactive=False), gr.update(interactive=True)
|
| 339 |
+
|
| 340 |
+
# --- JAVASCRIPT ---
# Logic: Find the animated image element, reset its class to remove 'image-clear',
# force a reflow, then add 'image-clear' to start the transition.
# Registered once on app.load; event chains later invoke it with
# js="() => window.triggerDeblur()" to (re)start the CSS blur->clear animation.
js_functions = """
() => {
    window.triggerDeblur = function() {
        const el = document.querySelector("#img_anim img");
        if (el) {
            // 1. Reset to start state (Blurred)
            el.classList.remove('image-clear');

            // 2. Force Browser Reflow (Crucial for restarting CSS animations)
            void el.offsetWidth;

            // 3. Start Animation
            setTimeout(() => {
                el.classList.add('image-clear');
            }, 100);
        }
    };
}
"""
|
| 362 |
|
| 363 |
+
# --- Gradio App ---
# Top-level UI definition and event wiring for the deblur experiment.
with gr.Blocks(theme=gr.themes.Soft(), css=APP_CSS) as app:
    # Per-session experiment state (participant id, image list, timers, ...).
    state = gr.State()
    gr.Markdown("# Face Emotion Recognition Study")

    # 1. Landing Page
    with gr.Column(visible=True) as instructions_section:
        gr.Markdown(f"## Instructions\nIdentify the emotion as the image becomes clear ({DEBLUR_DURATION_S}s).")
        start_btn = gr.Button("START STUDY", variant="primary")
        status_text = gr.Markdown("")

    # 2. Main Experiment Interface
    with gr.Column(visible=False) as main_section:
        # Image Stack: Two images occupy the same conceptual space
        with gr.Group():
            # Animated Image: Visible initially, performs blur->clear
            image_anim = gr.Image(label="", elem_id="img_anim", height=400, width=400, interactive=False, show_label=False, visible=True)
            # Static Image: Hidden initially, shows instantly when user selects answer
            image_static = gr.Image(label="", elem_id="img_static", height=400, width=400, interactive=False, show_label=False, visible=False)

        progress_text = gr.Markdown("")

        # Controls
        emotion_choice = gr.Radio(choices=[], label="Select the emotion", visible=False, interactive=True, elem_id="emotion_choice")
        next_image_btn = gr.Button("Next Image ▶", variant="secondary", visible=True, interactive=False, elem_id="next_btn")

    # --- Event Wiring ---

    # App Load: build session state, then install window.triggerDeblur (js_functions).
    app.load(fn=initialize_experiment, outputs=[state, status_text, start_btn]).then(fn=None, js=js_functions)

    # Start Button -> Show Interface -> Load First Image -> Trigger Animation
    # NOTE(review): image_anim and image_static each appear twice in the outputs
    # list (once to receive the image value, once to receive the visibility
    # update) — this relies on Gradio accepting duplicate output components;
    # confirm against the installed Gradio version.
    start_btn.click(
        fn=start_interface, inputs=[state], outputs=[instructions_section, start_btn, main_section]
    ).then(
        fn=show_next_image,
        inputs=[state],
        outputs=[state, image_anim, image_static, progress_text, image_anim, image_static, emotion_choice]
    ).then(
        fn=None, js="() => window.triggerDeblur()"
    )

    # Emotion Selected -> Swap Images (Snap to Clear) -> Save Data
    emotion_choice.change(
        fn=on_emotion_select,
        inputs=[state, emotion_choice],
        outputs=[image_anim, image_static, emotion_choice, next_image_btn]
    )

    # Next Button -> Load New Image -> Reset Layout -> Trigger Animation
    next_image_btn.click(
        fn=show_next_image,
        inputs=[state],
        outputs=[state, image_anim, image_static, progress_text, image_anim, image_static, emotion_choice]
    ).then(
        fn=None, js="() => window.triggerDeblur()"
    )
|
| 420 |
|
| 421 |
if __name__ == "__main__":
    # Entry point: serve the Gradio app (blocks until the server stops).
    app.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
emotion_responses.csv
CHANGED
|
@@ -1,22 +1,69 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
participant_id,session_id,image_name,image_source,face_type,face_type_code,correct_emotion,correct_emotion_code,face_sex,face_sex_code,face_ethnicity,face_ethnicity_code,face_angle,face_angle_code,selected_emotion,selected_emotion_code,accuracy,response_time_ms,button_order,timestamp
|
| 2 |
+
anon-293b45d4-bb79-4821-916d-a1fb77799b2b,293b45d4-bb79-4821-916d-a1fb77799b2b,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,surprised,4,correct,7226,surprised|disgusted|happy|angry,2026-01-08T20:14:30.560025
|
| 3 |
+
anon-293b45d4-bb79-4821-916d-a1fb77799b2b,293b45d4-bb79-4821-916d-a1fb77799b2b,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,angry,3,correct,9197,happy|surprised|disgusted|angry,2026-01-08T20:14:40.821270
|
| 4 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,kid_surprised.png,AI,ai,2,surprised,4,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,8907,happy|angry|disgusted|surprised,2026-01-08T20:17:48.571466
|
| 5 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman_disgusted.png,Human,human,1,disgusted,5,unknown,0,unknown,0,unknown,0,disgusted,5,correct,4333,surprised|disgusted|happy|angry,2026-01-08T20:17:55.467188
|
| 6 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,happy,1,incorrect,6839,happy|angry|surprised|disgusted,2026-01-08T20:18:10.221377
|
| 7 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman2_happy.png,AI,ai,2,happy,1,unknown,0,unknown,0,unknown,0,angry,3,incorrect,3415,happy|surprised|angry|disgusted,2026-01-08T20:18:15.074756
|
| 8 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,oldman_angry.png,AI,ai,2,angry,3,unknown,0,unknown,0,unknown,0,happy,1,incorrect,1693,surprised|happy|angry|disgusted,2026-01-08T20:18:17.922186
|
| 9 |
+
anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,surprised,4,correct,936,angry|disgusted|happy|surprised,2026-01-08T20:18:19.603907
|
| 10 |
+
anon-90019529-c56c-4dc3-83ed-744cd30d6d7e,90019529-c56c-4dc3-83ed-744cd30d6d7e,woman2_happy.png,AI,ai,2,happy,1,unknown,0,unknown,0,unknown,0,angry,3,incorrect,3358,happy|angry|disgusted|surprised,2026-01-08T20:24:34.229398
|
| 11 |
+
anon-90019529-c56c-4dc3-83ed-744cd30d6d7e,90019529-c56c-4dc3-83ed-744cd30d6d7e,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,1771,happy|surprised|disgusted|angry,2026-01-08T20:24:37.144745
|
| 12 |
+
anon-0819290d-1006-43fc-8b44-c2d564710e3e,0819290d-1006-43fc-8b44-c2d564710e3e,kid_surprised.png,AI,ai,2,surprised,4,unknown,0,unknown,0,unknown,0,happy,1,incorrect,4620,surprised|angry|disgusted|happy,2026-01-08T20:24:57.620828
|
| 13 |
+
anon-0819290d-1006-43fc-8b44-c2d564710e3e,0819290d-1006-43fc-8b44-c2d564710e3e,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,7994,angry|surprised|disgusted|happy,2026-01-08T20:25:06.930289
|
| 14 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,1731,angry|disgusted|surprised|happy,2026-01-08T20:28:40.158440
|
| 15 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,1231,happy|surprised|angry|disgusted,2026-01-08T20:28:42.069297
|
| 16 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,943,angry|disgusted|happy|surprised,2026-01-08T20:28:43.460471
|
| 17 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,937,disgusted|happy|surprised|angry,2026-01-08T20:28:45.133349
|
| 18 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,disgusted,5,incorrect,860,disgusted|angry|surprised|happy,2026-01-08T20:28:46.770888
|
| 19 |
+
anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1254,angry|surprised|happy|disgusted,2026-01-08T20:28:48.598528
|
| 20 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,disgusted,5,incorrect,1178,disgusted|angry|happy|surprised,2026-01-08T20:32:29.882922
|
| 21 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,847,angry|happy|surprised|disgusted,2026-01-08T20:32:32.561794
|
| 22 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,613,angry|happy|surprised|disgusted,2026-01-08T20:32:33.831538
|
| 23 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,494,angry|disgusted|happy|surprised,2026-01-08T20:32:34.849506
|
| 24 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,805,disgusted|happy|surprised|angry,2026-01-08T20:32:36.389731
|
| 25 |
+
anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,667,happy|surprised|disgusted|angry,2026-01-08T20:32:37.746231
|
| 26 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,disgusted,5,incorrect,1102,disgusted|angry|surprised|happy,2026-01-08T20:32:47.661429
|
| 27 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,2565,happy|disgusted|surprised|angry,2026-01-08T20:32:51.774891
|
| 28 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,828,disgusted|happy|surprised|angry,2026-01-08T20:32:53.701283
|
| 29 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,1475,happy|angry|surprised|disgusted,2026-01-08T20:32:56.217087
|
| 30 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,5211,disgusted|surprised|happy|angry,2026-01-08T20:33:02.344766
|
| 31 |
+
anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1167,happy|angry|surprised|disgusted,2026-01-08T20:33:04.307786
|
| 32 |
+
anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,disgusted,5,incorrect,1240,surprised|disgusted|happy|angry,2026-01-08T20:33:17.946078
|
| 33 |
+
anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,3260,angry|disgusted|surprised|happy,2026-01-08T20:33:22.407241
|
| 34 |
+
anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,2028,surprised|disgusted|happy|angry,2026-01-08T20:33:24.976249
|
| 35 |
+
anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,1157,happy|angry|surprised|disgusted,2026-01-08T20:33:26.665156
|
| 36 |
+
anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,5349,disgusted|angry|happy|surprised,2026-01-08T20:33:33.734201
|
| 37 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,3300,surprised|angry|happy|disgusted,2026-01-08T20:43:16.234329
|
| 38 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,disgusted,5,incorrect,12651,disgusted|surprised|happy|angry,2026-01-08T20:43:31.598273
|
| 39 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,happy,1,incorrect,931,angry|happy|disgusted|surprised,2026-01-08T20:43:33.111170
|
| 40 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,angry,3,correct,788,surprised|happy|angry|disgusted,2026-01-08T20:43:34.468996
|
| 41 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,disgusted,5,incorrect,1301,angry|surprised|disgusted|happy,2026-01-08T20:43:36.342882
|
| 42 |
+
anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,1047,disgusted|happy|surprised|angry,2026-01-08T20:43:37.940345
|
| 43 |
+
anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,2216,disgusted|surprised|happy|angry,2026-01-08T20:48:47.766427
|
| 44 |
+
anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,1078,happy|disgusted|surprised|angry,2026-01-08T20:48:49.587460
|
| 45 |
+
anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,1144,angry|disgusted|surprised|happy,2026-01-08T20:48:51.954683
|
| 46 |
+
anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,1460,happy|disgusted|surprised|angry,2026-01-08T20:48:54.523725
|
| 47 |
+
anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,angry,3,incorrect,5592,disgusted|surprised|angry|happy,2026-01-08T20:49:40.646632
|
| 48 |
+
anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,disgusted,5,correct,5517,angry|disgusted|surprised|happy,2026-01-08T20:49:47.471801
|
| 49 |
+
anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1580,surprised|angry|happy|disgusted,2026-01-08T20:49:50.541265
|
| 50 |
+
anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,3377,disgusted|happy|angry|surprised,2026-01-08T20:51:44.169517
|
| 51 |
+
anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,angry,3,incorrect,1434,happy|angry|surprised|disgusted,2026-01-08T20:51:47.668494
|
| 52 |
+
anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,4397,disgusted|angry|surprised|happy,2026-01-08T20:51:55.385533
|
| 53 |
+
anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,4194,happy|disgusted|angry|surprised,2026-01-08T20:52:00.573445
|
| 54 |
+
anon-5eba27cd-b6d1-4f1b-bc9a-0bef0ea6d251,5eba27cd-b6d1-4f1b-bc9a-0bef0ea6d251,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,4122,disgusted|surprised|happy|angry,2026-01-08T20:54:59.957487
|
| 55 |
+
anon-82cbc393-d79d-484d-8e20-4f4ea4fb3485,82cbc393-d79d-484d-8e20-4f4ea4fb3485,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,3139,surprised|happy|angry|disgusted,2026-01-08T20:57:27.921219
|
| 56 |
+
anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,3344,surprised|disgusted|happy|angry,2026-01-08T20:58:55.093129
|
| 57 |
+
anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,surprised,4,incorrect,3016,angry|happy|surprised|disgusted,2026-01-08T20:59:03.532575
|
| 58 |
+
anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,2947,disgusted|surprised|happy|angry,2026-01-08T20:59:08.803406
|
| 59 |
+
anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,1667,disgusted|surprised|happy|angry,2026-01-08T20:59:11.572648
|
| 60 |
+
anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,4128,surprised|angry|disgusted|happy,2026-01-08T20:59:26.792283
|
| 61 |
+
anon-1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,3174,angry|happy|surprised|disgusted,2026-01-08T21:01:10.029903
|
| 62 |
+
anon-1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,disgusted,5,correct,2901,angry|surprised|disgusted|happy,2026-01-08T21:01:15.104556
|
| 63 |
+
anon-25f74813-2c25-496c-a738-593891e58029,25f74813-2c25-496c-a738-593891e58029,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,4405,surprised|happy|angry|disgusted,2026-01-08T21:02:53.461864
|
| 64 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,3477,disgusted|surprised|happy|angry,2026-01-08T21:08:05.371495
|
| 65 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,4564,disgusted|happy|surprised|angry,2026-01-08T21:08:11.967255
|
| 66 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,3396,angry|disgusted|happy|surprised,2026-01-08T21:08:17.105324
|
| 67 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,2010,happy|surprised|angry|disgusted,2026-01-08T21:08:19.970215
|
| 68 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,2108,angry|surprised|disgusted|happy,2026-01-08T21:08:23.183190
|
| 69 |
+
anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,angry,3,incorrect,1526,disgusted|surprised|happy|angry,2026-01-08T21:08:26.154828
|
stimuli_metadata.csv
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
image_name,emotion,sex,ethnicity,angle,face_type
|
| 2 |
+
man_angry.png,angry,male,black,forward,human
|
| 3 |
+
woman_disgusted.jpg,disgusted,female,caucasian,front-left,human
|
| 4 |
+
woman_surprised.png,surprised,female,caucasian,front-right,human
|
| 5 |
+
kid_surprised.jpg,surprised,female,caucasian,forward,ai
|
| 6 |
+
woman2_happy.jpg,happy,female,black,forward,ai
|
| 7 |
+
oldman_angry.jpg,angry,male,caucasian,forward,ai
|