José Eliel Camargo Molina commited on
Commit
7d6b3a1
·
1 Parent(s): ef0676d

added questionaire and polish

Browse files
Human/{woman_disgusted.png → woman_fearful.png} RENAMED
File without changes
Human/{woman_surprised.png → woman_happy.png} RENAMED
File without changes
__pycache__/app.cpython-39.pyc CHANGED
Binary files a/__pycache__/app.cpython-39.pyc and b/__pycache__/app.cpython-39.pyc differ
 
app.py CHANGED
@@ -6,31 +6,35 @@ import random
6
  import time
7
  import csv
8
  import uuid
 
9
  from datetime import datetime
 
10
 
11
  # --- Configuration ---
12
  AI_FOLDER = "./AI"
13
  HUMAN_FOLDER = "./Human"
14
- CSV_FILE = "emotion_responses.csv"
 
15
  METADATA_FILE = "stimuli_metadata.csv"
16
  DEBLUR_DURATION_S = 5 # Seconds to go from Blur -> Clear
17
 
18
  # --- Advanced Features Config ---
19
  URL_PARAM_PARTICIPANT_ID = "pid"
20
- RANDOMIZE_EMOTION_ORDER_DEFAULT = True
 
21
  RANDOMIZE_EMOTION_ORDER_PARAM = "randomize"
22
  CHOICE_PLACEHOLDER = "Select an emotion..."
23
 
 
 
 
 
 
24
  # --- CSS STYLES ---
25
  APP_CSS = f"""
26
  #emotion_choice, #emotion_choice .wrap {{ max-height: 260px; overflow-y: auto; }}
27
- #next_btn {{ margin: 8px 0 12px 0; }}
28
- #start_btn,
29
- #start_btn button,
30
- #start_btn .gr-button,
31
- #next_btn,
32
- #next_btn button,
33
- #next_btn .gr-button {{
34
  font-size: 20px !important;
35
  padding: 12px 22px !important;
36
  min-height: 48px !important;
@@ -62,7 +66,13 @@ APP_CSS = f"""
62
  }}
63
 
64
  @media (max-width: 640px) {{
65
- #img_anim img, #img_static img {{ max-height: 280px; object-fit: contain; }}
 
 
 
 
 
 
66
  }}
67
 
68
  /* --- ANIMATED IMAGE (The Test) --- */
@@ -79,12 +89,16 @@ APP_CSS = f"""
79
  filter: blur(0px) !important;
80
  }}
81
 
82
- /* --- STATIC IMAGE (The Result) --- */
83
- /* No special CSS needed. It will just be a normal, clear image.
84
- We ensure it aligns perfectly with the animated one. */
85
- #img_static img {{
86
- display: block;
87
- filter: blur(0px);
 
 
 
 
88
  }}
89
 
90
  #progress_text {{
@@ -93,6 +107,76 @@ APP_CSS = f"""
93
  line-height: 1.2;
94
  }}
95
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
96
  #app_title {{
97
  text-align: center;
98
  margin-bottom: 16px;
@@ -109,25 +193,79 @@ APP_CSS = f"""
109
 
110
  # --- Constants & Mappings ---
111
  UNKNOWN_LABEL = "unknown"
112
- UNKNOWN_CODE = 0
113
  FILENAME_FIELD_ORDER = ["emotion"]
114
 
115
- EMOTION_CODE_MAP = {"happy": 1, "sad": 2, "fearful": 3, "exuberant": 4, "unknown": 0}
116
- SEX_CODE_MAP = {"male": 1, "female": 2, "other": 3, "unknown": 0}
117
- ETHNICITY_CODE_MAP = {"caucasian": 1, "black": 2, "asian": 3, "latino": 4, "middle-eastern": 5, "indigenous": 6, "other": 7, "unknown": 0}
118
- ANGLE_CODE_MAP = {"forward": 1, "front-left": 2, "front-right": 3, "left": 4, "right": 5, "up": 6, "down": 7, "unknown": 0}
119
- TYPE_CODE_MAP = {"human": 1, "ai": 2, "unknown": 0}
120
-
121
- CSV_HEADERS = [
122
- "participant_id", "session_id", "image_name", "image_source", "face_type", "face_type_code",
123
- "correct_emotion", "correct_emotion_code", "face_sex", "face_sex_code", "face_ethnicity", "face_ethnicity_code",
124
- "face_angle", "face_angle_code", "selected_emotion", "selected_emotion_code", "accuracy",
125
- "response_time_ms", "button_order", "timestamp",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
126
  ]
127
 
128
  # --- Data Structure ---
129
  class ImageData:
130
- def __init__(self, path, source, emotion, sex=UNKNOWN_LABEL, ethnicity=UNKNOWN_LABEL, angle=UNKNOWN_LABEL, face_type=UNKNOWN_LABEL):
 
 
 
 
 
 
 
 
 
 
 
131
  self.path = path
132
  self.source = source
133
  self.emotion = emotion
@@ -136,14 +274,19 @@ class ImageData:
136
  self.angle = angle
137
  self.face_type = face_type
138
  self.name = os.path.basename(path)
 
 
139
 
140
  # --- Helper Functions ---
141
  def normalize_label(value):
142
  if value is None: return ""
143
  return str(value).strip().lower().replace(" ", "-")
144
 
145
- def get_code(code_map, label):
146
- return code_map.get(normalize_label(label), UNKNOWN_CODE)
 
 
 
147
 
148
  def load_metadata(metadata_path):
149
  if not os.path.exists(metadata_path): return {}
@@ -186,25 +329,107 @@ def resolve_face_type(metadata, source):
186
  if metadata and metadata.get("face_type"): return normalize_label(metadata.get("face_type"))
187
  return normalize_label(source)
188
 
189
- def ensure_csv_file():
190
- if not os.path.exists(CSV_FILE):
191
- with open(CSV_FILE, 'w', newline='') as f:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
192
  writer = csv.writer(f)
193
- writer.writerow(CSV_HEADERS)
194
- return CSV_FILE, ""
195
-
196
- with open(CSV_FILE, newline='') as f:
197
  reader = csv.reader(f)
198
  existing_header = next(reader, None)
199
- if existing_header != CSV_HEADERS:
200
- base, ext = os.path.splitext(CSV_FILE)
201
  timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
202
- new_file = f"{base}_{timestamp}{ext or '.csv'}"
203
- with open(new_file, 'w', newline='') as f:
 
 
 
 
 
 
 
204
  writer = csv.writer(f)
205
- writer.writerow(CSV_HEADERS)
206
- return new_file, f"Using new results file: {new_file}"
207
- return CSV_FILE, ""
 
 
 
 
 
 
 
 
 
 
 
 
208
 
209
  def get_participant_id(request):
210
  if request is None: return ""
@@ -215,7 +440,9 @@ def scan_images():
215
  images = []
216
  emotions = set()
217
  metadata = load_metadata(METADATA_FILE)
218
- skipped = []
 
 
219
 
220
  for folder, source in [(AI_FOLDER, "AI"), (HUMAN_FOLDER, "Human")]:
221
  if not os.path.exists(folder): continue
@@ -226,20 +453,52 @@ def scan_images():
226
  meta = metadata.get(meta_key) or metadata.get(os.path.splitext(meta_key)[0]) or {}
227
  filename_fields = parse_filename_fields(path)
228
 
229
- emotion = resolve_field(meta, filename_fields, "emotion", "")
 
230
  if not emotion or emotion == UNKNOWN_LABEL:
231
- skipped.append(filename)
 
 
 
232
  continue
233
 
234
  sex = resolve_field(meta, filename_fields, "sex", UNKNOWN_LABEL)
235
  ethnicity = resolve_field(meta, filename_fields, "ethnicity", UNKNOWN_LABEL)
236
  angle = resolve_field(meta, filename_fields, "angle", UNKNOWN_LABEL)
 
 
 
237
  face_type = resolve_face_type(meta, source) or UNKNOWN_LABEL
 
 
238
 
239
  emotions.add(emotion)
240
- images.append(ImageData(path, source, emotion, sex=sex, ethnicity=ethnicity, angle=angle, face_type=face_type))
 
 
 
 
 
 
 
 
 
 
 
 
241
 
242
- if skipped: print(f"[DEBUG] Skipped {len(skipped)} images without emotion label.")
 
 
 
 
 
 
 
 
 
 
 
243
  return images, emotions
244
 
245
  def crop_face(image_path, target_size=512):
@@ -295,19 +554,39 @@ def initialize_experiment(request: gr.Request):
295
  else:
296
  msg = f"Participant ID: {participant_id}"
297
 
298
- csv_file, csv_status = ensure_csv_file()
299
-
300
- random.shuffle(images)
 
 
 
 
 
 
 
 
 
 
 
301
  initial_state = {
302
  "participant_id": participant_id,
303
  "session_id": session_id,
304
- "csv_file": csv_file,
305
- "all_images": images,
306
- "emotions": sorted(list(emotions)),
 
 
 
 
307
  "current_index": -1,
308
  "current_choices": [],
309
  "randomize_emotions": RANDOMIZE_EMOTION_ORDER_DEFAULT,
310
  "start_time": None,
 
 
 
 
 
311
  }
312
 
313
  if request:
@@ -315,23 +594,56 @@ def initialize_experiment(request: gr.Request):
315
  if val and val.lower() in ['0','false','no']:
316
  initial_state["randomize_emotions"] = False
317
 
 
 
 
 
318
  return initial_state, f"{msg}\n{csv_status}", gr.update(interactive=True)
319
 
320
  def start_interface(state):
321
  if not state:
322
- return gr.update(visible=True), gr.update(visible=True), gr.update(visible=False)
323
- return gr.update(visible=False), gr.update(visible=False), gr.update(visible=True)
 
 
 
 
 
 
 
 
 
 
 
 
324
 
325
  def show_next_image(state):
326
- # Returns: [state, img_anim, img_static, progress_text, anim_visible, static_visible, choices_update]
327
- if not state:
328
- return state, None, None, "Error", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
 
 
 
 
 
 
329
 
330
  state["current_index"] += 1
331
  index = state["current_index"]
332
 
333
  if index >= len(state["all_images"]):
334
- return state, None, None, "# Experiment complete!", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
 
 
 
 
 
 
 
 
 
 
 
335
 
336
  image_data = state["all_images"][index]
337
  cropped_image = crop_face(image_data.path)
@@ -342,73 +654,314 @@ def show_next_image(state):
342
 
343
  state["start_time"] = time.monotonic()
344
 
 
345
  choices = list(state["emotions"])
346
- if state.get("randomize_emotions"):
347
- choices = random.sample(choices, k=len(choices))
348
  state["current_choices"] = choices
349
  choices_with_placeholder = [CHOICE_PLACEHOLDER] + choices
350
 
351
  return (
352
- state,
353
- cropped_image, # For Animated Component
354
- cropped_image, # For Static Component
355
- f"Image {index + 1} of {len(state['all_images'])}",
356
- gr.update(visible=True, interactive=False), # Show Animated
357
- gr.update(visible=False), # Hide Static
358
  gr.update(choices=choices_with_placeholder, value=CHOICE_PLACEHOLDER, visible=True, interactive=True),
 
359
  )
360
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
361
  def on_emotion_select(state, selected_emotion):
362
- # Returns: [anim_visible, static_visible, choices_interactive, next_btn_interactive]
363
  if not state or not selected_emotion or normalize_label(selected_emotion) == normalize_label(CHOICE_PLACEHOLDER):
364
  # Do nothing if placeholder selected
365
- return gr.update(), gr.update(), gr.update(), gr.update()
366
 
367
  try:
368
  start_time = state.get("start_time") or time.monotonic()
369
  response_time_ms = int(round((time.monotonic() - start_time) * 1000))
370
  image_data = state["all_images"][state["current_index"]]
371
- normalized_sel = normalize_label(selected_emotion)
372
  accuracy = "correct" if normalized_sel == image_data.emotion else "incorrect"
373
-
374
- with open(state["csv_file"], 'a', newline='') as f:
 
 
 
 
 
 
 
 
 
 
 
 
 
375
  writer = csv.writer(f)
376
- writer.writerow([
377
- state["participant_id"], state["session_id"], image_data.name, image_data.source,
378
- image_data.face_type, get_code(TYPE_CODE_MAP, image_data.face_type),
379
- image_data.emotion, get_code(EMOTION_CODE_MAP, image_data.emotion),
380
- image_data.sex, get_code(SEX_CODE_MAP, image_data.sex),
381
- image_data.ethnicity, get_code(ETHNICITY_CODE_MAP, image_data.ethnicity),
382
- image_data.angle, get_code(ANGLE_CODE_MAP, image_data.angle),
383
- normalized_sel, get_code(EMOTION_CODE_MAP, normalized_sel),
384
- accuracy, response_time_ms, "|".join(state.get("current_choices", [])),
385
- datetime.now().isoformat(),
386
- ])
387
- print(f"[DEBUG] Saved {normalized_sel} ({response_time_ms}ms)")
388
  except Exception as e:
389
  print(f"Error saving CSV: {e}")
390
 
391
  # Hide Animated, Show Static (Snap), Disable Dropdown, Enable Next
392
- return gr.update(visible=False), gr.update(visible=True), gr.update(interactive=False), gr.update(interactive=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
393
 
394
  # --- JAVASCRIPT ---
395
  # Logic: Find the animated image element, reset its class to remove 'image-clear',
396
  # force a reflow, then add 'image-clear' to start the transition.
397
  js_functions = """
398
  () => {
 
 
 
 
 
 
 
 
 
 
 
 
399
  window.triggerDeblur = function() {
400
  const el = document.querySelector("#img_anim img");
401
  if (el) {
402
- // 1. Reset to start state (Blurred)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
403
  el.classList.remove('image-clear');
404
-
405
- // 2. Force Browser Reflow (Crucial for restarting CSS animations)
406
- void el.offsetWidth;
407
-
408
- // 3. Start Animation
409
- setTimeout(() => {
410
- el.classList.add('image-clear');
411
- }, 100);
412
  }
413
  };
414
  }
@@ -431,8 +984,6 @@ with gr.Blocks(theme=gr.themes.Soft(), css=APP_CSS) as app:
431
  with gr.Group():
432
  # Animated Image: Visible initially, performs blur->clear
433
  image_anim = gr.Image(label="", elem_id="img_anim", height=400, width=400, interactive=False, show_label=False, visible=True)
434
- # Static Image: Hidden initially, shows instantly when user selects answer
435
- image_static = gr.Image(label="", elem_id="img_static", height=400, width=400, interactive=False, show_label=False, visible=False)
436
 
437
  progress_text = gr.Markdown("", elem_id="progress_text")
438
 
@@ -440,6 +991,63 @@ with gr.Blocks(theme=gr.themes.Soft(), css=APP_CSS) as app:
440
  emotion_choice = gr.Radio(choices=[], label="Select the emotion", visible=False, interactive=True, elem_id="emotion_choice")
441
  next_image_btn = gr.Button("Next Image ▶", variant="secondary", visible=True, interactive=False, elem_id="next_btn")
442
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
443
  # --- Event Wiring ---
444
 
445
  # App Load
@@ -447,11 +1055,23 @@ with gr.Blocks(theme=gr.themes.Soft(), css=APP_CSS) as app:
447
 
448
  # Start Button -> Show Interface -> Load First Image -> Trigger Animation
449
  start_btn.click(
450
- fn=start_interface, inputs=[state], outputs=[instructions_section, start_btn, main_section]
 
 
 
 
 
 
 
 
451
  ).then(
452
- fn=show_next_image,
453
- inputs=[state],
454
- outputs=[state, image_anim, image_static, progress_text, image_anim, image_static, emotion_choice]
 
 
 
 
455
  ).then(
456
  fn=None, js="() => window.triggerDeblur()"
457
  )
@@ -460,17 +1080,122 @@ with gr.Blocks(theme=gr.themes.Soft(), css=APP_CSS) as app:
460
  emotion_choice.change(
461
  fn=on_emotion_select,
462
  inputs=[state, emotion_choice],
463
- outputs=[image_anim, image_static, emotion_choice, next_image_btn]
 
 
 
464
  )
465
 
466
  # Next Button -> Load New Image -> Reset Layout -> Trigger Animation
467
  next_image_btn.click(
468
- fn=show_next_image,
469
- inputs=[state],
470
- outputs=[state, image_anim, image_static, progress_text, image_anim, image_static, emotion_choice]
 
 
 
 
 
 
 
 
 
 
471
  ).then(
472
  fn=None, js="() => window.triggerDeblur()"
473
  )
474
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
475
  if __name__ == "__main__":
476
  app.launch()
 
6
  import time
7
  import csv
8
  import uuid
9
+ import shutil
10
  from datetime import datetime
11
+ from functools import partial
12
 
13
  # --- Configuration ---
14
  AI_FOLDER = "./AI"
15
  HUMAN_FOLDER = "./Human"
16
+ PART1_CSV_FILE = "emotion_responses_part1.csv"
17
+ PART2_CSV_FILE = "emotion_responses_part2.csv"
18
  METADATA_FILE = "stimuli_metadata.csv"
19
  DEBLUR_DURATION_S = 5 # Seconds to go from Blur -> Clear
20
 
21
  # --- Advanced Features Config ---
22
  URL_PARAM_PARTICIPANT_ID = "pid"
23
+ # Keep emotion order fixed across all participants.
24
+ RANDOMIZE_EMOTION_ORDER_DEFAULT = False
25
  RANDOMIZE_EMOTION_ORDER_PARAM = "randomize"
26
  CHOICE_PLACEHOLDER = "Select an emotion..."
27
 
28
+ # --- Sampling Config ---
29
+ BALANCE_SUBSET_DEFAULT = True
30
+ MAX_PER_STRATUM = None # Optionally set to an int to cap trials per (type, emotion)
31
+ ALLOWED_ANGLES = {"forward"} # Restrict to front-facing stimuli.
32
+
33
  # --- CSS STYLES ---
34
  APP_CSS = f"""
35
  #emotion_choice, #emotion_choice .wrap {{ max-height: 260px; overflow-y: auto; }}
36
+ #start_btn > button,
37
+ #next_btn > button {{
 
 
 
 
 
38
  font-size: 20px !important;
39
  padding: 12px 22px !important;
40
  min-height: 48px !important;
 
66
  }}
67
 
68
  @media (max-width: 640px) {{
69
+ #img_anim img {{ max-height: 280px; object-fit: contain; }}
70
+ }}
71
+
72
+ #img_anim img {{
73
+ width: 100%;
74
+ height: 100%;
75
+ object-fit: contain;
76
  }}
77
 
78
  /* --- ANIMATED IMAGE (The Test) --- */
 
89
  filter: blur(0px) !important;
90
  }}
91
 
92
+ /* Snap instantly to clear when the participant selects an answer. */
93
+ .image-snap {{
94
+ transition: none !important;
95
+ filter: blur(0px) !important;
96
+ }}
97
+
98
+ /* Force a blurred state immediately (used before loading the next image). */
99
+ .image-preblur {{
100
+ transition: none !important;
101
+ filter: blur(50px) !important;
102
  }}
103
 
104
  #progress_text {{
 
107
  line-height: 1.2;
108
  }}
109
 
110
+ #part2_section {{
111
+ padding: 16px 20px;
112
+ box-sizing: border-box;
113
+ }}
114
+
115
+ #part2_section h1,
116
+ #part2_section h2 {{
117
+ font-size: 44px !important;
118
+ }}
119
+
120
+ #part2_instructions_section {{
121
+ text-align: center;
122
+ }}
123
+
124
+ #part2_instructions_section h1 {{
125
+ font-size: 64px !important;
126
+ margin-bottom: 8px !important;
127
+ }}
128
+
129
+ #part2_instructions_section h2 {{
130
+ font-size: 28px !important;
131
+ }}
132
+
133
+ #part2_start_btn > button {{
134
+ font-size: 22px !important;
135
+ padding: 12px 26px !important;
136
+ }}
137
+
138
+ #part2_completion_text {{
139
+ text-align: center;
140
+ }}
141
+
142
+ #part2_completion_text h1 {{
143
+ font-size: 140px !important;
144
+ margin: 0 !important;
145
+ line-height: 1 !important;
146
+ }}
147
+
148
+ #part2_completion_text h2 {{
149
+ font-size: 48px !important;
150
+ margin-top: 8px !important;
151
+ }}
152
+
153
+ #part2_section input[type="number"] {{
154
+ font-size: 30px !important;
155
+ font-weight: 700 !important;
156
+ }}
157
+
158
+ #part2_section label,
159
+ #part2_section .wrap label,
160
+ #part2_section .wrap span {{
161
+ font-size: 20px !important;
162
+ }}
163
+
164
+ #part2_section .wrap {{
165
+ display: flex !important;
166
+ flex-direction: row !important;
167
+ flex-wrap: wrap !important;
168
+ gap: 8px 12px;
169
+ }}
170
+
171
+ #part2_section input[type="radio"] {{
172
+ transform: scale(1.35);
173
+ margin-right: 8px;
174
+ }}
175
+
176
+ #part2_section .wrap label {{
177
+ padding: 6px 10px !important;
178
+ }}
179
+
180
  #app_title {{
181
  text-align: center;
182
  margin-bottom: 16px;
 
193
 
194
  # --- Constants & Mappings ---
195
  UNKNOWN_LABEL = "unknown"
 
196
  FILENAME_FIELD_ORDER = ["emotion"]
197
 
198
+ # Fixed emotion set and order for all trials.
199
+ EMOTION_CHOICES_ORDER = [
200
+ "neutral",
201
+ "happy",
202
+ "angry",
203
+ "afraid",
204
+ "disgusted",
205
+ "sad",
206
+ "surprised",
207
+ ]
208
+ ALLOWED_EMOTIONS = set(EMOTION_CHOICES_ORDER)
209
+ EMOTION_ALIASES = {
210
+ "fearful": "afraid",
211
+ "fear": "afraid",
212
+ }
213
+
214
+ # --- Stimulus Types ---
215
+ STIMULUS_TYPE_REAL = "real_kdef"
216
+ STIMULUS_TYPE_AI = "ai_kdef_like"
217
+
218
+ # --- Ratings Config (Part 2) ---
219
+ RATING_SCALE_MIN = 1
220
+ RATING_SCALE_MAX = 7
221
+ SCALE_CHOICES = list(range(RATING_SCALE_MIN, RATING_SCALE_MAX + 1))
222
+
223
+ # --- Part 2 Rating Keys ---
224
+ PART2_KEYS = ["age", "masc", "attr", "quality", "artifact"]
225
+
226
+ # Part-specific outputs: one row per image per part, with minimal metadata.
227
+ PART1_HEADERS = [
228
+ "participant_id",
229
+ "session_id",
230
+ "stimulus_id",
231
+ "stimulus_type",
232
+ "target_emotion",
233
+ "emotion_trial_index",
234
+ "emotion_rt_ms",
235
+ "selected_emotion",
236
+ "accuracy",
237
+ "emotion_timestamp",
238
+ ]
239
+
240
+ PART2_HEADERS = [
241
+ "participant_id",
242
+ "session_id",
243
+ "stimulus_id",
244
+ "stimulus_type",
245
+ "target_emotion",
246
+ "matching_trial_index",
247
+ "match_age_rating",
248
+ "match_masc_rating",
249
+ "match_attr_rating",
250
+ "match_quality_rating",
251
+ "match_artifact_rating",
252
+ "matching_timestamp",
253
  ]
254
 
255
  # --- Data Structure ---
256
  class ImageData:
257
+ def __init__(
258
+ self,
259
+ path,
260
+ source,
261
+ emotion,
262
+ sex=UNKNOWN_LABEL,
263
+ ethnicity=UNKNOWN_LABEL,
264
+ angle=UNKNOWN_LABEL,
265
+ face_type=UNKNOWN_LABEL,
266
+ stimulus_type=UNKNOWN_LABEL,
267
+ stimulus_id="",
268
+ ):
269
  self.path = path
270
  self.source = source
271
  self.emotion = emotion
 
274
  self.angle = angle
275
  self.face_type = face_type
276
  self.name = os.path.basename(path)
277
+ self.stimulus_id = stimulus_id or os.path.splitext(self.name)[0].strip().lower()
278
+ self.stimulus_type = stimulus_type or UNKNOWN_LABEL
279
 
280
  # --- Helper Functions ---
281
  def normalize_label(value):
282
  if value is None: return ""
283
  return str(value).strip().lower().replace(" ", "-")
284
 
285
+ def canonicalize_emotion(label):
286
+ norm = normalize_label(label)
287
+ if not norm:
288
+ return ""
289
+ return EMOTION_ALIASES.get(norm, norm)
290
 
291
  def load_metadata(metadata_path):
292
  if not os.path.exists(metadata_path): return {}
 
329
  if metadata and metadata.get("face_type"): return normalize_label(metadata.get("face_type"))
330
  return normalize_label(source)
331
 
332
+ def resolve_stimulus_type(face_type, source):
333
+ ft = normalize_label(face_type) or normalize_label(source)
334
+ if ft in {"human", "real", "real-kdef", "real_kdef"}:
335
+ return STIMULUS_TYPE_REAL
336
+ if ft in {"ai", "synthetic", "ai-kdef-like", "ai_kdef_like"}:
337
+ return STIMULUS_TYPE_AI
338
+ # Fall back to folder/source label.
339
+ return STIMULUS_TYPE_REAL if normalize_label(source) == "human" else STIMULUS_TYPE_AI
340
+
341
+ def make_stimulus_id(filename):
342
+ stem = os.path.splitext(os.path.basename(filename))[0]
343
+ return stem.strip().lower()
344
+
345
+ def select_balanced_subset(images, max_per_stratum=None):
346
+ if not images:
347
+ return []
348
+ strata = {}
349
+ for img in images:
350
+ key = (img.stimulus_type, img.emotion)
351
+ strata.setdefault(key, []).append(img)
352
+ counts = {k: len(v) for k, v in strata.items()}
353
+ if not counts:
354
+ return images
355
+ min_count = min(counts.values())
356
+ if max_per_stratum is not None:
357
+ min_count = min(min_count, int(max_per_stratum))
358
+ if min_count <= 0:
359
+ return images
360
+
361
+ sampled = []
362
+ for key, items in strata.items():
363
+ if len(items) <= min_count:
364
+ sampled.extend(items)
365
+ else:
366
+ sampled.extend(random.sample(items, k=min_count))
367
+ random.shuffle(sampled)
368
+ print(f"[DEBUG] Balanced subset: {len(sampled)} trials across {len(strata)} strata (per-stratum={min_count}).")
369
+ return sampled
370
+
371
+ def build_row_template(state, image_data):
372
+ # Minimal row template: only fields that are written to the part CSVs.
373
+ return {
374
+ "participant_id": state.get("participant_id", ""),
375
+ "session_id": state.get("session_id", ""),
376
+ "stimulus_id": image_data.stimulus_id,
377
+ "stimulus_type": image_data.stimulus_type,
378
+ "target_emotion": image_data.emotion,
379
+ # Part 1 fields (filled in later).
380
+ "emotion_trial_index": "",
381
+ "emotion_rt_ms": "",
382
+ "selected_emotion": "",
383
+ "accuracy": "",
384
+ "emotion_timestamp": "",
385
+ # Part 2 fields (filled in later).
386
+ "matching_trial_index": "",
387
+ "match_age_rating": "",
388
+ "match_masc_rating": "",
389
+ "match_attr_rating": "",
390
+ "match_quality_rating": "",
391
+ "match_artifact_rating": "",
392
+ "matching_timestamp": "",
393
+ }
394
+
395
+ def ensure_csv_file_for(file_path, headers):
396
+ if not os.path.exists(file_path):
397
+ with open(file_path, "w", newline="") as f:
398
  writer = csv.writer(f)
399
+ writer.writerow(headers)
400
+ return file_path, ""
401
+
402
+ with open(file_path, newline="") as f:
403
  reader = csv.reader(f)
404
  existing_header = next(reader, None)
405
+ if existing_header != headers:
406
+ base, ext = os.path.splitext(file_path)
407
  timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
408
+ backup_file = f"{base}_{timestamp}{ext or '.csv'}"
409
+ try:
410
+ shutil.copy2(file_path, backup_file)
411
+ backup_msg = f"Copied existing results to: {backup_file}"
412
+ except Exception as e:
413
+ backup_msg = f"Could not copy existing results ({e})."
414
+
415
+ # Reinitialize the base file with the expected header.
416
+ with open(file_path, "w", newline="") as f:
417
  writer = csv.writer(f)
418
+ writer.writerow(headers)
419
+ return file_path, f"{backup_msg}\nReinitialized results file: {file_path}"
420
+ return file_path, ""
421
+
422
+ def ensure_csv_files():
423
+ part1_file, part1_status = ensure_csv_file_for(PART1_CSV_FILE, PART1_HEADERS)
424
+ part2_file, part2_status = ensure_csv_file_for(PART2_CSV_FILE, PART2_HEADERS)
425
+ statuses = [s for s in [part1_status, part2_status] if s]
426
+ status_lines = [
427
+ f"Part 1 file: {part1_file}",
428
+ f"Part 2 file: {part2_file}",
429
+ ]
430
+ status_lines.extend(statuses)
431
+ status_msg = "\n".join(status_lines)
432
+ return part1_file, part2_file, status_msg
433
 
434
  def get_participant_id(request):
435
  if request is None: return ""
 
440
  images = []
441
  emotions = set()
442
  metadata = load_metadata(METADATA_FILE)
443
+ skipped_missing_emotion = []
444
+ skipped_angle = []
445
+ skipped_emotion = []
446
 
447
  for folder, source in [(AI_FOLDER, "AI"), (HUMAN_FOLDER, "Human")]:
448
  if not os.path.exists(folder): continue
 
453
  meta = metadata.get(meta_key) or metadata.get(os.path.splitext(meta_key)[0]) or {}
454
  filename_fields = parse_filename_fields(path)
455
 
456
+ emotion_raw = resolve_field(meta, filename_fields, "emotion", "")
457
+ emotion = canonicalize_emotion(emotion_raw)
458
  if not emotion or emotion == UNKNOWN_LABEL:
459
+ skipped_missing_emotion.append(filename)
460
+ continue
461
+ if emotion not in ALLOWED_EMOTIONS:
462
+ skipped_emotion.append((filename, emotion_raw))
463
  continue
464
 
465
  sex = resolve_field(meta, filename_fields, "sex", UNKNOWN_LABEL)
466
  ethnicity = resolve_field(meta, filename_fields, "ethnicity", UNKNOWN_LABEL)
467
  angle = resolve_field(meta, filename_fields, "angle", UNKNOWN_LABEL)
468
+ if ALLOWED_ANGLES and angle not in ALLOWED_ANGLES:
469
+ skipped_angle.append((filename, angle))
470
+ continue
471
  face_type = resolve_face_type(meta, source) or UNKNOWN_LABEL
472
+ stimulus_type = resolve_stimulus_type(face_type, source)
473
+ stimulus_id = make_stimulus_id(filename)
474
 
475
  emotions.add(emotion)
476
+ images.append(
477
+ ImageData(
478
+ path,
479
+ source,
480
+ emotion,
481
+ sex=sex,
482
+ ethnicity=ethnicity,
483
+ angle=angle,
484
+ face_type=face_type,
485
+ stimulus_type=stimulus_type,
486
+ stimulus_id=stimulus_id,
487
+ )
488
+ )
489
 
490
+ if skipped_missing_emotion:
491
+ print(f"[DEBUG] Skipped {len(skipped_missing_emotion)} images without emotion label.")
492
+ if skipped_angle:
493
+ print(
494
+ f"[DEBUG] Filtered out {len(skipped_angle)} images due to angle "
495
+ f"(allowed={sorted(ALLOWED_ANGLES)})."
496
+ )
497
+ if skipped_emotion:
498
+ print(
499
+ f"[DEBUG] Filtered out {len(skipped_emotion)} images due to emotion "
500
+ f"(allowed={EMOTION_CHOICES_ORDER})."
501
+ )
502
  return images, emotions
503
 
504
  def crop_face(image_path, target_size=512):
 
554
  else:
555
  msg = f"Participant ID: {participant_id}"
556
 
557
+ csv_file_part1, csv_file_part2, csv_status = ensure_csv_files()
558
+
559
+ # Optionally select a balanced subset across (stimulus_type, emotion).
560
+ selected_images = images
561
+ if BALANCE_SUBSET_DEFAULT:
562
+ balanced = select_balanced_subset(images, MAX_PER_STRATUM)
563
+ if balanced:
564
+ selected_images = balanced
565
+
566
+ available_emotions = {img.emotion for img in selected_images}
567
+ missing_emotions = [e for e in EMOTION_CHOICES_ORDER if e not in available_emotions]
568
+ if missing_emotions:
569
+ print(f"[DEBUG] No stimuli found for emotions: {missing_emotions}")
570
+ random.shuffle(selected_images)
571
  initial_state = {
572
  "participant_id": participant_id,
573
  "session_id": session_id,
574
+ "csv_file": csv_file_part1,
575
+ "csv_file_part1": csv_file_part1,
576
+ "csv_file_part2": csv_file_part2,
577
+ "all_images": selected_images,
578
+ "part2_images": [],
579
+ # Fixed order across participants.
580
+ "emotions": list(EMOTION_CHOICES_ORDER),
581
  "current_index": -1,
582
  "current_choices": [],
583
  "randomize_emotions": RANDOMIZE_EMOTION_ORDER_DEFAULT,
584
  "start_time": None,
585
+ "phase": "emotion",
586
+ "part2_started": False,
587
+ "part2_index": -1,
588
+ "part2_start_time": None,
589
+ "part2_touched": {k: False for k in PART2_KEYS},
590
  }
591
 
592
  if request:
 
594
  if val and val.lower() in ['0','false','no']:
595
  initial_state["randomize_emotions"] = False
596
 
597
+ part2_images = list(selected_images)
598
+ random.shuffle(part2_images)
599
+ initial_state["part2_images"] = part2_images
600
+
601
  return initial_state, f"{msg}\n{csv_status}", gr.update(interactive=True)
602
 
603
  def start_interface(state):
604
  if not state:
605
+ return (
606
+ gr.update(visible=True),
607
+ gr.update(visible=True),
608
+ gr.update(visible=False),
609
+ gr.update(visible=False),
610
+ gr.update(visible=False),
611
+ )
612
+ return (
613
+ gr.update(visible=False),
614
+ gr.update(visible=False),
615
+ gr.update(visible=True),
616
+ gr.update(visible=False),
617
+ gr.update(visible=False),
618
+ )
619
 
620
  def show_next_image(state):
621
+ # Returns: [state, img_anim_update, progress_text, choices_update, next_btn_update]
622
+ if not state:
623
+ return (
624
+ state,
625
+ gr.update(visible=False, interactive=False),
626
+ "Error",
627
+ gr.update(visible=False, interactive=False),
628
+ gr.update(visible=False, interactive=False),
629
+ )
630
 
631
  state["current_index"] += 1
632
  index = state["current_index"]
633
 
634
  if index >= len(state["all_images"]):
635
+ state["part2_started"] = False
636
+ state["part2_index"] = -1
637
+ state["part2_start_time"] = None
638
+ state["part2_touched"] = {k: False for k in PART2_KEYS}
639
+ state["phase"] = "part2_instructions"
640
+ return (
641
+ state,
642
+ gr.update(visible=False),
643
+ "",
644
+ gr.update(visible=False, interactive=False),
645
+ gr.update(visible=False, interactive=False),
646
+ )
647
 
648
  image_data = state["all_images"][index]
649
  cropped_image = crop_face(image_data.path)
 
654
 
655
  state["start_time"] = time.monotonic()
656
 
657
+ # Keep emotion order fixed across all trials and participants.
658
  choices = list(state["emotions"])
 
 
659
  state["current_choices"] = choices
660
  choices_with_placeholder = [CHOICE_PLACEHOLDER] + choices
661
 
662
  return (
663
+ state,
664
+ gr.update(value=cropped_image, visible=True, interactive=False),
665
+ f"Image {index + 1} of {len(state['all_images'])}",
 
 
 
666
  gr.update(choices=choices_with_placeholder, value=CHOICE_PLACEHOLDER, visible=True, interactive=True),
667
+ gr.update(interactive=False, visible=True),
668
  )
669
 
670
+ def update_sections_for_phase(state):
671
+ if not state:
672
+ return gr.update(), gr.update(), gr.update()
673
+ phase = state.get("phase")
674
+ if phase == "emotion":
675
+ return gr.update(visible=True), gr.update(visible=False), gr.update(visible=False)
676
+ if phase == "part2_instructions":
677
+ return gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
678
+ if phase in {"part2", "complete"}:
679
+ return gr.update(visible=False), gr.update(visible=False), gr.update(visible=True)
680
+ return gr.update(), gr.update(), gr.update()
681
+
682
def start_part2(state):
    """Transition from the Part 2 instructions screen into the rating phase.

    Only acts when the participant is actually on the instructions screen;
    otherwise the state is returned unchanged. Resets all Part 2 bookkeeping.
    """
    if state and state.get("phase") == "part2_instructions":
        state.update(
            phase="part2",
            part2_started=False,
            part2_index=-1,
            part2_start_time=None,
            part2_touched={key: False for key in PART2_KEYS},
        )
    return state
691
+
692
def on_emotion_select(state, selected_emotion):
    """Record the participant's emotion choice for the current image.

    Appends one row to the Part 1 CSV (selection, accuracy, reaction time),
    then freezes the image and dropdown and enables the Next button.

    Returns: [state, image_update, choices_interactive, next_btn_interactive]
    """
    placeholder_chosen = (
        not state
        or not selected_emotion
        or normalize_label(selected_emotion) == normalize_label(CHOICE_PLACEHOLDER)
    )
    if placeholder_chosen:
        # Ignore the event when nothing meaningful was selected.
        return state, gr.update(), gr.update(), gr.update()

    try:
        started_at = state.get("start_time") or time.monotonic()
        elapsed_ms = int(round((time.monotonic() - started_at) * 1000))
        stimulus = state["all_images"][state["current_index"]]
        chosen = canonicalize_emotion(selected_emotion)

        row = build_row_template(state, stimulus)
        row["selected_emotion"] = chosen
        row["accuracy"] = "correct" if chosen == stimulus.emotion else "incorrect"
        row["emotion_trial_index"] = state.get("current_index", -1) + 1
        row["emotion_rt_ms"] = elapsed_ms
        row["emotion_timestamp"] = datetime.now().isoformat()

        # Prefer the dedicated Part 1 file; fall back to the shared one.
        target_csv = state.get("csv_file_part1") or state.get("csv_file")
        with open(target_csv, "a", newline="") as handle:
            csv.writer(handle).writerow([row[header] for header in PART1_HEADERS])
        print(f"[DEBUG] Saved Part 1 rating ({chosen}, {elapsed_ms}ms) -> {target_csv}")
    except Exception as e:
        # Best-effort persistence: a save failure must not block the session.
        print(f"Error saving CSV: {e}")

    # Freeze the stimulus and dropdown; allow advancing to the next trial.
    return (
        state,
        gr.update(visible=True, interactive=False),
        gr.update(interactive=False),
        gr.update(interactive=True),
    )
732
+
733
+ # --- Part 2 Helpers ---
734
+
735
+ def _to_int(value):
736
+ if value is None or value == "":
737
+ return ""
738
+ try:
739
+ return int(value)
740
+ except Exception:
741
+ return ""
742
+
743
+ # --- Part 2: Face Rating Logic ---
744
+
745
def start_part2_phase(state):
    """Initialise Part 2 bookkeeping and swap the visible sections.

    Returns: [state, main_section, part2_section]
    """
    ready = bool(state) and state.get("phase") == "part2" and not state.get("part2_started")
    if not ready:
        # Wrong phase, or Part 2 already running: leave the layout alone.
        return state, gr.update(), gr.update()
    state["part2_started"] = True
    state["part2_index"] = -1
    state["part2_start_time"] = None
    state["part2_touched"] = {key: False for key in PART2_KEYS}
    return state, gr.update(visible=False), gr.update(visible=True)
754
+
755
def _no_part2_updates(state):
    """No-op output tuple for Part 2 callbacks fired outside the rating phase.

    Returns: [state, part2_image, part2_progress_text, part2_status_text,
    part2_completion_text, part2_age_radio, part2_masc_radio, part2_attr_radio,
    part2_quality_radio, part2_artifact_radio, part2_next_btn]
    """
    return (
        state,
        gr.update(visible=False),            # part2_image stays hidden
        gr.update(value="", visible=False),  # progress text cleared
        gr.update(value="", visible=False),  # status text cleared
        *[gr.update() for _ in range(7)],    # completion, 5 radios, next button
    )
772
+
773
def _part2_reset_updates():
    """Fresh (cleared, enabled) updates for the five Part 2 rating radios:
    age, masculinity, attractiveness, quality, artifact — in that order."""
    return tuple(gr.update(value=None, interactive=True) for _ in range(5))
781
+
782
def show_next_part2_image(state):
    """Advance to the next Part 2 image, or to the completion screen.

    Skips images whose face crop fails — iteratively rather than via the
    previous self-recursion, so a long run of bad files cannot exhaust the
    call stack. When all images are exhausted, marks the phase complete and
    disables every rating control.

    Returns: [state, part2_image, part2_progress_text, part2_status_text,
    part2_completion_text, part2_age_radio, part2_masc_radio, part2_attr_radio,
    part2_quality_radio, part2_artifact_radio, part2_next_btn]
    """
    if not state or state.get("phase") != "part2" or not state.get("part2_started"):
        return _no_part2_updates(state)

    images = state.get("part2_images") or state.get("all_images") or []

    # Walk forward until we find a croppable image or run out of stimuli.
    while True:
        state["part2_index"] = state.get("part2_index", -1) + 1
        index = state["part2_index"]

        if index >= len(images):
            state["phase"] = "complete"
            completion_md = "# ✅\n## Complete!"
            return (
                state,
                gr.update(value=None, visible=False),
                gr.update(value="", visible=False),
                gr.update(value="", visible=False),
                gr.update(value=completion_md, visible=True),
                gr.update(interactive=False),
                gr.update(interactive=False),
                gr.update(interactive=False),
                gr.update(interactive=False),
                gr.update(interactive=False),
                gr.update(interactive=False),
            )

        image_data = images[index]
        cropped_image = crop_face(image_data.path)
        if cropped_image is not None:
            break  # usable stimulus found

    # New trial: restart the reaction timer and clear the rating-gate flags.
    state["part2_start_time"] = time.monotonic()
    state["part2_touched"] = {k: False for k in PART2_KEYS}
    reset_updates = _part2_reset_updates()

    return (
        state,
        gr.update(value=cropped_image, visible=True),
        gr.update(value=f"Image {index + 1} of {len(images)}", visible=True),
        gr.update(value="Rate all five items to enable Next.", visible=True),
        gr.update(value="", visible=False),
        reset_updates[0],
        reset_updates[1],
        reset_updates[2],
        reset_updates[3],
        reset_updates[4],
        gr.update(interactive=False),
    )
829
+
830
def _mark_part2_touched(state, _value, key):
    """Record that one Part 2 rating (`key`) was answered and re-gate Next.

    Wired (via functools.partial) to each rating radio's .change event.
    A rating counts as answered for any non-empty value.

    Returns: [state, part2_next_btn, part2_status_text, part2_completion_text]
    """
    if not state or state.get("phase") != "part2" or not state.get("part2_started"):
        return state, gr.update(), gr.update(), gr.update()
    touched = dict(state.get("part2_touched") or {})
    touched[key] = _value not in (None, "")
    state["part2_touched"] = touched
    ready = all(touched.get(k, False) for k in PART2_KEYS)
    message = "All items answered. Click Next." if ready else "Rate all five items to continue."
    # BUG FIX: the status text was passed positionally (gr.update(message)),
    # which never sets the component's value; it must be passed as value=.
    return state, gr.update(interactive=ready), gr.update(value=message), gr.update()
839
+
840
def advance_part2(state, age_rating, masc_rating, attr_rating, quality_rating, artifact_rating):
    """Validate and persist the five Part 2 ratings, then load the next face.

    Checks that every rating is present, appends one row to the Part 2 CSV
    (ratings, trial index, timestamp), and hands off to show_next_part2_image.
    On validation or save failure the UI stays on the current face with an
    explanatory status message.

    Returns the same 11-tuple as show_next_part2_image / _no_part2_updates.
    """
    if not state or state.get("phase") != "part2" or not state.get("part2_started"):
        return _no_part2_updates(state)

    values = {
        "age": age_rating,
        "masc": masc_rating,
        "attr": attr_rating,
        "quality": quality_rating,
        "artifact": artifact_rating,
    }
    missing = [k for k, v in values.items() if v in (None, "")]
    if missing:
        # Re-sync the touched flags with what is actually filled in.
        state["part2_touched"] = {k: (values[k] not in (None, "")) for k in PART2_KEYS}
        # BUG FIX: status text must be passed as value=, not positionally.
        return (
            state,
            gr.update(),
            gr.update(),
            gr.update(value="Please answer all five items before continuing."),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(interactive=False),
        )

    images = state.get("part2_images") or state.get("all_images") or []
    index = state.get("part2_index", -1)
    if index < 0 or index >= len(images):
        # No current stimulus to attach the ratings to; keep Next disabled.
        # BUG FIX: status text must be passed as value=, not positionally.
        return (
            state,
            gr.update(),
            gr.update(),
            gr.update(value="No rating target available."),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(interactive=False),
        )

    start_time = state.get("part2_start_time") or time.monotonic()
    response_time_ms = int(round((time.monotonic() - start_time) * 1000))
    image_data = images[index]
    trial_index = index + 1

    try:
        row = build_row_template(
            state,
            image_data,
        )
        row["match_age_rating"] = _to_int(age_rating)
        row["match_masc_rating"] = _to_int(masc_rating)
        row["match_attr_rating"] = _to_int(attr_rating)
        row["match_quality_rating"] = _to_int(quality_rating)
        row["match_artifact_rating"] = _to_int(artifact_rating)

        row["matching_trial_index"] = trial_index
        row["matching_timestamp"] = datetime.now().isoformat()

        # NOTE(review): response_time_ms is only logged, never written to the
        # CSV row — confirm whether Part 2 reaction times should be recorded.
        part2_file = state.get("csv_file_part2") or state.get("csv_file")
        with open(part2_file, "a", newline="") as f:
            writer = csv.writer(f)
            writer.writerow([row[h] for h in PART2_HEADERS])
        print(f"[DEBUG] Saved Part 2 ratings ({response_time_ms}ms) -> {part2_file}")
    except Exception as e:
        print(f"Error saving Part 2 CSV: {e}")
        # Surface the error but keep Next enabled so the participant can retry.
        # BUG FIX: status text must be passed as value=, not positionally.
        return (
            state,
            gr.update(),
            gr.update(),
            gr.update(value=f"Error saving ratings: {e}"),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(),
            gr.update(interactive=True),
        )

    return show_next_part2_image(state)
926
 
927
  # --- JAVASCRIPT ---
928
  # Logic: Find the animated image element, reset its class to remove 'image-clear',
929
  # force a reflow, then add 'image-clear' to start the transition.
930
  js_functions = """
931
  () => {
932
+ window.preBlur = function() {
933
+ const el = document.querySelector("#img_anim img");
934
+ if (!el) return;
935
+ // Immediately remove any clear/snap state and force a blurred render.
936
+ el.classList.remove("image-clear");
937
+ el.classList.remove("image-snap");
938
+ el.classList.add("image-preblur");
939
+ el.style.transition = "none";
940
+ el.style.filter = "blur(50px)";
941
+ void el.offsetWidth;
942
+ };
943
+
944
  window.triggerDeblur = function() {
945
  const el = document.querySelector("#img_anim img");
946
  if (el) {
947
+ // Ensure we start from a blurred state, then animate to clear.
948
+ window.preBlur();
949
+ requestAnimationFrame(() => {
950
+ requestAnimationFrame(() => {
951
+ el.style.transition = "";
952
+ el.style.filter = "";
953
+ el.classList.remove("image-preblur");
954
+ el.classList.add("image-clear");
955
+ });
956
+ });
957
+ }
958
+ };
959
+
960
+ window.snapClear = function() {
961
+ const el = document.querySelector("#img_anim img");
962
+ if (el) {
963
  el.classList.remove('image-clear');
964
+ el.classList.add('image-snap');
 
 
 
 
 
 
 
965
  }
966
  };
967
  }
 
984
  with gr.Group():
985
  # Animated Image: Visible initially, performs blur->clear
986
  image_anim = gr.Image(label="", elem_id="img_anim", height=400, width=400, interactive=False, show_label=False, visible=True)
 
 
987
 
988
  progress_text = gr.Markdown("", elem_id="progress_text")
989
 
 
991
  emotion_choice = gr.Radio(choices=[], label="Select the emotion", visible=False, interactive=True, elem_id="emotion_choice")
992
  next_image_btn = gr.Button("Next Image ▶", variant="secondary", visible=True, interactive=False, elem_id="next_btn")
993
 
994
+ # 3. Part 2 Instructions
995
+ with gr.Column(visible=False, elem_id="part2_instructions_section") as part2_instructions_section:
996
+ gr.Markdown(
997
+ "# Part 2\n"
998
+ "## You will now rate each face on several dimensions.\n"
999
+ "## Use the 1–7 scale for each item, then click Next Face ▶."
1000
+ )
1001
+ part2_start_btn = gr.Button("Start Part 2 ▶", variant="primary", elem_id="part2_start_btn")
1002
+
1003
+ # 4. Part 2: Rate The Images
1004
+ with gr.Column(visible=False, elem_id="part2_section") as part2_section:
1005
+ gr.Markdown(
1006
+ "# Rate The Images\n"
1007
+ "## Use the 1–7 scale for each item."
1008
+ )
1009
+ with gr.Row():
1010
+ with gr.Column(scale=1):
1011
+ part2_image = gr.Image(
1012
+ label="",
1013
+ height=400,
1014
+ width=400,
1015
+ interactive=False,
1016
+ show_label=False,
1017
+ visible=False,
1018
+ )
1019
+ part2_progress_text = gr.Markdown("", visible=False)
1020
+ part2_status_text = gr.Markdown("", visible=False)
1021
+ part2_completion_text = gr.Markdown("", elem_id="part2_completion_text", visible=False)
1022
+ with gr.Column(scale=1):
1023
+ part2_age_radio = gr.Radio(
1024
+ choices=SCALE_CHOICES,
1025
+ value=None,
1026
+ label="Perceived age (1 = very young, 7 = very old)",
1027
+ )
1028
+ part2_masc_radio = gr.Radio(
1029
+ choices=SCALE_CHOICES,
1030
+ value=None,
1031
+ label="Femininity–masculinity (1 = very feminine, 7 = very masculine)",
1032
+ )
1033
+ part2_attr_radio = gr.Radio(
1034
+ choices=SCALE_CHOICES,
1035
+ value=None,
1036
+ label="Attractiveness (1 = not at all, 7 = very attractive)",
1037
+ )
1038
+ part2_quality_radio = gr.Radio(
1039
+ choices=SCALE_CHOICES,
1040
+ value=None,
1041
+ label="Image quality (1 = very poor, 7 = excellent)",
1042
+ )
1043
+ part2_artifact_radio = gr.Radio(
1044
+ choices=SCALE_CHOICES,
1045
+ value=None,
1046
+ label="Artifacts / oddness (1 = none, 7 = a lot)",
1047
+ )
1048
+
1049
+ part2_next_btn = gr.Button("Next Face ▶", variant="primary", interactive=False)
1050
+
1051
  # --- Event Wiring ---
1052
 
1053
  # App Load
 
1055
 
1056
  # Start Button -> Show Interface -> Load First Image -> Trigger Animation
1057
  start_btn.click(
1058
+ fn=start_interface,
1059
+ inputs=[state],
1060
+ outputs=[instructions_section, start_btn, main_section, part2_instructions_section, part2_section],
1061
+ show_progress="hidden",
1062
+ js="() => window.preBlur && window.preBlur()",
1063
+ ).then(
1064
+ fn=show_next_image,
1065
+ inputs=[state],
1066
+ outputs=[state, image_anim, progress_text, emotion_choice, next_image_btn],
1067
  ).then(
1068
+ fn=update_sections_for_phase,
1069
+ inputs=[state],
1070
+ outputs=[
1071
+ main_section,
1072
+ part2_instructions_section,
1073
+ part2_section,
1074
+ ],
1075
  ).then(
1076
  fn=None, js="() => window.triggerDeblur()"
1077
  )
 
1080
  emotion_choice.change(
1081
  fn=on_emotion_select,
1082
  inputs=[state, emotion_choice],
1083
+ outputs=[state, image_anim, emotion_choice, next_image_btn],
1084
+ show_progress="hidden",
1085
+ ).then(
1086
+ fn=None, js="() => window.snapClear()"
1087
  )
1088
 
1089
  # Next Button -> Load New Image -> Reset Layout -> Trigger Animation
1090
  next_image_btn.click(
1091
+ fn=show_next_image,
1092
+ inputs=[state],
1093
+ outputs=[state, image_anim, progress_text, emotion_choice, next_image_btn],
1094
+ show_progress="hidden",
1095
+ js="() => window.preBlur && window.preBlur()",
1096
+ ).then(
1097
+ fn=update_sections_for_phase,
1098
+ inputs=[state],
1099
+ outputs=[
1100
+ main_section,
1101
+ part2_instructions_section,
1102
+ part2_section,
1103
+ ],
1104
  ).then(
1105
  fn=None, js="() => window.triggerDeblur()"
1106
  )
1107
 
1108
+ # Part 2 Start -> Show ratings block -> Load first rating image
1109
+ part2_start_btn.click(
1110
+ fn=start_part2,
1111
+ inputs=[state],
1112
+ outputs=[state],
1113
+ show_progress="hidden",
1114
+ ).then(
1115
+ fn=update_sections_for_phase,
1116
+ inputs=[state],
1117
+ outputs=[
1118
+ main_section,
1119
+ part2_instructions_section,
1120
+ part2_section,
1121
+ ],
1122
+ ).then(
1123
+ fn=start_part2_phase,
1124
+ inputs=[state],
1125
+ outputs=[
1126
+ state,
1127
+ main_section,
1128
+ part2_section,
1129
+ ],
1130
+ ).then(
1131
+ fn=show_next_part2_image,
1132
+ inputs=[state],
1133
+ outputs=[
1134
+ state,
1135
+ part2_image,
1136
+ part2_progress_text,
1137
+ part2_status_text,
1138
+ part2_completion_text,
1139
+ part2_age_radio,
1140
+ part2_masc_radio,
1141
+ part2_attr_radio,
1142
+ part2_quality_radio,
1143
+ part2_artifact_radio,
1144
+ part2_next_btn,
1145
+ ],
1146
+ )
1147
+
1148
+ # Part 2 gating: require interaction with all five ratings
1149
+ part2_age_radio.change(
1150
+ fn=partial(_mark_part2_touched, key="age"),
1151
+ inputs=[state, part2_age_radio],
1152
+ outputs=[state, part2_next_btn, part2_status_text, part2_completion_text],
1153
+ show_progress="hidden",
1154
+ )
1155
+ part2_masc_radio.change(
1156
+ fn=partial(_mark_part2_touched, key="masc"),
1157
+ inputs=[state, part2_masc_radio],
1158
+ outputs=[state, part2_next_btn, part2_status_text, part2_completion_text],
1159
+ show_progress="hidden",
1160
+ )
1161
+ part2_attr_radio.change(
1162
+ fn=partial(_mark_part2_touched, key="attr"),
1163
+ inputs=[state, part2_attr_radio],
1164
+ outputs=[state, part2_next_btn, part2_status_text, part2_completion_text],
1165
+ show_progress="hidden",
1166
+ )
1167
+ part2_quality_radio.change(
1168
+ fn=partial(_mark_part2_touched, key="quality"),
1169
+ inputs=[state, part2_quality_radio],
1170
+ outputs=[state, part2_next_btn, part2_status_text, part2_completion_text],
1171
+ show_progress="hidden",
1172
+ )
1173
+ part2_artifact_radio.change(
1174
+ fn=partial(_mark_part2_touched, key="artifact"),
1175
+ inputs=[state, part2_artifact_radio],
1176
+ outputs=[state, part2_next_btn, part2_status_text, part2_completion_text],
1177
+ show_progress="hidden",
1178
+ )
1179
+
1180
+ # Part 2 Next -> Save and advance
1181
+ part2_next_btn.click(
1182
+ fn=advance_part2,
1183
+ inputs=[state, part2_age_radio, part2_masc_radio, part2_attr_radio, part2_quality_radio, part2_artifact_radio],
1184
+ outputs=[
1185
+ state,
1186
+ part2_image,
1187
+ part2_progress_text,
1188
+ part2_status_text,
1189
+ part2_completion_text,
1190
+ part2_age_radio,
1191
+ part2_masc_radio,
1192
+ part2_attr_radio,
1193
+ part2_quality_radio,
1194
+ part2_artifact_radio,
1195
+ part2_next_btn,
1196
+ ],
1197
+ show_progress="hidden",
1198
+ )
1199
+
1200
  if __name__ == "__main__":
1201
  app.launch()
emotion_responses.csv DELETED
@@ -1,109 +0,0 @@
1
- participant_id,session_id,image_name,image_source,face_type,face_type_code,correct_emotion,correct_emotion_code,face_sex,face_sex_code,face_ethnicity,face_ethnicity_code,face_angle,face_angle_code,selected_emotion,selected_emotion_code,accuracy,response_time_ms,button_order,timestamp
2
- anon-293b45d4-bb79-4821-916d-a1fb77799b2b,293b45d4-bb79-4821-916d-a1fb77799b2b,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,surprised,4,correct,7226,surprised|disgusted|happy|angry,2026-01-08T20:14:30.560025
3
- anon-293b45d4-bb79-4821-916d-a1fb77799b2b,293b45d4-bb79-4821-916d-a1fb77799b2b,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,angry,3,correct,9197,happy|surprised|disgusted|angry,2026-01-08T20:14:40.821270
4
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,kid_surprised.png,AI,ai,2,surprised,4,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,8907,happy|angry|disgusted|surprised,2026-01-08T20:17:48.571466
5
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman_disgusted.png,Human,human,1,disgusted,5,unknown,0,unknown,0,unknown,0,disgusted,5,correct,4333,surprised|disgusted|happy|angry,2026-01-08T20:17:55.467188
6
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,happy,1,incorrect,6839,happy|angry|surprised|disgusted,2026-01-08T20:18:10.221377
7
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman2_happy.png,AI,ai,2,happy,1,unknown,0,unknown,0,unknown,0,angry,3,incorrect,3415,happy|surprised|angry|disgusted,2026-01-08T20:18:15.074756
8
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,oldman_angry.png,AI,ai,2,angry,3,unknown,0,unknown,0,unknown,0,happy,1,incorrect,1693,surprised|happy|angry|disgusted,2026-01-08T20:18:17.922186
9
- anon-c4a3c916-e736-4de4-913c-b84e29660747,c4a3c916-e736-4de4-913c-b84e29660747,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,surprised,4,correct,936,angry|disgusted|happy|surprised,2026-01-08T20:18:19.603907
10
- anon-90019529-c56c-4dc3-83ed-744cd30d6d7e,90019529-c56c-4dc3-83ed-744cd30d6d7e,woman2_happy.png,AI,ai,2,happy,1,unknown,0,unknown,0,unknown,0,angry,3,incorrect,3358,happy|angry|disgusted|surprised,2026-01-08T20:24:34.229398
11
- anon-90019529-c56c-4dc3-83ed-744cd30d6d7e,90019529-c56c-4dc3-83ed-744cd30d6d7e,man_angry.png,Human,human,1,angry,3,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,1771,happy|surprised|disgusted|angry,2026-01-08T20:24:37.144745
12
- anon-0819290d-1006-43fc-8b44-c2d564710e3e,0819290d-1006-43fc-8b44-c2d564710e3e,kid_surprised.png,AI,ai,2,surprised,4,unknown,0,unknown,0,unknown,0,happy,1,incorrect,4620,surprised|angry|disgusted|happy,2026-01-08T20:24:57.620828
13
- anon-0819290d-1006-43fc-8b44-c2d564710e3e,0819290d-1006-43fc-8b44-c2d564710e3e,woman_surprised.png,Human,human,1,surprised,4,unknown,0,unknown,0,unknown,0,disgusted,5,incorrect,7994,angry|surprised|disgusted|happy,2026-01-08T20:25:06.930289
14
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,1731,angry|disgusted|surprised|happy,2026-01-08T20:28:40.158440
15
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,1231,happy|surprised|angry|disgusted,2026-01-08T20:28:42.069297
16
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,943,angry|disgusted|happy|surprised,2026-01-08T20:28:43.460471
17
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,937,disgusted|happy|surprised|angry,2026-01-08T20:28:45.133349
18
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,disgusted,5,incorrect,860,disgusted|angry|surprised|happy,2026-01-08T20:28:46.770888
19
- anon-8c1f7011-2d88-420c-b67e-421c07f2a340,8c1f7011-2d88-420c-b67e-421c07f2a340,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1254,angry|surprised|happy|disgusted,2026-01-08T20:28:48.598528
20
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,disgusted,5,incorrect,1178,disgusted|angry|happy|surprised,2026-01-08T20:32:29.882922
21
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,847,angry|happy|surprised|disgusted,2026-01-08T20:32:32.561794
22
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,613,angry|happy|surprised|disgusted,2026-01-08T20:32:33.831538
23
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,494,angry|disgusted|happy|surprised,2026-01-08T20:32:34.849506
24
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,805,disgusted|happy|surprised|angry,2026-01-08T20:32:36.389731
25
- anon-4b299a41-fc84-4d91-ac64-63c7995a8e8c,4b299a41-fc84-4d91-ac64-63c7995a8e8c,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,667,happy|surprised|disgusted|angry,2026-01-08T20:32:37.746231
26
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,disgusted,5,incorrect,1102,disgusted|angry|surprised|happy,2026-01-08T20:32:47.661429
27
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,2565,happy|disgusted|surprised|angry,2026-01-08T20:32:51.774891
28
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,828,disgusted|happy|surprised|angry,2026-01-08T20:32:53.701283
29
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,1475,happy|angry|surprised|disgusted,2026-01-08T20:32:56.217087
30
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,5211,disgusted|surprised|happy|angry,2026-01-08T20:33:02.344766
31
- anon-800cad3a-e396-44e3-b225-68e2880ceadf,800cad3a-e396-44e3-b225-68e2880ceadf,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1167,happy|angry|surprised|disgusted,2026-01-08T20:33:04.307786
32
- anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,disgusted,5,incorrect,1240,surprised|disgusted|happy|angry,2026-01-08T20:33:17.946078
33
- anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,3260,angry|disgusted|surprised|happy,2026-01-08T20:33:22.407241
34
- anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,2028,surprised|disgusted|happy|angry,2026-01-08T20:33:24.976249
35
- anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,1157,happy|angry|surprised|disgusted,2026-01-08T20:33:26.665156
36
- anon-6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,6bfd56f1-e24e-4bf8-8d4f-e1695c1a0acd,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,5349,disgusted|angry|happy|surprised,2026-01-08T20:33:33.734201
37
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,3300,surprised|angry|happy|disgusted,2026-01-08T20:43:16.234329
38
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,disgusted,5,incorrect,12651,disgusted|surprised|happy|angry,2026-01-08T20:43:31.598273
39
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,happy,1,incorrect,931,angry|happy|disgusted|surprised,2026-01-08T20:43:33.111170
40
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,angry,3,correct,788,surprised|happy|angry|disgusted,2026-01-08T20:43:34.468996
41
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,disgusted,5,incorrect,1301,angry|surprised|disgusted|happy,2026-01-08T20:43:36.342882
42
- anon-1478b947-bba0-4486-90a4-ce00c5cf2996,1478b947-bba0-4486-90a4-ce00c5cf2996,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,1047,disgusted|happy|surprised|angry,2026-01-08T20:43:37.940345
43
- anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,2216,disgusted|surprised|happy|angry,2026-01-08T20:48:47.766427
44
- anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,1078,happy|disgusted|surprised|angry,2026-01-08T20:48:49.587460
45
- anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,1144,angry|disgusted|surprised|happy,2026-01-08T20:48:51.954683
46
- anon-1c102c98-80df-4149-b598-8cb3d4feebeb,1c102c98-80df-4149-b598-8cb3d4feebeb,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,1460,happy|disgusted|surprised|angry,2026-01-08T20:48:54.523725
47
- anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,angry,3,incorrect,5592,disgusted|surprised|angry|happy,2026-01-08T20:49:40.646632
48
- anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,disgusted,5,correct,5517,angry|disgusted|surprised|happy,2026-01-08T20:49:47.471801
49
- anon-132bd0b1-0187-4191-901b-cfd4434b3cce,132bd0b1-0187-4191-901b-cfd4434b3cce,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,1580,surprised|angry|happy|disgusted,2026-01-08T20:49:50.541265
50
- anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,3377,disgusted|happy|angry|surprised,2026-01-08T20:51:44.169517
51
- anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,angry,3,incorrect,1434,happy|angry|surprised|disgusted,2026-01-08T20:51:47.668494
52
- anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,4397,disgusted|angry|surprised|happy,2026-01-08T20:51:55.385533
53
- anon-e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,e07c3aa4-8b6b-41ae-8efa-77c1542e18b4,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,4194,happy|disgusted|angry|surprised,2026-01-08T20:52:00.573445
54
- anon-5eba27cd-b6d1-4f1b-bc9a-0bef0ea6d251,5eba27cd-b6d1-4f1b-bc9a-0bef0ea6d251,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,4122,disgusted|surprised|happy|angry,2026-01-08T20:54:59.957487
55
- anon-82cbc393-d79d-484d-8e20-4f4ea4fb3485,82cbc393-d79d-484d-8e20-4f4ea4fb3485,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,3139,surprised|happy|angry|disgusted,2026-01-08T20:57:27.921219
56
- anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,3344,surprised|disgusted|happy|angry,2026-01-08T20:58:55.093129
57
- anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,surprised,4,incorrect,3016,angry|happy|surprised|disgusted,2026-01-08T20:59:03.532575
58
- anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,2947,disgusted|surprised|happy|angry,2026-01-08T20:59:08.803406
59
- anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,1667,disgusted|surprised|happy|angry,2026-01-08T20:59:11.572648
60
- anon-ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,ebcf1d6a-7150-4960-a7dd-d8eb4bb55d2c,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,4128,surprised|angry|disgusted|happy,2026-01-08T20:59:26.792283
61
- anon-1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,happy,1,incorrect,3174,angry|happy|surprised|disgusted,2026-01-08T21:01:10.029903
62
- anon-1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,1efc4fa8-b4f2-488b-b5f7-8a2c35ba1ded,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,disgusted,5,correct,2901,angry|surprised|disgusted|happy,2026-01-08T21:01:15.104556
63
- anon-25f74813-2c25-496c-a738-593891e58029,25f74813-2c25-496c-a738-593891e58029,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,4405,surprised|happy|angry|disgusted,2026-01-08T21:02:53.461864
64
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,3477,disgusted|surprised|happy|angry,2026-01-08T21:08:05.371495
65
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,4564,disgusted|happy|surprised|angry,2026-01-08T21:08:11.967255
66
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,angry,3,correct,3396,angry|disgusted|happy|surprised,2026-01-08T21:08:17.105324
67
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,surprised,4,correct,2010,happy|surprised|angry|disgusted,2026-01-08T21:08:19.970215
68
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,2108,angry|surprised|disgusted|happy,2026-01-08T21:08:23.183190
69
- anon-5324ca1d-61b0-4c88-a3d9-65430ad19912,5324ca1d-61b0-4c88-a3d9-65430ad19912,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,angry,3,incorrect,1526,disgusted|surprised|happy|angry,2026-01-08T21:08:26.154828
70
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,6409,surprised|happy|angry|disgusted,2026-01-12T18:02:56.368785
71
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,4558,angry|happy|disgusted|surprised,2026-01-12T18:03:02.664964
72
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,1427,surprised|disgusted|angry|happy,2026-01-12T18:03:06.820770
73
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,disgusted,5,incorrect,564,happy|angry|disgusted|surprised,2026-01-12T18:03:09.981722
74
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,disgusted,5,incorrect,4451,disgusted|surprised|angry|happy,2026-01-12T18:03:16.923929
75
- anon-ef8a2376-5302-4a0d-81d0-99707d8435c5,ef8a2376-5302-4a0d-81d0-99707d8435c5,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,3346,happy|angry|disgusted|surprised,2026-01-12T18:03:28.836123
76
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,1638,happy|angry|disgusted|surprised,2026-01-12T18:07:25.325540
77
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,415,surprised|angry|disgusted|happy,2026-01-12T18:07:26.188603
78
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,448,surprised|disgusted|angry|happy,2026-01-12T18:07:27.791489
79
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,393,happy|disgusted|angry|surprised,2026-01-12T18:07:28.655568
80
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,662,disgusted|angry|surprised|happy,2026-01-12T18:07:30.303637
81
- anon-905a5865-5e29-4261-b17a-718713983f96,905a5865-5e29-4261-b17a-718713983f96,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,613,happy|angry|surprised|disgusted,2026-01-12T18:07:31.388541
82
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,happy,1,incorrect,2039,happy|angry|disgusted|surprised,2026-01-12T18:10:25.560611
83
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,happy,1,incorrect,711,disgusted|happy|surprised|angry,2026-01-12T18:10:27.353334
84
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,happy,1,incorrect,769,disgusted|surprised|angry|happy,2026-01-12T18:10:28.705236
85
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,697,disgusted|angry|happy|surprised,2026-01-12T18:10:30.716086
86
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,happy,1,incorrect,1142,surprised|angry|disgusted|happy,2026-01-12T18:10:32.321496
87
- anon-f3ec3b52-a5ad-43a8-95ef-52549fd0e318,f3ec3b52-a5ad-43a8-95ef-52549fd0e318,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,angry,3,correct,1163,disgusted|happy|angry|surprised,2026-01-12T18:10:34.821756
88
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,surprised,4,incorrect,2587,surprised|happy|disgusted|angry,2026-01-12T18:15:23.419125
89
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,614,disgusted|surprised|angry|happy,2026-01-12T18:15:25.055288
90
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,530,angry|surprised|happy|disgusted,2026-01-12T18:15:26.023464
91
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,802,surprised|happy|disgusted|angry,2026-01-12T18:15:27.305190
92
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,disgusted,5,correct,776,surprised|happy|angry|disgusted,2026-01-12T18:15:29.232630
93
- anon-f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,f68775d0-0e3c-4cb5-9270-1ab19dd8efd8,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,surprised,4,incorrect,2721,angry|disgusted|surprised|happy,2026-01-12T18:15:32.579968
94
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,disgusted,5,incorrect,1211,happy|angry|disgusted|surprised,2026-01-12T18:16:00.622301
95
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,angry,3,correct,546,surprised|angry|disgusted|happy,2026-01-12T18:16:01.854200
96
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,angry,3,incorrect,814,angry|happy|surprised|disgusted,2026-01-12T18:16:03.091831
97
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,disgusted,5,incorrect,573,happy|angry|disgusted|surprised,2026-01-12T18:16:04.122240
98
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,angry,3,incorrect,731,angry|surprised|disgusted|happy,2026-01-12T18:16:05.353845
99
- anon-6267f136-51c9-4796-bef0-19e61ee1ea21,6267f136-51c9-4796-bef0-19e61ee1ea21,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,angry,3,incorrect,1161,happy|disgusted|angry|surprised,2026-01-12T18:16:07.347624
100
- anon-908210e7-a71c-4068-809b-dfb47adac3e4,908210e7-a71c-4068-809b-dfb47adac3e4,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,disgusted,5,incorrect,2260,disgusted|happy|angry|surprised,2026-01-12T18:18:56.785905
101
- anon-c0785749-e72e-46c4-96fd-93e937c9a042,c0785749-e72e-46c4-96fd-93e937c9a042,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,disgusted,5,incorrect,1613,happy|disgusted|surprised|angry,2026-01-12T18:19:06.491275
102
- anon-588d6e24-218e-48a5-bd5d-5abf2550f187,588d6e24-218e-48a5-bd5d-5abf2550f187,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,surprised,4,correct,17158,surprised|disgusted|angry|happy,2026-01-12T18:20:18.220739
103
- anon-588d6e24-218e-48a5-bd5d-5abf2550f187,588d6e24-218e-48a5-bd5d-5abf2550f187,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,5431,angry|disgusted|happy|surprised,2026-01-12T18:20:26.191698
104
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,oldman_angry.png,AI,ai,2,angry,3,male,1,caucasian,1,forward,1,surprised,4,incorrect,4566,happy|surprised|angry|disgusted,2026-01-12T18:23:30.223535
105
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,woman_surprised.png,Human,human,1,surprised,4,female,2,caucasian,1,front-right,3,disgusted,5,incorrect,2251,surprised|angry|disgusted|happy,2026-01-12T18:23:33.653426
106
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,woman2_happy.png,AI,ai,2,happy,1,female,2,black,2,forward,1,happy,1,correct,1007,angry|surprised|disgusted|happy,2026-01-12T18:23:35.761200
107
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,kid_surprised.png,AI,ai,2,surprised,4,female,2,caucasian,1,forward,1,disgusted,5,incorrect,1361,disgusted|angry|happy|surprised,2026-01-12T18:23:37.854051
108
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,woman_disgusted.png,Human,human,1,disgusted,5,female,2,caucasian,1,front-left,2,surprised,4,incorrect,1348,disgusted|surprised|angry|happy,2026-01-12T18:23:39.919865
109
- anon-baeb1830-c64a-48dc-a607-e696ea19c3bd,baeb1830-c64a-48dc-a607-e696ea19c3bd,man_angry.png,Human,human,1,angry,3,male,1,black,2,forward,1,disgusted,5,incorrect,890,happy|surprised|angry|disgusted,2026-01-12T18:23:41.687609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
stimuli_metadata.csv CHANGED
@@ -1,7 +1,7 @@
1
  image_name,emotion,sex,ethnicity,angle,face_type
2
  man_angry.png,angry,male,black,forward,human
3
- woman_disgusted.jpg,disgusted,female,caucasian,front-left,human
4
- woman_surprised.png,surprised,female,caucasian,front-right,human
5
- kid_surprised.jpg,surprised,female,caucasian,forward,ai
6
- woman2_happy.jpg,happy,female,black,forward,ai
7
- oldman_angry.jpg,angry,male,caucasian,forward,ai
 
1
  image_name,emotion,sex,ethnicity,angle,face_type
2
  man_angry.png,angry,male,black,forward,human
3
+ woman_fearful.png,afraid,female,caucasian,forward,human
4
+ woman_happy.png,happy,female,caucasian,forward,human
5
+ kid_surprised.png,surprised,female,caucasian,forward,ai
6
+ woman2_happy.png,happy,female,black,forward,ai
7
+ oldman_angry.png,angry,male,caucasian,forward,ai
view_responses.ipynb ADDED
@@ -0,0 +1,363 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "id": "9c34ed5c",
6
+ "metadata": {},
7
+ "source": [
8
+ "# Load And Merge Part 1 + Part 2\n",
9
+ "\n",
10
+ "This notebook loads the latest part-specific CSVs and merges them into one DataFrame.\n"
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "code",
15
+ "execution_count": 1,
16
+ "id": "29c7b15d",
17
+ "metadata": {},
18
+ "outputs": [
19
+ {
20
+ "data": {
21
+ "text/plain": [
22
+ "(PosixPath('emotion_responses_part1.csv'),\n",
23
+ " PosixPath('emotion_responses_part2.csv'))"
24
+ ]
25
+ },
26
+ "execution_count": 1,
27
+ "metadata": {},
28
+ "output_type": "execute_result"
29
+ }
30
+ ],
31
+ "source": [
32
+ "from pathlib import Path\n",
33
+ "import pandas as pd\n",
34
+ "\n",
35
+ "data_dir = Path('.')\n",
36
+ "\n",
37
+ "part1_candidates = sorted(\n",
38
+ " data_dir.glob('emotion_responses_part1*.csv'),\n",
39
+ " key=lambda p: p.stat().st_mtime,\n",
40
+ ")\n",
41
+ "part2_candidates = sorted(\n",
42
+ " data_dir.glob('emotion_responses_part2*.csv'),\n",
43
+ " key=lambda p: p.stat().st_mtime,\n",
44
+ ")\n",
45
+ "\n",
46
+ "if not part1_candidates:\n",
47
+ " raise FileNotFoundError('No emotion_responses_part1*.csv files found.')\n",
48
+ "if not part2_candidates:\n",
49
+ " raise FileNotFoundError('No emotion_responses_part2*.csv files found.')\n",
50
+ "\n",
51
+ "part1_path = part1_candidates[-1]\n",
52
+ "part2_path = part2_candidates[-1]\n",
53
+ "\n",
54
+ "part1_path, part2_path\n"
55
+ ]
56
+ },
57
+ {
58
+ "cell_type": "code",
59
+ "execution_count": 2,
60
+ "id": "e4a8048b",
61
+ "metadata": {},
62
+ "outputs": [
63
+ {
64
+ "data": {
65
+ "text/html": [
66
+ "<div>\n",
67
+ "<style scoped>\n",
68
+ " .dataframe tbody tr th:only-of-type {\n",
69
+ " vertical-align: middle;\n",
70
+ " }\n",
71
+ "\n",
72
+ " .dataframe tbody tr th {\n",
73
+ " vertical-align: top;\n",
74
+ " }\n",
75
+ "\n",
76
+ " .dataframe thead th {\n",
77
+ " text-align: right;\n",
78
+ " }\n",
79
+ "</style>\n",
80
+ "<table border=\"1\" class=\"dataframe\">\n",
81
+ " <thead>\n",
82
+ " <tr style=\"text-align: right;\">\n",
83
+ " <th></th>\n",
84
+ " <th>participant_id</th>\n",
85
+ " <th>session_id</th>\n",
86
+ " <th>stimulus_id</th>\n",
87
+ " <th>stimulus_type</th>\n",
88
+ " <th>target_emotion</th>\n",
89
+ " <th>emotion_trial_index</th>\n",
90
+ " <th>emotion_rt_ms</th>\n",
91
+ " <th>selected_emotion</th>\n",
92
+ " <th>accuracy</th>\n",
93
+ " <th>matching_trial_index</th>\n",
94
+ " <th>matching_rt_ms</th>\n",
95
+ " <th>match_age_rating</th>\n",
96
+ " <th>match_masc_rating</th>\n",
97
+ " <th>match_attr_rating</th>\n",
98
+ " <th>match_quality_rating</th>\n",
99
+ " <th>match_artifact_rating</th>\n",
100
+ " <th>emotion_timestamp</th>\n",
101
+ " <th>matching_timestamp</th>\n",
102
+ " </tr>\n",
103
+ " </thead>\n",
104
+ " <tbody>\n",
105
+ " <tr>\n",
106
+ " <th>0</th>\n",
107
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
108
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
109
+ " <td>kid_surprised</td>\n",
110
+ " <td>ai_kdef_like</td>\n",
111
+ " <td>surprised</td>\n",
112
+ " <td>1</td>\n",
113
+ " <td>6412</td>\n",
114
+ " <td>happy</td>\n",
115
+ " <td>incorrect</td>\n",
116
+ " <td>4</td>\n",
117
+ " <td>3029</td>\n",
118
+ " <td>7</td>\n",
119
+ " <td>6</td>\n",
120
+ " <td>6</td>\n",
121
+ " <td>5</td>\n",
122
+ " <td>5</td>\n",
123
+ " <td>2026-01-27T15:49:25.470659</td>\n",
124
+ " <td>2026-01-27T15:49:53.751731</td>\n",
125
+ " </tr>\n",
126
+ " <tr>\n",
127
+ " <th>1</th>\n",
128
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
129
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
130
+ " <td>woman_happy</td>\n",
131
+ " <td>real_kdef</td>\n",
132
+ " <td>happy</td>\n",
133
+ " <td>2</td>\n",
134
+ " <td>1279</td>\n",
135
+ " <td>afraid</td>\n",
136
+ " <td>incorrect</td>\n",
137
+ " <td>1</td>\n",
138
+ " <td>6313</td>\n",
139
+ " <td>1</td>\n",
140
+ " <td>3</td>\n",
141
+ " <td>3</td>\n",
142
+ " <td>3</td>\n",
143
+ " <td>3</td>\n",
144
+ " <td>2026-01-27T15:49:27.734481</td>\n",
145
+ " <td>2026-01-27T15:49:43.941520</td>\n",
146
+ " </tr>\n",
147
+ " <tr>\n",
148
+ " <th>2</th>\n",
149
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
150
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
151
+ " <td>man_angry</td>\n",
152
+ " <td>real_kdef</td>\n",
153
+ " <td>angry</td>\n",
154
+ " <td>3</td>\n",
155
+ " <td>1146</td>\n",
156
+ " <td>disgusted</td>\n",
157
+ " <td>incorrect</td>\n",
158
+ " <td>6</td>\n",
159
+ " <td>4219</td>\n",
160
+ " <td>3</td>\n",
161
+ " <td>3</td>\n",
162
+ " <td>2</td>\n",
163
+ " <td>3</td>\n",
164
+ " <td>2</td>\n",
165
+ " <td>2026-01-27T15:49:29.848369</td>\n",
166
+ " <td>2026-01-27T15:50:01.982456</td>\n",
167
+ " </tr>\n",
168
+ " <tr>\n",
169
+ " <th>3</th>\n",
170
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
171
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
172
+ " <td>woman2_happy</td>\n",
173
+ " <td>ai_kdef_like</td>\n",
174
+ " <td>happy</td>\n",
175
+ " <td>4</td>\n",
176
+ " <td>3915</td>\n",
177
+ " <td>happy</td>\n",
178
+ " <td>correct</td>\n",
179
+ " <td>2</td>\n",
180
+ " <td>3194</td>\n",
181
+ " <td>3</td>\n",
182
+ " <td>2</td>\n",
183
+ " <td>2</td>\n",
184
+ " <td>2</td>\n",
185
+ " <td>4</td>\n",
186
+ " <td>2026-01-27T15:49:34.789089</td>\n",
187
+ " <td>2026-01-27T15:49:47.174309</td>\n",
188
+ " </tr>\n",
189
+ " <tr>\n",
190
+ " <th>4</th>\n",
191
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
192
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
193
+ " <td>woman_fearful</td>\n",
194
+ " <td>real_kdef</td>\n",
195
+ " <td>afraid</td>\n",
196
+ " <td>5</td>\n",
197
+ " <td>648</td>\n",
198
+ " <td>angry</td>\n",
199
+ " <td>incorrect</td>\n",
200
+ " <td>3</td>\n",
201
+ " <td>3474</td>\n",
202
+ " <td>7</td>\n",
203
+ " <td>7</td>\n",
204
+ " <td>7</td>\n",
205
+ " <td>7</td>\n",
206
+ " <td>7</td>\n",
207
+ " <td>2026-01-27T15:49:36.150139</td>\n",
208
+ " <td>2026-01-27T15:49:50.681104</td>\n",
209
+ " </tr>\n",
210
+ " <tr>\n",
211
+ " <th>5</th>\n",
212
+ " <td>anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
213
+ " <td>f821e984-aa8a-46f0-b8d0-757c0ea9fdf9</td>\n",
214
+ " <td>oldman_angry</td>\n",
215
+ " <td>ai_kdef_like</td>\n",
216
+ " <td>angry</td>\n",
217
+ " <td>6</td>\n",
218
+ " <td>460</td>\n",
219
+ " <td>afraid</td>\n",
220
+ " <td>incorrect</td>\n",
221
+ " <td>5</td>\n",
222
+ " <td>3944</td>\n",
223
+ " <td>5</td>\n",
224
+ " <td>4</td>\n",
225
+ " <td>3</td>\n",
226
+ " <td>2</td>\n",
227
+ " <td>1</td>\n",
228
+ " <td>2026-01-27T15:49:37.158011</td>\n",
229
+ " <td>2026-01-27T15:49:57.736323</td>\n",
230
+ " </tr>\n",
231
+ " </tbody>\n",
232
+ "</table>\n",
233
+ "</div>"
234
+ ],
235
+ "text/plain": [
236
+ " participant_id \\\n",
237
+ "0 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
238
+ "1 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
239
+ "2 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
240
+ "3 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
241
+ "4 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
242
+ "5 anon-f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 \n",
243
+ "\n",
244
+ " session_id stimulus_id stimulus_type \\\n",
245
+ "0 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 kid_surprised ai_kdef_like \n",
246
+ "1 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 woman_happy real_kdef \n",
247
+ "2 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 man_angry real_kdef \n",
248
+ "3 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 woman2_happy ai_kdef_like \n",
249
+ "4 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 woman_fearful real_kdef \n",
250
+ "5 f821e984-aa8a-46f0-b8d0-757c0ea9fdf9 oldman_angry ai_kdef_like \n",
251
+ "\n",
252
+ " target_emotion emotion_trial_index emotion_rt_ms selected_emotion \\\n",
253
+ "0 surprised 1 6412 happy \n",
254
+ "1 happy 2 1279 afraid \n",
255
+ "2 angry 3 1146 disgusted \n",
256
+ "3 happy 4 3915 happy \n",
257
+ "4 afraid 5 648 angry \n",
258
+ "5 angry 6 460 afraid \n",
259
+ "\n",
260
+ " accuracy matching_trial_index matching_rt_ms match_age_rating \\\n",
261
+ "0 incorrect 4 3029 7 \n",
262
+ "1 incorrect 1 6313 1 \n",
263
+ "2 incorrect 6 4219 3 \n",
264
+ "3 correct 2 3194 3 \n",
265
+ "4 incorrect 3 3474 7 \n",
266
+ "5 incorrect 5 3944 5 \n",
267
+ "\n",
268
+ " match_masc_rating match_attr_rating match_quality_rating \\\n",
269
+ "0 6 6 5 \n",
270
+ "1 3 3 3 \n",
271
+ "2 3 2 3 \n",
272
+ "3 2 2 2 \n",
273
+ "4 7 7 7 \n",
274
+ "5 4 3 2 \n",
275
+ "\n",
276
+ " match_artifact_rating emotion_timestamp \\\n",
277
+ "0 5 2026-01-27T15:49:25.470659 \n",
278
+ "1 3 2026-01-27T15:49:27.734481 \n",
279
+ "2 2 2026-01-27T15:49:29.848369 \n",
280
+ "3 4 2026-01-27T15:49:34.789089 \n",
281
+ "4 7 2026-01-27T15:49:36.150139 \n",
282
+ "5 1 2026-01-27T15:49:37.158011 \n",
283
+ "\n",
284
+ " matching_timestamp \n",
285
+ "0 2026-01-27T15:49:53.751731 \n",
286
+ "1 2026-01-27T15:49:43.941520 \n",
287
+ "2 2026-01-27T15:50:01.982456 \n",
288
+ "3 2026-01-27T15:49:47.174309 \n",
289
+ "4 2026-01-27T15:49:50.681104 \n",
290
+ "5 2026-01-27T15:49:57.736323 "
291
+ ]
292
+ },
293
+ "execution_count": 2,
294
+ "metadata": {},
295
+ "output_type": "execute_result"
296
+ }
297
+ ],
298
+ "source": [
299
+ "merge_keys = [\"participant_id\", \"session_id\", \"stimulus_id\"]\n",
300
+ "\n",
301
+ "df_part1 = pd.read_csv(part1_path)\n",
302
+ "df_part2 = pd.read_csv(part2_path)\n",
303
+ "\n",
304
+ "df_merged = df_part1.merge(\n",
305
+ " df_part2,\n",
306
+ " on=merge_keys,\n",
307
+ " how=\"outer\",\n",
308
+ ")\n",
309
+ "\n",
310
+ "def coalesce(df, col):\n",
311
+ " left = f\"{col}_x\"\n",
312
+ " right = f\"{col}_y\"\n",
313
+ " if left in df.columns and right in df.columns:\n",
314
+ " df[col] = df[left].combine_first(df[right])\n",
315
+ " df.drop(columns=[left, right], inplace=True)\n",
316
+ "\n",
317
+ "# These are intentionally repeated in both files; keep just one copy.\n",
318
+ "coalesce(df_merged, \"stimulus_type\")\n",
319
+ "coalesce(df_merged, \"target_emotion\")\n",
320
+ "\n",
321
+ "analysis_order = [\n",
322
+ " \"participant_id\", \"session_id\", \"stimulus_id\", \"stimulus_type\", \"target_emotion\",\n",
323
+ " \"emotion_trial_index\", \"emotion_rt_ms\", \"selected_emotion\", \"accuracy\",\n",
324
+ " \"matching_trial_index\", \"matching_rt_ms\",\n",
325
+ " \"match_age_rating\", \"match_masc_rating\", \"match_attr_rating\", \"match_quality_rating\", \"match_artifact_rating\",\n",
326
+ " \"emotion_timestamp\", \"matching_timestamp\",\n",
327
+ "]\n",
328
+ "\n",
329
+ "df_merged = df_merged[[c for c in analysis_order if c in df_merged.columns]]\n",
330
+ "df_merged\n"
331
+ ]
332
+ },
333
+ {
334
+ "cell_type": "code",
335
+ "execution_count": null,
336
+ "id": "e6cb990e",
337
+ "metadata": {},
338
+ "outputs": [],
339
+ "source": []
340
+ }
341
+ ],
342
+ "metadata": {
343
+ "kernelspec": {
344
+ "display_name": "workhorse",
345
+ "language": "python",
346
+ "name": "python3"
347
+ },
348
+ "language_info": {
349
+ "codemirror_mode": {
350
+ "name": "ipython",
351
+ "version": 3
352
+ },
353
+ "file_extension": ".py",
354
+ "mimetype": "text/x-python",
355
+ "name": "python",
356
+ "nbconvert_exporter": "python",
357
+ "pygments_lexer": "ipython3",
358
+ "version": "3.9.0"
359
+ }
360
+ },
361
+ "nbformat": 4,
362
+ "nbformat_minor": 5
363
+ }