{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# InterAct Dataset - Quick Start\n", "\n", "This notebook demonstrates how to load and explore the InterAct dataset." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# All imports live up front so the notebook survives Restart & Run All.\n", "import sqlite3\n", "\n", "import matplotlib.pyplot as plt\n", "import numpy as np" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 1. Loading the Databases\n", "\n", "The dataset includes two SQLite databases:\n", "- `scenarios.db` - scenario metadata (relationships, emotions, descriptions)\n", "- `actors.db` - actor info and recording sessions" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Connect to databases\n", "scenarios_db = sqlite3.connect('scenarios.db')\n", "actors_db = sqlite3.connect('actors.db')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# View available relationships\n", "relationships = scenarios_db.execute('SELECT * FROM relationships').fetchall()\n", "print(f\"Total relationships: {len(relationships)}\")\n", "print(\"Sample relationships:\")\n", "for r in relationships[:5]:\n", " print(f\" {r[0]}: {r[1]}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# View available emotions\n", "emotions = scenarios_db.execute('SELECT * FROM emotions').fetchall()\n", "print(f\"Total emotions: {len(emotions)}\")\n", "print(\"Sample emotions:\")\n", "for e in emotions[:5]:\n", " print(f\" {e[0]}: {e[1]}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Query scenarios by emotion (e.g., find all \"anger\" scenarios)\n", "anger_scenarios = scenarios_db.execute('''\n", " SELECT s.id, r.name, e.name, s.scenario \n", " FROM scenarios s\n", " JOIN relationships r ON s.relationship_id = r.id\n", " JOIN emotions e ON s.primary_emotion_id = e.id\n", " WHERE e.name = 'anger'\n", " LIMIT 3\n", 
"''').fetchall()\n", "\n", "print(\"Scenarios with 'anger' emotion:\")\n", "for s in anger_scenarios:\n", " print(f\"\\nScenario {s[0]} ({s[1]} / {s[2]}):\")\n", " print(f\" {s[3][:100]}...\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 2. Finding Actor Pairs\n", "\n", "Each recording session has one male and one female actor. The `sessions` table maps dates to actor pairs." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# View all sessions\n", "sessions = actors_db.execute('SELECT * FROM sessions').fetchall()\n", "print(\"Recording sessions:\")\n", "print(\"Date | Male | Female\")\n", "print(\"-\" * 28)\n", "for s in sessions:\n", " print(f\"{s[0]} | {s[1]} | {s[2]}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# View all actors\n", "actors = actors_db.execute('SELECT * FROM actors').fetchall()\n", "print(\"Actors:\")\n", "for a in actors:\n", " print(f\" {a[0]}: {a[1]}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def get_actor_pair(date):\n", " \"\"\"Get the male and female actor IDs for a given recording date.\"\"\"\n", " result = actors_db.execute(\n", " 'SELECT male_id, female_id FROM sessions WHERE date = ?', \n", " (date,)\n", " ).fetchone()\n", " if result is None:\n", " # An unknown date would otherwise surface as an opaque TypeError\n", " # when the caller unpacks None into (male_id, female_id).\n", " raise ValueError(f'No recording session found for date {date!r}')\n", " return result\n", "\n", "# Example: get actors for a specific date\n", "date = '20231119'\n", "male_id, female_id = get_actor_pair(date)\n", "print(f\"Session {date}: male={male_id}, female={female_id}\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 3. 
Loading Performance Data\n", "\n", "Performance files follow the naming convention: `<date>_<actor_id>_<scenario_id>.<extension>`" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Example performance\n", "date = '20231119'\n", "actor_id = '001'\n", "scenario_id = '051'\n", "\n", "# File paths\n", "bvh_path = f'bvhs/{date}_{actor_id}_{scenario_id}.bvh'\n", "face_ict_path = f'face_ict/{date}_{actor_id}_{scenario_id}.npy'\n", "face_arkit_path = f'face_arkit/{date}_{actor_id}_{scenario_id}.npy'\n", "wav_path = f'wav/{date}_{actor_id}_{scenario_id}.wav'\n", "\n", "print(f\"BVH: {bvh_path}\")\n", "print(f\"Face ICT: {face_ict_path}\")\n", "print(f\"Face ARKit: {face_arkit_path}\")\n", "print(f\"Audio: {wav_path}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Load face blendshape parameters\n", "face_ict = np.load(face_ict_path)\n", "face_arkit = np.load(face_arkit_path)\n", "\n", "print(f\"Face ICT shape: {face_ict.shape}\") # (N, 55) - N frames, 55 ICT blendshapes\n", "print(f\"Face ARKit shape: {face_arkit.shape}\") # (N, 51) - N frames, 51 ARKit blendshapes\n", "\n", "FPS = 30 # face-data frame rate (frames per second)\n", "n_frames = face_ict.shape[0]\n", "duration_sec = n_frames / FPS\n", "print(f\"\\nFrames: {n_frames}\")\n", "print(f\"Duration: {duration_sec:.1f} seconds\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": "## 4. Loading Both Actors in an Interaction\n\nFor two-person interaction research, load data from both actors in a scene." 
}, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": "def load_interaction(date, scenario_id):\n \"\"\"Load face and audio data for both actors in an interaction.\n\n Returns a dict keyed by 'male'/'female', each holding the actor id,\n the two face blendshape arrays, and the audio/BVH file paths.\n Raises ValueError if no session exists for the given date.\n \"\"\"\n pair = get_actor_pair(date)\n if pair is None:\n # Guard against an unknown date: unpacking None would raise an opaque TypeError.\n raise ValueError(f'No recording session found for date {date!r}')\n male_id, female_id = pair\n \n data = {}\n for actor_id, role in [(male_id, 'male'), (female_id, 'female')]:\n prefix = f'{date}_{actor_id}_{scenario_id}'\n data[role] = {\n 'actor_id': actor_id,\n 'face_ict': np.load(f'face_ict/{prefix}.npy'),\n 'face_arkit': np.load(f'face_arkit/{prefix}.npy'),\n 'wav_path': f'wav/{prefix}.wav',\n 'bvh_path': f'bvhs/{prefix}.bvh',\n }\n \n return data\n\n# Load an interaction\ninteraction = load_interaction('20231119', '051')\n\nprint(\"Male actor:\", interaction['male']['actor_id'])\nprint(f\" Face shape: {interaction['male']['face_ict'].shape}\")\nprint(\"\\nFemale actor:\", interaction['female']['actor_id'])\nprint(f\" Face shape: {interaction['female']['face_ict'].shape}\")" }, { "cell_type": "markdown", "metadata": {}, "source": "## 5. Basic Visualization\n\nPlot face blendshape values over time." 
}, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": "import matplotlib.pyplot as plt\n\n# Plot jawOpen blendshape over time\ntime = np.arange(n_frames) / 30 # Convert to seconds\n\nfig, ax = plt.subplots(figsize=(12, 3))\n\n# ARKit blendshape index (see body_to_render.blend for full list)\nax.plot(time, face_arkit[:, 24]) # 24 = jawOpen\nax.set_ylabel('jawOpen')\nax.set_ylim(0, 1)\nax.set_xlabel('Time (seconds)')\nax.set_title(f'Face Blendshape: {date}_{actor_id}_{scenario_id}')\n\nplt.tight_layout()\nplt.show()" }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": "# Compare jaw movement between both actors\nfig, ax = plt.subplots(figsize=(12, 4))\n\n# The two recordings are not guaranteed to have identical frame counts, so\n# build a separate time axis per actor; reusing the male frame count for the\n# female array would make ax.plot raise on any length mismatch.\nfor role, label in [('male', 'Male jawOpen'), ('female', 'Female jawOpen')]:\n jaw = interaction[role]['face_arkit'][:, 24] # 24 = jawOpen\n time = np.arange(jaw.shape[0]) / 30 # 30 fps -> seconds\n ax.plot(time, jaw, label=label, alpha=0.7)\n\nax.set_xlabel('Time (seconds)')\nax.set_ylabel('jawOpen')\nax.legend()\nax.set_title('Jaw Movement Comparison - Two-Person Interaction')\nplt.tight_layout()\nplt.show()" }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Clean up\n", "scenarios_db.close()\n", "actors_db.close()" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "name": "python", "version": "3.10.0" } }, "nbformat": 4, "nbformat_minor": 4 }