<!-- splug/PWAToggle.html — "Create PWAToggle.html", commit 538ab08 (verified) -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AI Extraction Studio | PWA</title>
<!-- Unified Tailwind CSS -->
<script src="https://cdn.tailwindcss.com"></script>
<!-- Unified Fonts -->
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Montserrat:wght@700;800&family=Inter:wght@300;400;500;600;700&display=swap" rel="stylesheet">
<!-- Icons for Videoflow -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<!-- Shared Libraries -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jszip/3.10.1/jszip.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/face-api@1.7.12/dist/face-api.js"></script>
<!-- React & Babel for Immager -->
<script crossorigin src="https://unpkg.com/react@18/umd/react.production.min.js"></script>
<script crossorigin src="https://unpkg.com/react-dom@18/umd/react-dom.production.min.js"></script>
<!-- NOTE(review): @babel/standalone is unpinned (floats to latest); consider pinning a version for reproducible builds -->
<script src="https://unpkg.com/@babel/standalone/babel.min.js"></script>
<style>
/* Base Resets */
body { margin: 0; padding: 0; overflow-x: hidden; background-color: #000; }
/* --- Immager Custom CSS --- */
@keyframes spin { 100% { transform: rotate(360deg); } }
.animate-spin { animation: spin 1s linear infinite; }
@keyframes slideIn { from { opacity: 0; transform: translateY(20px); } to { opacity: 1; transform: translateY(0); } }
.animate-in { animation: slideIn 0.5s ease-out forwards; }
/* --- Videoflow Custom CSS --- */
.glass-panel { background: rgba(255, 255, 255, 0.03); backdrop-filter: blur(12px); border: 1px solid rgba(255, 255, 255, 0.08); }
.gradient-text { background: linear-gradient(135deg, #818cf8 0%, #c084fc 100%); -webkit-background-clip: text; -webkit-text-fill-color: transparent; }
.status-dot { width: 8px; height: 8px; border-radius: 50%; display: inline-block; }
/* Thin custom scrollbar for the results gallery (WebKit-only pseudo-elements) */
#results-container::-webkit-scrollbar { width: 5px; }
#results-container::-webkit-scrollbar-track { background: transparent; }
#results-container::-webkit-scrollbar-thumb { background: #334155; border-radius: 10px; }
/* Animated scan line overlaying the preview canvas; hidden (display:none) until shown by script */
.scan-line { position: absolute; top: 0; left: 0; width: 100%; height: 2px; background: #818cf8; box-shadow: 0 0 15px #818cf8; animation: scan 2s linear infinite; display: none; z-index: 10; }
@keyframes scan { 0% { top: 0%; } 100% { top: 100%; } }
/* Checkbox-driven toggle switches used by the Advanced Extraction panel */
.toggle-checkbox:checked { right: 0; border-color: #6366f1; }
.toggle-checkbox:checked + .toggle-label { background-color: #6366f1; }
.toggle-checkbox { right: 4px; z-index: 1; border-color: #e2e8f0; transition: all 0.3s; }
.toggle-label { background-color: #cbd5e1; transition: all 0.3s; }
</style>
</head>
<body>
<!-- ========================================== -->
<!-- GLOBAL UI: APP SWITCHER -->
<!-- ========================================== -->
<!-- Floating pill that switches between the two bundled apps (see switchApp()).
     Fixes: explicit type="button" on both buttons (best practice; avoids submit
     semantics if ever placed in a form) and decorative icons hidden from AT. -->
<div class="fixed bottom-6 left-1/2 transform -translate-x-1/2 z-[9999] bg-black/60 backdrop-blur-xl border border-white/10 p-1.5 rounded-full flex gap-1 shadow-[0_0_30px_rgba(0,0,0,0.8)]">
<button type="button" onclick="switchApp('immager')" id="tab-immager" class="px-5 py-2.5 rounded-full text-sm font-semibold transition-all duration-300 bg-indigo-600 text-white shadow-lg flex items-center gap-2">
<svg aria-hidden="true" xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><rect width="18" height="18" x="3" y="3" rx="2" ry="2"/><circle cx="9" cy="9" r="2"/><path d="m21 15-3.086-3.086a2 2 0 0 0-2.828 0L6 21"/></svg>
Immager (Images)
</button>
<button type="button" onclick="switchApp('videoflow')" id="tab-videoflow" class="px-5 py-2.5 rounded-full text-sm font-semibold transition-all duration-300 text-white/60 hover:text-white hover:bg-white/10 flex items-center gap-2">
<i class="fas fa-video" aria-hidden="true"></i>
HumanFrame (Video)
</button>
</div>
<!-- ========================================== -->
<!-- APP 1: IMMAGER (REACT) -->
<!-- ========================================== -->
<div id="app-immager" class="w-full min-h-screen bg-neutral-950 text-neutral-100 font-sans selection:bg-indigo-500/30">
<!-- React mount point; FaceExtractApp renders here (see the text/babel script below) -->
<div id="root"></div>
</div>
<!-- ========================================== -->
<!-- APP 2: VIDEOFLOW (VANILLA JS) -->
<!-- ========================================== -->
<!-- Starts hidden; switchApp('videoflow') reveals it -->
<div id="app-videoflow" class="w-full min-h-screen bg-[#0f172a] text-slate-200 selection:bg-indigo-500/30 hidden relative" style="font-family: 'Inter', sans-serif;">
<!-- Navbar -->
<nav class="border-b border-white/5 bg-[#0f172a]/80 backdrop-blur-md sticky top-0 z-50">
<div class="max-w-7xl mx-auto px-6 h-16 flex items-center justify-between">
<div class="flex items-center gap-3">
<div class="w-10 h-10 bg-indigo-600 rounded-xl flex items-center justify-center shadow-lg shadow-indigo-500/20">
<i class="fas fa-user-check text-white text-lg"></i>
</div>
<h1 class="text-xl font-bold tracking-tight">HumanFrame<span class="gradient-text">AI</span></h1>
</div>
<div class="flex gap-3">
<!-- Model-status badges; presumably updated by the videoflow controller script once models load — TODO confirm (loader code not in this chunk) -->
<div id="model-badge" class="flex items-center gap-2 bg-slate-800/50 px-3 py-1.5 rounded-full border border-white/5 text-xs font-medium text-slate-400">
<span class="status-dot bg-amber-500 animate-pulse"></span> MediaPipe Loading...
</div>
<div id="face-badge" class="flex items-center gap-2 bg-slate-800/50 px-3 py-1.5 rounded-full border border-white/5 text-xs font-medium text-slate-400 hidden">
<span class="status-dot bg-amber-500 animate-pulse"></span> FaceAPI Loading...
</div>
</div>
</div>
</nav>
<main class="max-w-7xl mx-auto px-6 py-10 pb-24">
<div class="grid grid-cols-1 lg:grid-cols-12 gap-8">
<!-- Left Panel: Configuration -->
<div class="lg:col-span-4 space-y-6">
<!-- Upload Zone -->
<div id="drop-zone" class="glass-panel rounded-2xl p-8 text-center border-2 border-dashed border-indigo-500/20 hover:border-indigo-500/50 transition-all cursor-pointer group relative overflow-hidden">
<input type="file" id="video-input" class="hidden" accept="video/*">
<div class="relative z-10">
<div class="w-14 h-14 bg-indigo-500/10 rounded-2xl flex items-center justify-center mx-auto mb-3 group-hover:scale-110 transition-transform duration-300">
<i class="fas fa-video text-indigo-400 text-xl"></i>
</div>
<h3 class="text-md font-semibold text-white mb-1">Upload Video</h3>
<p class="text-xs text-slate-400">Drag & drop or click to browse</p>
</div>
</div>
<!-- Settings & Advanced -->
<!-- Panel starts dimmed and inert (opacity-40 pointer-events-none); presumably re-enabled once a video is loaded — controller code not in this chunk -->
<div id="settings-panel" class="glass-panel rounded-2xl p-6 space-y-6 opacity-40 pointer-events-none transition-all">
<!-- Basic Engine -->
<div class="space-y-4">
<div class="flex items-center justify-between border-b border-white/5 pb-2">
<h4 class="font-bold text-white text-xs uppercase tracking-wider">Engine Settings</h4>
</div>
<!-- a11y fix: labels are now programmatically associated with their sliders
     via for/id, so screen readers announce them and clicking a label focuses
     the control. Nothing else changed. -->
<div class="space-y-2">
<label for="scan-rate" class="text-xs font-semibold text-slate-400 flex justify-between">
Scan Interval
<span id="interval-val" class="text-indigo-400">0.5s</span>
</label>
<input type="range" id="scan-rate" min="0.1" max="2.0" step="0.1" value="0.5" class="w-full h-1.5 bg-slate-700 rounded-lg appearance-none cursor-pointer accent-indigo-500">
</div>
<div class="space-y-2">
<label for="confidence" class="text-xs font-semibold text-slate-400 flex justify-between">
Detection Confidence
<span id="conf-val" class="text-indigo-400">50%</span>
</label>
<input type="range" id="confidence" min="0.3" max="0.9" step="0.05" value="0.5" class="w-full h-1.5 bg-slate-700 rounded-lg appearance-none cursor-pointer accent-indigo-500">
</div>
</div>
<!-- Advanced Features -->
<div class="space-y-4">
<div class="flex items-center justify-between border-b border-white/5 pb-2">
<h4 class="font-bold text-white text-xs uppercase tracking-wider text-purple-400">Advanced Extraction</h4>
</div>
<!-- Extract All Frames Toggle -->
<!-- Each toggle is a styled checkbox + label pair; the .toggle-checkbox /
     .toggle-label rules in <head> drive the switch appearance. -->
<div class="flex items-center justify-between mb-4 border-b border-white/5 pb-4">
<div class="flex flex-col">
<span class="text-sm font-medium text-slate-200">Extract Full Frames</span>
<span class="text-[10px] text-slate-500">Extracts entire frames, skipping AI filters</span>
</div>
<div class="relative inline-block w-10 mr-2 align-middle select-none transition duration-200 ease-in">
<input type="checkbox" name="toggle" id="extract-all-toggle" class="toggle-checkbox absolute block w-5 h-5 rounded-full bg-white border-4 appearance-none cursor-pointer"/>
<label for="extract-all-toggle" class="toggle-label block overflow-hidden h-5 rounded-full bg-slate-600 cursor-pointer"></label>
</div>
</div>
<div id="advanced-filters-container" class="space-y-4 transition-all duration-300">
<!-- Smart Body Crop Toggle -->
<div class="flex items-center justify-between">
<div class="flex flex-col">
<span class="text-sm font-medium text-slate-200">Smart Body Crop</span>
<span class="text-[10px] text-slate-500">Extracts the person bounding box</span>
</div>
<div class="relative inline-block w-10 mr-2 align-middle select-none transition duration-200 ease-in">
<input type="checkbox" name="toggle" id="auto-crop-toggle" class="toggle-checkbox absolute block w-5 h-5 rounded-full bg-white border-4 appearance-none cursor-pointer"/>
<label for="auto-crop-toggle" class="toggle-label block overflow-hidden h-5 rounded-full bg-slate-600 cursor-pointer"></label>
</div>
</div>
<!-- 512x512 Face Crop Toggle -->
<div class="flex items-center justify-between">
<div class="flex flex-col">
<span class="text-sm font-medium text-slate-200">Tight Face Crop (512px)</span>
<span class="text-[10px] text-slate-500">Extracts faces specifically in 512x512</span>
</div>
<div class="relative inline-block w-10 mr-2 align-middle select-none transition duration-200 ease-in">
<input type="checkbox" name="toggle" id="face-crop-toggle" class="toggle-checkbox absolute block w-5 h-5 rounded-full bg-white border-4 appearance-none cursor-pointer"/>
<label for="face-crop-toggle" class="toggle-label block overflow-hidden h-5 rounded-full bg-slate-600 cursor-pointer"></label>
</div>
</div>
<!-- Require Visible Face Toggle -->
<div class="flex items-center justify-between">
<div class="flex flex-col">
<span class="text-sm font-medium text-slate-200">Require Visible Face</span>
<span class="text-[10px] text-slate-500">Skip frames with bodies but no faces</span>
</div>
<div class="relative inline-block w-10 mr-2 align-middle select-none transition duration-200 ease-in">
<input type="checkbox" name="toggle" id="require-face-toggle" class="toggle-checkbox absolute block w-5 h-5 rounded-full bg-white border-4 appearance-none cursor-pointer"/>
<label for="require-face-toggle" class="toggle-label block overflow-hidden h-5 rounded-full bg-slate-600 cursor-pointer"></label>
</div>
</div>
<!-- Target Face Match -->
<div class="bg-slate-800/40 p-3 rounded-xl border border-white/5">
<div class="flex flex-col mb-2">
<span class="text-sm font-medium text-slate-200">Target Face Match</span>
<span class="text-[10px] text-slate-500">Only extract frames containing this person</span>
</div>
<div class="flex items-center gap-3 mt-3">
<div id="face-upload-btn" class="flex-grow bg-slate-700 hover:bg-slate-600 text-xs text-center py-2 rounded-lg cursor-pointer transition-colors border border-dashed border-slate-500">
<i class="fas fa-camera mr-1"></i> Upload Target Face
</div>
<input type="file" id="face-input" class="hidden" accept="image/*">
<!-- a11y fix: the preview image now has alt text; the spinner icon is decorative -->
<img id="target-face-preview" class="hidden w-10 h-10 object-cover rounded-full border-2 border-indigo-500 shadow-[0_0_10px_rgba(99,102,241,0.5)]" alt="Target face preview">
</div>
<p id="face-status-text" class="text-[10px] text-amber-400 mt-2 hidden text-center"><i class="fas fa-spinner animate-spin" aria-hidden="true"></i> Analyzing face...</p>
</div>
</div>
<!-- Fix: explicit type="button"; decorative icon hidden from assistive tech -->
<button id="start-btn" type="button" class="w-full bg-indigo-600 hover:bg-indigo-500 text-white font-bold py-3 rounded-xl shadow-xl shadow-indigo-500/10 transition-all flex items-center justify-center gap-3">
<i class="fas fa-microchip" aria-hidden="true"></i> Start AI Extraction
</button>
</div>
<!-- Monitoring Window -->
<div class="glass-panel rounded-2xl overflow-hidden relative shadow-2xl group border-2 border-[#0f172a]">
<!-- .scan-line is display:none by default; presumably toggled on during extraction — controller code not in this chunk -->
<div class="scan-line" id="scanner"></div>
<canvas id="preview-canvas" class="w-full aspect-video bg-black object-contain"></canvas>
<div class="absolute bottom-0 left-0 right-0 p-3 bg-gradient-to-t from-black/80 to-transparent flex items-center justify-between">
<span class="text-[10px] font-bold tracking-widest text-indigo-400 uppercase">Live Monitor</span>
<div id="fps-counter" class="text-[10px] font-mono text-slate-400">-- FPS</div>
</div>
</div>
</div>
<!-- Right Panel: Results Gallery -->
<div class="lg:col-span-8 flex flex-col h-[calc(100vh-160px)] min-h-[600px]">
<div class="glass-panel rounded-3xl flex flex-col h-full overflow-hidden border border-white/10 relative">
<!-- Toolbar -->
<div class="p-6 border-b border-white/5 flex flex-col md:flex-row gap-4 items-start md:items-center justify-between bg-white/[0.02]">
<div>
<h2 class="text-xl font-bold text-white">Detection Gallery</h2>
<p id="stats-text" class="text-sm text-slate-500">System idle. Awaiting video upload.</p>
</div>
<div class="flex flex-wrap gap-2">
<!-- Action buttons, revealed once results exist. Fixes: explicit
     type="button" on each, decorative icons hidden from assistive tech. -->
<button id="select-all-btn" type="button" class="hidden px-4 py-2.5 text-slate-300 hover:text-white text-sm font-medium transition-colors border border-transparent hover:border-white/10 rounded-xl">
Select All
</button>
<button id="export-immager-btn" type="button" class="hidden px-5 py-2.5 bg-purple-600 hover:bg-purple-500 text-white text-sm font-bold rounded-xl transition-all flex items-center gap-2 shadow-lg shadow-purple-500/20">
<i class="fas fa-magic" aria-hidden="true"></i> Export All to Immager
</button>
<button id="download-btn" type="button" class="hidden px-5 py-2.5 bg-emerald-500 hover:bg-emerald-400 text-white text-sm font-bold rounded-xl transition-all flex items-center gap-2 shadow-lg shadow-emerald-500/20">
<i class="fas fa-file-export" aria-hidden="true"></i> Download All
</button>
</div>
</div>
<!-- Progress Bar -->
<!-- Hidden until processing starts; #progress-bar inline width is updated by script -->
<div id="progress-container" class="px-6 py-4 bg-indigo-500/5 hidden border-b border-white/5">
<div class="flex justify-between items-center mb-2">
<span class="text-xs font-bold text-indigo-300 uppercase tracking-tighter" id="status-label">Analyzing Frames...</span>
<span class="text-xs font-mono text-indigo-300" id="progress-percent">0%</span>
</div>
<div class="w-full bg-white/5 h-1.5 rounded-full overflow-hidden">
<div id="progress-bar" class="h-full bg-indigo-500 transition-all duration-300 shadow-[0_0_10px_#6366f1]" style="width: 0%"></div>
</div>
</div>
<!-- Gallery Grid -->
<div id="results-container" class="flex-grow overflow-y-auto p-6 relative">
<div id="results" class="flex flex-wrap gap-4 content-start"></div>
<!-- Placeholder -->
<div id="empty-state" class="absolute inset-0 flex flex-col items-center justify-center opacity-20">
<i class="fas fa-images text-7xl mb-6"></i>
<p class="text-lg font-medium">No frames extracted yet</p>
</div>
</div>
</div>
</div>
</div>
</main>
<!-- Off-screen <video> element used as the decode source for frame extraction -->
<video id="hidden-video" class="hidden" muted></video>
<!-- Fullscreen Image Preview Modal -->
<!-- Fullscreen preview overlay. a11y fixes: the icon-only close button gets
     an accessible name (aria-label) and explicit type; the preview <img>
     gets alt text. -->
<div id="video-preview-modal" class="fixed inset-0 z-[100] bg-black/90 hidden flex items-center justify-center p-4 backdrop-blur-md">
<button id="close-preview" type="button" aria-label="Close preview" class="absolute top-6 right-6 text-white text-4xl hover:text-indigo-400 transition-colors z-50">
<i class="fas fa-times" aria-hidden="true"></i>
</button>
<img id="preview-modal-img" src="" alt="Extracted frame preview" class="max-w-full max-h-[90vh] object-contain rounded-lg shadow-2xl border border-white/10">
</div>
</div>
<!-- ========================================== -->
<!-- LOGIC: APP SWITCHER -->
<!-- ========================================== -->
<script>
// Show one of the two app panes and restyle the tab pill to match.
//
// BUG FIX: the previous implementation used classList.replace(old, new),
// which silently does nothing when `old` is absent — and #tab-videoflow never
// carries 'bg-transparent' in the initial markup, so switching to videoflow
// the first time never applied the active highlight. add()/remove() are
// idempotent and state-independent, so the tabs now end up correct from any
// starting state.
//
// appName: 'immager' shows the React app; any other value shows videoflow.
function switchApp(appName) {
  const immagerApp = document.getElementById('app-immager');
  const videoflowApp = document.getElementById('app-videoflow');
  const tabImmager = document.getElementById('tab-immager');
  const tabVideoflow = document.getElementById('tab-videoflow');
  const showImmager = appName === 'immager';
  // Toggle pane visibility.
  immagerApp.classList.toggle('hidden', !showImmager);
  videoflowApp.classList.toggle('hidden', showImmager);
  // Restyle the tabs: active gets the filled indigo pill, inactive the muted hover style.
  const activeTab = showImmager ? tabImmager : tabVideoflow;
  const inactiveTab = showImmager ? tabVideoflow : tabImmager;
  activeTab.classList.add('bg-indigo-600', 'text-white');
  activeTab.classList.remove('bg-transparent', 'text-white/60', 'hover:bg-white/10');
  inactiveTab.classList.add('bg-transparent', 'text-white/60', 'hover:bg-white/10');
  inactiveTab.classList.remove('bg-indigo-600', 'text-white');
  window.scrollTo(0, 0);
}
</script>
<!-- ========================================== -->
<!-- LOGIC: APP 1 (IMMAGER - REACT) -->
<!-- ========================================== -->
<script type="text/babel">
// Pull React hooks off the UMD global (no bundler; Babel standalone compiles this block in the browser).
const { useState, useEffect, useCallback, useRef } = React;
// Inline SVG logo component for the Immager app.
const LogoIcon = ({size=24, className=""}) => (
<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 512 512" className={className}>
<rect width="512" height="512" rx="120" fill="#12C369"/>
<circle cx="380" cy="130" r="55" fill="#ffffff"/>
<path d="M310 280 L440 430 L180 430 Z" fill="#91E6B3" stroke="#91E6B3" strokeWidth="30" strokeLinejoin="round"/>
<path d="M220 220 L340 430 L100 430 Z" fill="#ffffff" stroke="#ffffff" strokeWidth="40" strokeLinejoin="round"/>
</svg>
);
// Inline stroke-icon components. All share the same prop shape ({size, className})
// so they can be swapped freely in the JSX below.
const UsersIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M16 21v-2a4 4 0 0 0-4-4H6a4 4 0 0 0-4 4v2"/><circle cx="9" cy="7" r="4"/><path d="M22 21v-2a4 4 0 0 0-3-3.87"/><path d="M16 3.13a4 4 0 0 1 0 7.75"/></svg>);
const Trash2Icon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>);
const UploadCloudIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M4 14.899A7 7 0 1 1 15.71 8h1.79a4.5 4.5 0 0 1 2.5 8.242"/><path d="M12 12v9"/><path d="m16 16-4-4-4 4"/></svg>);
const Loader2Icon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M21 12a9 9 0 1 1-6.219-8.56"/></svg>);
const CheckCircleIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M22 11.08V12a10 10 0 1 1-5.93-9.14"/><path d="m9 11 3 3L22 4"/></svg>);
const ImageIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><rect width="18" height="18" x="3" y="3" rx="2" ry="2"/><circle cx="9" cy="9" r="2"/><path d="m21 15-3.086-3.086a2 2 0 0 0-2.828 0L6 21"/></svg>);
const CheckSquareIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><polyline points="9 11 12 14 22 4"/><path d="M21 12v7a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V5a2 2 0 0 1 2-2h11"/></svg>);
const SquareIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><rect width="18" height="18" x="3" y="3" rx="2" ry="2"/></svg>);
const DownloadIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/><polyline points="7 10 12 15 17 10"/><line x1="12" x2="12" y1="15" y2="3"/></svg>);
const SettingsIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M12.22 2h-.44a2 2 0 0 0-2 2v.18a2 2 0 0 1-1 1.73l-.43.25a2 2 0 0 1-2 0l-.15-.08a2 2 0 0 0-2.73.73l-.22.38a2 2 0 0 0 .73 2.73l.15.1a2 2 0 0 1 1 1.72v.51a2 2 0 0 1-1 1.74l-.15.09a2 2 0 0 0-.73 2.73l.22.38a2 2 0 0 0 2.73.73l.15-.08a2 2 0 0 1 2 0l.43.25a2 2 0 0 1 1-1.73V20a2 2 0 0 0 2 2h.44a2 2 0 0 0 2-2v-.18a2 2 0 0 1 1-1.73l.43-.25a2 2 0 0 1 2 0l.15.08a2 2 0 0 0 2.73-.73l.22-.39a2 2 0 0 0-.73-2.73l-.15-.08a2 2 0 0 1-1-1.74v-.5a2 2 0 0 1 1-1.74l.15-.09a2 2 0 0 0 .73-2.73l-.22-.38a2 2 0 0 0-2.73-.73l-.15.08a2 2 0 0 1-2 0l-.43-.25a2 2 0 0 1-1-1.73V4a2 2 0 0 0-2-2z"/><circle cx="12" cy="12" r="3"/></svg>);
const EditIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><path d="M12 20h9"/><path d="M16.5 3.5a2.12 2.12 0 0 1 3 3L7 19l-4 1 1-4Z"/></svg>);
const XIcon = ({size=24, className=""}) => (<svg xmlns="http://www.w3.org/2000/svg" width={size} height={size} viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round" className={className}><line x1="18" y1="6" x2="6" y2="18"/><line x1="6" y1="6" x2="18" y2="18"/></svg>);
// Main React component for the Immager app: batch face extraction,
// per-person clustering, manual crop editing, and drag&drop regrouping.
// (Component body continues beyond this section.)
function FaceExtractApp() {
// --- Model / pipeline status ---
const [isModelLoading, setIsModelLoading] = useState(true);
const [isProcessing, setIsProcessing] = useState(false);
const [progress, setProgress] = useState(0);
const [statusText, setStatusText] = useState('Initializing Engine...');
// --- Extraction results & selection ---
const [faceGroups, setFaceGroups] = useState([]);
const [selectedFaceIds, setSelectedFaceIds] = useState(new Set());
const fileInputRef = useRef(null);
const [extractedFaces, setExtractedFaces] = useState([]);
// Clustering threshold: faces whose descriptor distance is below this join the same group.
const [matchThreshold, setMatchThreshold] = useState(0.50);
// --- Crop settings & manual editor state ---
const [cropSettings, setCropSettings] = useState({ padding: 0.05, topPadding: 0.2, shape: 'square' });
const [showSettings, setShowSettings] = useState(false);
const [editingFace, setEditingFace] = useState(null);
const editorImgRef = useRef(null);
const [dragState, setDragState] = useState({ isDragging: false, startX: 0, startY: 0, initialOffsetX: 0, initialOffsetY: 0 });
// --- Thumbnail drag & drop between groups ---
const [dragOverGroupId, setDragOverGroupId] = useState(null);
const [isDraggingFace, setIsDraggingFace] = useState(false);
const draggedFaceRef = useRef(null);
// Files handed over from the VideoFlow app, queued until the models are ready.
const [pendingTransferFiles, setPendingTransferFiles] = useState(null);
// Listen for cross-app transfer event
useEffect(() => {
const handleTransfer = (e) => {
setPendingTransferFiles(e.detail);
};
window.addEventListener('SEND_TO_IMMAGER', handleTransfer);
return () => window.removeEventListener('SEND_TO_IMMAGER', handleTransfer);
}, []);
// Initialize models
useEffect(() => {
const initModels = async () => {
// Poll until the CDN <script> globals are actually present.
while(!window.faceapi || !window.JSZip) {
await new Promise(r => setTimeout(r, 100));
}
try {
setStatusText("Loading AI Models (~5MB)...");
const MODEL_URL = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api@1.7.12/model/';
await window.faceapi.nets.ssdMobilenetv1.loadFromUri(MODEL_URL);
await window.faceapi.nets.faceLandmark68Net.loadFromUri(MODEL_URL);
await window.faceapi.nets.faceRecognitionNet.loadFromUri(MODEL_URL);
setIsModelLoading(false);
setStatusText("Ready");
} catch (error) {
console.error(error);
setStatusText("Error loading AI models.");
}
};
initModels();
}, []);
// Trigger processing of transferred files once models load
useEffect(() => {
if (pendingTransferFiles && pendingTransferFiles.length > 0 && !isModelLoading && !isProcessing) {
processImages(pendingTransferFiles);
setPendingTransferFiles(null);
}
}, [pendingTransferFiles, isModelLoading, isProcessing]);
// Render a cropped JPEG (as a data URL) of detection `box` from `img`,
// honoring the global padding/shape settings plus optional per-face manual
// pan/zoom/resolution offsets. DOM-dependent (off-screen <canvas>).
const generateCrop = (img, box, settings, manualOffsets = { x: 0, y: 0, zoom: 1, resolution: 'auto' }) => {
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
// Symmetric padding around the box, plus extra headroom above (topPadding).
let padX = box.width * settings.padding;
let padY = box.height * settings.padding;
let tw = box.width + (padX * 2);
let th = box.height + padY + (box.height * settings.topPadding);
if (settings.shape === 'square') {
const size = Math.max(tw, th);
tw = size;
th = size;
}
// Larger zoom shrinks the sampled region (tighter crop).
tw /= manualOffsets.zoom;
th /= manualOffsets.zoom;
// Crop center: box center shifted up by half the headroom, then nudged by
// the user's manual x/y offsets from the editor.
let cx = box.x + box.width / 2;
let cy = box.y + box.height / 2 - (box.height * settings.topPadding / 2) + (padY / 2);
cx += manualOffsets.x;
cy += manualOffsets.y;
// Clamp the crop rect to the image bounds (top/left clamp, then size clamp).
const cropX = Math.max(0, cx - tw / 2);
const cropY = Math.max(0, cy - th / 2);
const cropW = Math.min(img.width - cropX, tw);
const cropH = Math.min(img.height - cropY, th);
let targetW = cropW;
let targetH = cropH;
// Fixed output resolution when requested ('auto' keeps the native crop size);
// 'original' shape preserves the crop's aspect ratio at that width.
if (manualOffsets.resolution && manualOffsets.resolution !== 'auto') {
const res = parseInt(manualOffsets.resolution, 10);
targetW = res;
targetH = settings.shape === 'original' ? Math.round(res * (cropH / cropW)) : res;
}
canvas.width = targetW;
canvas.height = targetH;
ctx.drawImage(img, cropX, cropY, cropW, cropH, 0, 0, targetW, targetH);
return canvas.toDataURL('image/jpeg', 0.9);
};
// Open the manual crop editor for a face: load the original source image at
// full quality, then seed the editor state with the face's current crop.
//
// FIX: previously only `onload` was awaited, so a failed image load left the
// promise pending forever (editor never opened, no error surfaced). The load
// now rejects on error, surfacing the failure to the caller.
const openEditor = async (face, groupIndex, faceIndex) => {
const img = new Image();
await new Promise((resolve, reject) => {
img.onload = resolve;
img.onerror = () => reject(new Error(`Failed to load source image for ${face.sourceFile}`));
img.src = face.sourceUrl;
});
editorImgRef.current = img;
setEditingFace({
...face,
groupIndex,
faceIndex,
// Per-face manual adjustments default to "no adjustment".
manualOffsets: face.manualOffsets || { x: 0, y: 0, zoom: 1, resolution: 'auto' },
previewUrl: face.cropDataUrl
});
// Reset any drag left over from a previous editing session.
setDragState({ isDragging: false, startX: 0, startY: 0, initialOffsetX: 0, initialOffsetY: 0 });
};
// Merge a partial offsets change into the editor state and re-render the
// crop preview from the cached full-resolution source image.
const updateManualCrop = (changes) => {
if (!editingFace || !editorImgRef.current) return;
const mergedOffsets = Object.assign({}, editingFace.manualOffsets, changes);
const refreshedPreview = generateCrop(editorImgRef.current, editingFace.originalBox, cropSettings, mergedOffsets);
setEditingFace(current => ({ ...current, manualOffsets: mergedOffsets, previewUrl: refreshedPreview }));
};
// Commit the editor's preview crop and offsets back onto the edited face.
//
// FIX: the previous version mutated the face object held in React state in
// place (newGroups[g].faces[f].cropDataUrl = ...) before calling
// setFaceGroups, which breaks referential-equality assumptions React relies
// on. This version rebuilds only the affected group and face immutably.
const saveManualCrop = () => {
const { groupIndex, faceIndex, previewUrl, manualOffsets } = editingFace;
setFaceGroups(prev => prev.map((group, gi) => {
if (gi !== groupIndex) return group;
return {
...group,
faces: group.faces.map((f, fi) => fi === faceIndex ? { ...f, cropDataUrl: previewUrl, manualOffsets } : f)
};
}));
setEditingFace(null);
};
// Rename a single group (person) by id; all other groups are untouched.
const updateGroupName = (groupId, newName) => {
setFaceGroups(current =>
current.map(group => (group.id === groupId ? { ...group, name: newName } : group))
);
};
// --- Drag & drop of face thumbnails between groups ---
// Drag begins: stash the dragged face and its source group in a ref so the
// info survives re-renders without causing them.
const handleDragStart = (event, faceId, sourceGroupId) => {
draggedFaceRef.current = { faceId, sourceGroupId };
event.dataTransfer.setData('text/plain', faceId);
event.dataTransfer.effectAllowed = 'move';
// Defer the visual drag state one tick so the browser snapshots the drag image first.
window.setTimeout(() => setIsDraggingFace(true), 0);
};
// Drag finished (dropped or cancelled): clear all transient drag state.
const handleDragEnd = (event) => {
draggedFaceRef.current = null;
setIsDraggingFace(false);
setDragOverGroupId(null);
};
// Hovering a potential drop target: allow the move and highlight the target.
const handleDragOver = (event, targetGroupId) => {
event.preventDefault();
event.dataTransfer.dropEffect = 'move';
if (dragOverGroupId !== targetGroupId) setDragOverGroupId(targetGroupId);
};
// Leaving a target: just suppress the browser default.
const handleDragLeave = (event) => { event.preventDefault(); };
// Drop a dragged face onto a group (or the synthetic 'new-group' target).
// Note: the state update is wrapped in setTimeout(…, 0) so the drop event
// fully unwinds before React state changes (mirrors handleDragStart).
const handleDrop = (e, targetGroupId) => {
e.preventDefault();
setIsDraggingFace(false);
setDragOverGroupId(null);
const data = draggedFaceRef.current;
if (!data) return;
const { faceId, sourceGroupId } = data;
// Dropping back onto its own group is a no-op.
if (sourceGroupId === targetGroupId) return;
setTimeout(() => {
setFaceGroups(prevGroups => {
let movedFace = null;
// Pass 1: remove the face from its source group, capturing it.
let updatedGroups = prevGroups.map(group => {
if (group.id === sourceGroupId) {
const faceIndex = group.faces.findIndex(f => f.id === faceId);
if (faceIndex > -1) {
movedFace = group.faces[faceIndex];
return { ...group, faces: group.faces.filter(f => f.id !== faceId) };
}
}
return group;
});
// Source face vanished (stale drag) — bail without changing state.
if (!movedFace) return prevGroups;
// Pass 2: insert into the target — a brand-new group at the top…
if (targetGroupId === 'new-group') {
updatedGroups.unshift({
id: `group-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
name: '',
baseDescriptor: movedFace.descriptor,
faces: [movedFace]
});
} else {
// …or appended to an existing group.
updatedGroups = updatedGroups.map(group => {
if (group.id === targetGroupId) return { ...group, faces: [...group.faces, movedFace] };
return group;
});
}
// Drop any group emptied by the move.
return updatedGroups.filter(g => g.faces.length > 0);
});
}, 0);
};
// Re-crop every extracted face under new global crop settings, then sync the
// refreshed crop URLs into the clustered groups.
//
// FIX: the per-image load promise now also rejects on error — previously a
// broken source URL left the await pending forever with the UI stuck in the
// "processing" state. Errors land in the existing catch/finally.
const applyGlobalCropSettings = async (newSettings) => {
setCropSettings(newSettings);
if (extractedFaces.length === 0) return;
setIsProcessing(true);
setStatusText("Applying new crop settings...");
// Yield briefly so the busy state paints before the heavy re-crop loop.
await new Promise(resolve => setTimeout(resolve, 50));
try {
// Decode each source image once and reuse it for all of its faces.
const imageCache = {};
const updatedFaces = [];
for (let i = 0; i < extractedFaces.length; i++) {
const face = extractedFaces[i];
let img = imageCache[face.sourceUrl];
if (!img) {
img = new Image();
await new Promise((resolve, reject) => {
img.onload = resolve;
img.onerror = () => reject(new Error(`Failed to load ${face.sourceFile}`));
img.src = face.sourceUrl;
});
imageCache[face.sourceUrl] = img;
}
const newCropDataUrl = generateCrop(img, face.originalBox, newSettings, face.manualOffsets || { x: 0, y: 0, zoom: 1, resolution: 'auto' });
updatedFaces.push({ ...face, cropDataUrl: newCropDataUrl });
}
setExtractedFaces(updatedFaces);
// Propagate refreshed crop URLs into the grouped view.
setFaceGroups(prevGroups => prevGroups.map(group => ({
...group,
faces: group.faces.map(gFace => {
const updatedFace = updatedFaces.find(uf => uf.id === gFace.id);
return updatedFace ? { ...gFace, cropDataUrl: updatedFace.cropDataUrl } : gFace;
})
})));
} catch (error) {
console.error("Error applying crop settings:", error);
} finally {
setIsProcessing(false);
setStatusText("Done");
}
};
// Greedy single-pass clustering: each face joins the FIRST existing group
// whose base descriptor is within `threshold` (euclidean distance); otherwise
// it seeds a new group. Groups are then ordered largest-first.
const clusterFaces = useCallback((facesToCluster, threshold) => {
const clustered = [];
for (const face of facesToCluster) {
const home = clustered.find(
group => window.faceapi.euclideanDistance(group.baseDescriptor, face.descriptor) < threshold
);
if (home) {
home.faces.push(face);
} else {
clustered.push({
id: `group-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
name: '',
baseDescriptor: face.descriptor,
faces: [face]
});
}
}
// Biggest clusters first.
clustered.sort((a, b) => b.faces.length - a.faces.length);
setFaceGroups(clustered);
}, []);
// Run face detection over a batch of image File objects, collect one entry
// per detected face, then re-cluster the whole pool by person identity.
//
// FIX: a blob URL is now only allocated when an image actually yields faces;
// the old code called URL.createObjectURL for every image and leaked the URL
// whenever there were zero detections (nothing ever referenced or revoked it).
const processImages = async (files) => {
if (!files || files.length === 0) return;
setIsProcessing(true);
// Append rather than replace if adding from VideoFlow while groups exist
const startingFaces = faceGroups.length > 0 ? extractedFaces : [];
const allExtractedFaces = [...startingFaces];
for (let i = 0; i < files.length; i++) {
const file = files[i];
if (!file.type.startsWith('image/')) continue;
setStatusText(`Scanning image ${i + 1} of ${files.length}...`);
setProgress((i / files.length) * 100);
try {
// Decode the file into an HTMLImageElement via a data URL.
const img = await new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (e) => {
const image = new Image();
image.onload = () => resolve(image);
image.onerror = reject;
image.src = e.target.result;
};
reader.onerror = reject;
reader.readAsDataURL(file);
});
const detections = await window.faceapi.detectAllFaces(img)
.withFaceLandmarks()
.withFaceDescriptors();
if (detections.length > 0) {
// One shared source URL per contributing image; reused by the editor.
const sourceUrl = URL.createObjectURL(file);
detections.forEach((det) => {
const cropDataUrl = generateCrop(img, det.detection.box, cropSettings);
allExtractedFaces.push({
id: `face-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
sourceFile: file.name,
sourceUrl: sourceUrl,
originalBox: det.detection.box,
cropDataUrl,
descriptor: det.descriptor
});
});
}
} catch (err) { console.error(`Error processing ${file.name}`, err); }
}
setStatusText("Organizing faces by person...");
setExtractedFaces(allExtractedFaces);
clusterFaces(allExtractedFaces, matchThreshold);
setIsProcessing(false);
setProgress(100);
setStatusText("Done");
};
const onDrop = useCallback((e) => {
  // Accept dropped image files only while the app is idle.
  e.preventDefault();
  if (isModelLoading || isProcessing) return;
  processImages(Array.from(e.dataTransfer.files));
}, [isModelLoading, isProcessing]);
const onFileChange = (e) => {
  // Forward file-picker selections to the shared processing pipeline.
  const picked = e.target.files;
  if (picked && picked.length > 0) {
    processImages(Array.from(picked));
  }
};
const toggleFace = (id) => {
  // Flip a single face in or out of the current selection set.
  const updated = new Set(selectedFaceIds);
  if (updated.has(id)) {
    updated.delete(id);
  } else {
    updated.add(id);
  }
  setSelectedFaceIds(updated);
};
const toggleGroup = (group) => {
  // Select every face in the group — or, if every face is already
  // selected, deselect them all.
  const updated = new Set(selectedFaceIds);
  const everySelected = group.faces.every(f => updated.has(f.id));
  for (const f of group.faces) {
    if (everySelected) {
      updated.delete(f.id);
    } else {
      updated.add(f.id);
    }
  }
  setSelectedFaceIds(updated);
};
const clearAll = () => {
  // Reset the workspace back to its initial empty state.
  setFaceGroups([]);
  setExtractedFaces([]);
  setSelectedFaceIds(new Set());
  setProgress(0);
};
const downloadSelected = async () => {
  // Package every selected face into a ZIP archive — one folder per
  // person — and hand it to the browser as a download.
  if (selectedFaceIds.size === 0) return;
  setIsProcessing(true);
  setStatusText("Generating ZIP archive...");
  try {
    const zip = new window.JSZip();
    let totalExported = 0;
    faceGroups.forEach((group, gIndex) => {
      const hasName = group.name && group.name.trim() !== '';
      const folderName = hasName ? group.name.trim() : `Person_${gIndex + 1}`;
      const filePrefix = hasName ? group.name.replace(/[^a-z0-9]/gi, '').toLowerCase() : 'face';
      const picked = group.faces.filter(f => selectedFaceIds.has(f.id));
      if (picked.length === 0) return; // nothing selected in this group
      const folder = zip.folder(folderName);
      picked.forEach((face, fIndex) => {
        // cropDataUrl is a data: URL — strip its header to get raw base64.
        const base64Data = face.cropDataUrl.split(',')[1];
        const fileName = hasName ? `${filePrefix}${fIndex + 1}.jpg` : `face_${fIndex + 1}.jpg`;
        folder.file(fileName, base64Data, {base64: true});
        totalExported++;
      });
    });
    const content = await zip.generateAsync({type: "blob"});
    const url = URL.createObjectURL(content);
    const link = document.createElement("a");
    link.href = url;
    link.download = `Extracted_Faces_${totalExported}.zip`;
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
    URL.revokeObjectURL(url);
  } catch (e) {
    console.error("ZIP Generation Failed", e);
    alert("Failed to generate ZIP file.");
  } finally {
    setIsProcessing(false);
    setStatusText("Ready");
  }
};
const downloadSingleFace = (e, face, group, fIndex) => {
  // Save one cropped face, without toggling the bulk-selection state.
  e.stopPropagation();
  const hasName = group.name && group.name.trim() !== '';
  const filePrefix = hasName ? group.name.replace(/[^a-z0-9]/gi, '').toLowerCase() : 'face';
  const fileName = hasName ? `${filePrefix}${fIndex + 1}.jpg` : `face_${fIndex + 1}.jpg`;
  const link = document.createElement("a");
  link.href = face.cropDataUrl;
  link.download = fileName;
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
};
return (
<div className="min-h-screen font-sans">
<header className="sticky top-0 z-30 bg-neutral-950/80 backdrop-blur-md border-b border-neutral-800 px-6 py-4 flex items-center justify-between">
<div className="flex items-center gap-4">
<LogoIcon size={42} className="shadow-lg drop-shadow-md" />
<div className="flex flex-col">
<h1 className="text-2xl font-extrabold tracking-[0.2em] text-white uppercase" style={{ fontFamily: "'Montserrat', sans-serif" }}>immager</h1>
<p className="text-[10px] sm:text-xs text-neutral-400 tracking-wider uppercase mt-0.5">crop faces easily in bulk</p>
</div>
</div>
<div className="flex items-center gap-4">
<button onClick={() => setShowSettings(true)} className="text-sm flex items-center gap-2 text-neutral-400 hover:text-white transition-colors">
<SettingsIcon size={16} /> Crop Settings
</button>
{faceGroups.length > 0 && (
<button onClick={clearAll} className="text-sm flex items-center gap-2 text-neutral-400 hover:text-white transition-colors">
<Trash2Icon size={16} /> Start Over
</button>
)}
</div>
</header>
<main className="max-w-7xl mx-auto p-6 pb-32">
{faceGroups.length === 0 && (
<div className="mt-12">
<div
onDragOver={(e) => e.preventDefault()}
onDrop={onDrop}
onClick={() => !isModelLoading && !isProcessing && fileInputRef.current.click()}
className={`
relative w-full max-w-2xl mx-auto flex flex-col items-center justify-center p-16
border-2 border-dashed rounded-3xl transition-all duration-200
${isModelLoading || isProcessing
? 'border-neutral-800 bg-neutral-900/30 cursor-not-allowed'
: 'border-neutral-700 bg-neutral-900/50 hover:bg-neutral-800 hover:border-indigo-500 cursor-pointer'}
`}
>
<input type="file" multiple accept="image/*" ref={fileInputRef} onChange={onFileChange} className="hidden" />
{(isModelLoading || isProcessing) ? (
<div className="flex flex-col items-center text-center">
<Loader2Icon size={48} className="text-indigo-500 animate-spin mb-6" />
<h3 className="text-xl font-medium text-white mb-2">{statusText}</h3>
{isProcessing && progress > 0 && (
<div className="w-full max-w-xs bg-neutral-800 rounded-full h-2 mt-4 overflow-hidden">
<div className="bg-indigo-500 h-2 rounded-full transition-all duration-300 ease-out" style={{ width: `${progress}%` }} />
</div>
)}
</div>
) : (
<div className="flex flex-col items-center text-center">
<div className="bg-neutral-800 p-4 rounded-2xl mb-6 shadow-inner">
<UploadCloudIcon size={40} className="text-indigo-400" />
</div>
<h3 className="text-2xl font-medium text-white mb-3">Upload Images</h3>
<p className="text-neutral-400 max-w-sm mb-6">Drag and drop your photos here, or click to browse. We'll find and group the faces.</p>
<button className="bg-white text-black px-6 py-2.5 rounded-full font-medium hover:bg-neutral-200 transition-colors">Select Images</button>
</div>
)}
</div>
{!isModelLoading && !isProcessing && (
<div className="grid grid-cols-1 md:grid-cols-3 gap-6 max-w-4xl mx-auto mt-16 text-center">
<div className="bg-neutral-900/30 p-6 rounded-2xl border border-neutral-800/50">
<UsersIcon size={24} className="mx-auto text-indigo-400 mb-4" />
<h4 className="font-medium text-white mb-2">Smart Clustering</h4>
<p className="text-sm text-neutral-400">Groups faces of the same person together automatically.</p>
</div>
<div className="bg-neutral-900/30 p-6 rounded-2xl border border-neutral-800/50">
<ImageIcon size={24} className="mx-auto text-emerald-400 mb-4" />
<h4 className="font-medium text-white mb-2">Auto-Cropping</h4>
<p className="text-sm text-neutral-400">Extracts perfectly framed headshots ready for use.</p>
</div>
<div className="bg-neutral-900/30 p-6 rounded-2xl border border-neutral-800/50">
<CheckCircleIcon size={24} className="mx-auto text-amber-400 mb-4" />
<h4 className="font-medium text-white mb-2">100% Private</h4>
<p className="text-sm text-neutral-400">Everything runs entirely inside your browser. No server uploads.</p>
</div>
</div>
)}
</div>
)}
{faceGroups.length > 0 && (
<div className="animate-in">
{isDraggingFace && (
<div
className={`fixed top-24 left-1/2 -translate-x-1/2 z-50 w-full max-w-sm border-2 border-dashed rounded-2xl flex flex-col items-center justify-center p-6 shadow-2xl backdrop-blur-md transition-all ${dragOverGroupId === 'new-group' ? 'border-indigo-400 bg-indigo-500/30 scale-105' : 'border-neutral-500 bg-neutral-900/80 scale-100'}`}
onDragOver={(e) => handleDragOver(e, 'new-group')}
onDragLeave={handleDragLeave}
onDrop={(e) => handleDrop(e, 'new-group')}
>
<UsersIcon size={32} className="text-white mb-2" />
<p className="text-white font-medium text-center">Drop here to create a new person</p>
</div>
)}
<div className="flex flex-col md:flex-row md:items-center justify-between gap-4 mb-8">
<div>
<h2 className="text-2xl font-semibold tracking-tight">Found {faceGroups.length} People</h2>
<div className="text-sm text-neutral-400 mt-1">Total unique faces: {faceGroups.reduce((acc, curr) => acc + curr.faces.length, 0)}</div>
</div>
<div className="flex flex-col items-end gap-1 bg-neutral-900/80 p-3 rounded-xl border border-neutral-800 shadow-inner">
<label className="text-sm text-neutral-300 font-medium flex justify-between w-full">
<span>Grouping Tolerance</span>
<span className="text-indigo-400 font-bold">{matchThreshold.toFixed(2)}</span>
</label>
<input type="range" min="0.30" max="0.70" step="0.01" value={matchThreshold} onChange={(e) => { const val = parseFloat(e.target.value); setMatchThreshold(val); clusterFaces(extractedFaces, val); }} className="w-48 md:w-64 accent-indigo-500 cursor-pointer" title="Lower = stricter matching. Higher = looser matching." />
<span className="text-xs text-neutral-500">Slide right to merge similar people</span>
</div>
</div>
<div className="columns-1 md:columns-2 lg:columns-3 xl:columns-4 gap-6 space-y-6">
{faceGroups.map((group, groupIndex) => {
const allSelected = group.faces.every(f => selectedFaceIds.has(f.id));
const someSelected = !allSelected && group.faces.some(f => selectedFaceIds.has(f.id));
return (
<div key={group.id} className={`break-inside-avoid bg-neutral-900 rounded-2xl border transition-colors overflow-hidden group ${dragOverGroupId === group.id ? 'border-indigo-500 shadow-[0_0_15px_rgba(99,102,241,0.3)]' : 'border-neutral-800'}`} onDragOver={(e) => handleDragOver(e, group.id)} onDragLeave={handleDragLeave} onDrop={(e) => handleDrop(e, group.id)}>
<div className="px-5 py-4 flex items-center justify-between border-b border-neutral-800/50 bg-neutral-900/80">
<div className="flex items-center gap-3">
<div className="w-8 h-8 rounded-full overflow-hidden border border-neutral-700">
<img src={group.faces[0].cropDataUrl} className="w-full h-full object-cover pointer-events-none" alt={`Person ${groupIndex + 1}`} />
</div>
<div className="flex flex-col">
<input type="text" value={group.name} placeholder={`Person ${groupIndex + 1}`} onChange={(e) => updateGroupName(group.id, e.target.value)} className="text-sm font-medium bg-transparent border-b border-transparent hover:border-neutral-600 focus:border-indigo-500 outline-none text-white w-32 placeholder:text-neutral-400 transition-colors" />
<div className="text-xs text-neutral-500">{group.faces.length} shots</div>
</div>
</div>
<button onClick={() => toggleGroup(group)} className="text-neutral-400 hover:text-white transition-colors p-1" title={allSelected ? "Deselect All" : "Select All"}>
{allSelected ? <CheckSquareIcon size={20} className="text-indigo-500" /> : someSelected ? <CheckSquareIcon size={20} className="text-indigo-500 opacity-50" /> : <SquareIcon size={20} />}
</button>
</div>
<div className="p-4 grid grid-cols-3 gap-3">
{group.faces.map((face, fIndex) => {
const isSelected = selectedFaceIds.has(face.id);
return (
<div key={face.id} draggable={true} onDragStart={(e) => handleDragStart(e, face.id, group.id)} onDragEnd={handleDragEnd} onClick={() => toggleFace(face.id)} className={`relative aspect-square rounded-xl overflow-hidden cursor-grab active:cursor-grabbing group/item transition-all duration-200 border-2 ${isSelected ? 'border-indigo-500 scale-95' : 'border-transparent hover:border-neutral-600'}`}>
<img src={face.cropDataUrl} alt="Crop" draggable="false" className="w-full h-full object-cover select-none pointer-events-none" loading="lazy" />
<div className={`absolute top-2 right-2 opacity-0 group-hover/item:opacity-100 transition-opacity z-10 flex gap-1.5`}>
<button onClick={(e) => downloadSingleFace(e, face, group, fIndex)} className="p-1.5 bg-neutral-900/80 hover:bg-emerald-500 rounded-lg text-white backdrop-blur border border-neutral-700 transition-colors shadow-sm" title="Download Image"><DownloadIcon size={14} /></button>
<button onClick={(e) => { e.stopPropagation(); openEditor(face, groupIndex, fIndex); }} className="p-1.5 bg-neutral-900/80 hover:bg-indigo-500 rounded-lg text-white backdrop-blur border border-neutral-700 transition-colors shadow-sm" title="Manual Crop"><EditIcon size={14} /></button>
</div>
<div className={`absolute inset-0 pointer-events-none flex items-center justify-center transition-opacity ${isSelected ? 'bg-indigo-500/20 opacity-100' : 'bg-black/40 opacity-0 group-hover/item:opacity-100'}`}>
<div className={`w-6 h-6 rounded-full border-2 flex items-center justify-center ${isSelected ? 'bg-indigo-500 border-indigo-500' : 'border-white/70'}`}>{isSelected && <CheckCircleIcon size={14} className="text-white" />}</div>
</div>
</div>
);
})}
</div>
</div>
);
})}
</div>
</div>
)}
</main>
{faceGroups.length > 0 && (
<div className={`fixed bottom-20 left-1/2 -translate-x-1/2 z-40 transition-all duration-300 ${selectedFaceIds.size > 0 ? 'translate-y-0 opacity-100' : 'translate-y-10 opacity-0 pointer-events-none'}`}>
<div className="bg-neutral-900 border border-neutral-800 shadow-2xl rounded-full p-2 pl-6 pr-3 flex items-center gap-6">
<div className="font-medium text-sm"><span className="text-indigo-400 font-bold">{selectedFaceIds.size}</span> faces selected</div>
<button onClick={downloadSelected} disabled={isProcessing} className={`flex items-center gap-2 bg-indigo-600 text-white px-5 py-2.5 rounded-full text-sm font-medium hover:bg-indigo-500 transition-colors disabled:opacity-50 disabled:cursor-not-allowed`}>
{isProcessing ? <Loader2Icon size={18} className="animate-spin" /> : <DownloadIcon size={18} />}
{isProcessing ? 'Creating ZIP...' : 'Download'}
</button>
</div>
</div>
)}
{showSettings && (
<div className="fixed inset-0 bg-black/80 z-50 flex items-center justify-center p-4">
<div className="bg-neutral-900 border border-neutral-800 rounded-2xl p-6 w-full max-w-md shadow-2xl animate-in">
<div className="flex items-center justify-between mb-6">
<h3 className="text-lg font-semibold text-white">Smart Crop Settings</h3>
<button onClick={() => setShowSettings(false)} className="text-neutral-400 hover:text-white"><XIcon size={20}/></button>
</div>
<div className="space-y-4">
<div>
<label className="block text-sm text-neutral-400 mb-2">Padding Style</label>
<select disabled={isProcessing} value={cropSettings.padding} onChange={(e) => applyGlobalCropSettings({...cropSettings, padding: parseFloat(e.target.value)})} className="w-full bg-neutral-800 border border-neutral-700 rounded-lg p-2.5 text-white outline-none focus:border-indigo-500 disabled:opacity-50">
<option value="0.05">Very Tight (Exclude others)</option>
<option value="0.15">Tight</option>
<option value="0.3">Normal</option>
<option value="0.5">Wide</option>
</select>
</div>
<div>
<label className="block text-sm text-neutral-400 mb-2">Shape</label>
<select disabled={isProcessing} value={cropSettings.shape} onChange={(e) => applyGlobalCropSettings({...cropSettings, shape: e.target.value})} className="w-full bg-neutral-800 border border-neutral-700 rounded-lg p-2.5 text-white outline-none focus:border-indigo-500 disabled:opacity-50">
<option value="square">Square</option>
<option value="original">Original Aspect</option>
</select>
</div>
<p className="text-xs text-neutral-500 mt-4 leading-relaxed">Changes apply instantly to all currently extracted faces.</p>
<button onClick={() => setShowSettings(false)} disabled={isProcessing} className="w-full mt-6 bg-indigo-600 hover:bg-indigo-500 text-white py-2.5 rounded-lg font-medium transition-colors disabled:opacity-50">Done</button>
</div>
</div>
</div>
)}
{editingFace && (
<div className="fixed inset-0 bg-black/80 z-50 flex items-center justify-center p-4">
<div className="bg-neutral-900 border border-neutral-800 rounded-2xl p-6 w-full max-w-lg flex flex-col items-center shadow-2xl animate-in">
<div className="w-full flex items-center justify-between mb-4">
<h3 className="text-lg font-semibold text-white">Manual Crop Adjust (Drag to Pan)</h3>
<button onClick={() => setEditingFace(null)} className="text-neutral-400 hover:text-white"><XIcon size={20}/></button>
</div>
<div className={`relative w-64 h-64 bg-neutral-800 rounded-xl overflow-hidden mb-6 border border-neutral-700 flex items-center justify-center shadow-inner ${dragState.isDragging ? 'cursor-grabbing' : 'cursor-grab'}`} onMouseDown={(e) => { setDragState({ isDragging: true, startX: e.clientX, startY: e.clientY, initialOffsetX: editingFace.manualOffsets.x, initialOffsetY: editingFace.manualOffsets.y }); }} onMouseMove={(e) => { if (!dragState.isDragging || !editingFace) return; const dx = e.clientX - dragState.startX; const dy = e.clientY - dragState.startY; const scale = Math.max(1, editingFace.originalBox.width / 128) / editingFace.manualOffsets.zoom; updateManualCrop({ x: dragState.initialOffsetX - (dx * scale), y: dragState.initialOffsetY - (dy * scale) }); }} onMouseUp={() => setDragState(prev => ({ ...prev, isDragging: false }))} onMouseLeave={() => setDragState(prev => ({ ...prev, isDragging: false }))}>
<img src={editingFace.previewUrl} style={{ imageRendering: editingFace.manualOffsets.resolution !== 'auto' && parseInt(editingFace.manualOffsets.resolution) < 150 ? 'pixelated' : 'auto' }} className="max-w-full max-h-full object-contain pointer-events-none select-none" alt="Preview" draggable="false" />
</div>
<div className="w-full space-y-5">
<div>
<label className="flex justify-between text-sm text-neutral-400 mb-2">
<span>Export Resolution</span>
<span>{editingFace.manualOffsets.resolution === 'auto' ? 'Auto' : `${editingFace.manualOffsets.resolution}x${editingFace.manualOffsets.resolution}`}</span>
</label>
<select value={editingFace.manualOffsets.resolution} onChange={(e) => updateManualCrop({ resolution: e.target.value })} className="w-full bg-neutral-800 border border-neutral-700 rounded-lg p-2.5 text-white outline-none focus:border-indigo-500">
<option value="auto">Auto (Original Detected Size)</option>
<option value="56">56 x 56</option>
<option value="128">128 x 128</option>
<option value="256">256 x 256</option>
<option value="512">512 x 512</option>
</select>
</div>
<div>
<label className="flex justify-between text-sm text-neutral-400 mb-2">
<span>Zoom</span>
<span>{editingFace.manualOffsets.zoom.toFixed(1)}x</span>
</label>
<input type="range" min="0.5" max="2.5" step="0.1" value={editingFace.manualOffsets.zoom} onChange={(e) => updateManualCrop({ zoom: parseFloat(e.target.value) })} className="w-full accent-indigo-500" />
</div>
</div>
<div className="w-full flex gap-3 mt-8">
<button onClick={() => setEditingFace(null)} className="flex-1 py-2.5 rounded-lg border border-neutral-700 text-white hover:bg-neutral-800 transition-colors">Cancel</button>
<button onClick={saveManualCrop} className="flex-1 py-2.5 rounded-lg bg-indigo-600 hover:bg-indigo-500 text-white font-medium transition-colors shadow-lg">Apply Crop</button>
</div>
</div>
</div>
)}
</div>
);
}
const root = ReactDOM.createRoot(document.getElementById('root'));
root.render(<FaceExtractApp />);
</script>
<!-- ========================================== -->
<!-- LOGIC: APP 2 (VIDEOFLOW - MODULE) -->
<!-- ========================================== -->
<script type="module">
import { ObjectDetector, FilesetResolver } from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/vision_bundle.mjs";
// --- DOM handles: upload & drop targets ---
const videoInput = document.getElementById('video-input');
const dropZone = document.getElementById('drop-zone');
const videoEl = document.getElementById('hidden-video');
const previewCanvas = document.getElementById('preview-canvas');
// --- DOM handles: results & progress UI ---
const resultsEl = document.getElementById('results');
const progressBar = document.getElementById('progress-bar');
const progressPercent = document.getElementById('progress-percent');
const statusLabel = document.getElementById('status-label');
// --- DOM handles: action buttons ---
const startBtn = document.getElementById('start-btn');
const downloadBtn = document.getElementById('download-btn');
const exportImmagerBtn = document.getElementById('export-immager-btn');
const selectAllBtn = document.getElementById('select-all-btn');
// --- DOM handles: status badges, settings panel, decorations ---
const modelBadge = document.getElementById('model-badge');
const faceBadge = document.getElementById('face-badge');
const settingsPanel = document.getElementById('settings-panel');
const statsText = document.getElementById('stats-text');
const scannerLine = document.getElementById('scanner');
const emptyState = document.getElementById('empty-state');
// --- DOM handles: extraction option toggles ---
const extractAllToggle = document.getElementById('extract-all-toggle');
const advancedFiltersContainer = document.getElementById('advanced-filters-container');
const autoCropToggle = document.getElementById('auto-crop-toggle');
const faceCropToggle = document.getElementById('face-crop-toggle');
const requireFaceToggle = document.getElementById('require-face-toggle');
// --- DOM handles: target-face matching controls ---
const faceUploadBtn = document.getElementById('face-upload-btn');
const faceInput = document.getElementById('face-input');
const facePreview = document.getElementById('target-face-preview');
const faceStatusText = document.getElementById('face-status-text');
// --- Mutable module state ---
let detector;                    // MediaPipe ObjectDetector (assigned in initMediaPipe)
let extractedFrames = [];        // captured frames: { data, time, type }
let isProcessing = false;        // true while a scan runs; also doubles as the stop flag
let targetFaceDescriptor = null; // FaceAPI descriptor of the uploaded reference face, if any
let isFaceApiLoaded = false;     // flips true once all FaceAPI models have loaded
// Setup frame selection memory globally for Vanilla JS
window.selectedFrames = new Set();
// Keep the slider value labels in sync while the user drags them.
document.getElementById('scan-rate').oninput = (e) => document.getElementById('interval-val').innerText = e.target.value + 's';
document.getElementById('confidence').oninput = (e) => document.getElementById('conf-val').innerText = Math.round(e.target.value * 100) + '%';
// Grey out options logic
// Visually disable the fine-grained filters while "extract all frames"
// is on, since they have no effect in that mode.
extractAllToggle.addEventListener('change', (e) => {
  const bypassFilters = e.target.checked;
  advancedFiltersContainer.style.opacity = bypassFilters ? '0.3' : '1';
  advancedFiltersContainer.style.pointerEvents = bypassFilters ? 'none' : 'auto';
});
async function initMediaPipe() {
  // Load the EfficientDet-Lite0 object detector (GPU delegate) and flip
  // the model badge to its ready or error state accordingly.
  try {
    const vision = await FilesetResolver.forVisionTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm");
    const detectorConfig = {
      baseOptions: {
        modelAssetPath: `https://storage.googleapis.com/mediapipe-models/object_detector/efficientdet_lite0/float16/1/efficientdet_lite0.tflite`,
        delegate: "GPU"
      },
      scoreThreshold: 0.5,
      runningMode: "IMAGE"
    };
    detector = await ObjectDetector.createFromOptions(vision, detectorConfig);
    modelBadge.innerHTML = '<span class="status-dot bg-emerald-500"></span> MediaPipe Ready';
    modelBadge.classList.replace('text-slate-400', 'text-emerald-400');
  } catch (err) {
    console.error("MediaPipe Error:", err);
    modelBadge.innerHTML = '<span class="status-dot bg-red-500"></span> Engine Error';
  }
}
async function initFaceAPI() {
  // Load the three FaceAPI models needed for detection, landmarks and
  // recognition, reporting progress/outcome through the face badge.
  faceBadge.classList.remove('hidden');
  const MODEL_URL = 'https://cdn.jsdelivr.net/npm/@vladmandic/face-api@1.7.12/model/';
  try {
    const nets = window.faceapi.nets;
    await Promise.all([
      nets.ssdMobilenetv1.loadFromUri(MODEL_URL),
      nets.faceLandmark68Net.loadFromUri(MODEL_URL),
      nets.faceRecognitionNet.loadFromUri(MODEL_URL)
    ]);
    isFaceApiLoaded = true;
    faceBadge.innerHTML = '<span class="status-dot bg-purple-500"></span> FaceAPI Ready';
    faceBadge.classList.replace('text-slate-400', 'text-purple-400');
  } catch(err) {
    console.error("FaceAPI Error:", err);
    faceBadge.innerHTML = '<span class="status-dot bg-red-500"></span> FaceAPI Error';
  }
}
// Kick off model loading: MediaPipe immediately, FaceAPI slightly later
// so the two heavy downloads don't compete for bandwidth at startup.
initMediaPipe();
setTimeout(initFaceAPI, 500);
// The visible button proxies clicks to the hidden file input.
faceUploadBtn.onclick = () => faceInput.click();
faceInput.onchange = async (e) => {
  // Analyze an uploaded reference photo and store its face descriptor so
  // scanned frames can later be matched against this person.
  const file = e.target.files[0];
  if (!file) return;
  if (!isFaceApiLoaded) {
    alert("Please wait for FaceAPI to finish loading...");
    return;
  }
  faceStatusText.classList.remove('hidden');
  faceStatusText.innerHTML = '<i class="fas fa-spinner animate-spin"></i> Analyzing face...';
  faceStatusText.className = "text-[10px] text-amber-400 mt-2 text-center";
  try {
    const url = URL.createObjectURL(file);
    facePreview.onload = async () => {
      try {
        const options = new window.faceapi.SsdMobilenetv1Options({ minConfidence: 0.2 });
        const detection = await window.faceapi.detectSingleFace(facePreview, options).withFaceLandmarks().withFaceDescriptor();
        if (detection) {
          targetFaceDescriptor = detection.descriptor;
          faceStatusText.innerHTML = '<i class="fas fa-check text-emerald-400"></i> Target Face Locked';
          faceStatusText.className = "text-[10px] text-emerald-400 mt-2 text-center font-bold";
          facePreview.classList.remove('border-red-500');
        } else {
          targetFaceDescriptor = null;
          facePreview.classList.add('border-red-500');
          faceStatusText.innerHTML = '<i class="fas fa-times text-red-500"></i> No face detected.';
          faceStatusText.className = "text-[10px] text-red-500 mt-2 text-center";
        }
      } catch (err) {
        console.error("Detection error:", err);
        faceStatusText.innerHTML = '<i class="fas fa-times text-red-500"></i> Processing error.';
      } finally {
        // BUGFIX: the <img> has finished decoding by now, so the blob URL
        // is no longer needed. It was previously never revoked, leaking a
        // blob reference on every reference-photo upload.
        URL.revokeObjectURL(url);
      }
    };
    facePreview.src = url;
    faceUploadBtn.classList.add('hidden');
    facePreview.classList.remove('hidden');
  } catch (err) {
    console.error(err);
    faceStatusText.innerHTML = 'Error reading image.';
  }
};
// Clicking anywhere on the drop zone opens the hidden file picker.
dropZone.onclick = () => videoInput.click();
videoInput.onchange = (e) => {
  // Unlock the settings panel once a video has been chosen.
  const file = e.target.files[0];
  if (!file) return;
  settingsPanel.style.opacity = "1";
  settingsPanel.style.pointerEvents = "all";
  dropZone.querySelector('h3').innerText = file.name;
  statsText.innerText = "Video loaded. Ready to scan.";
};
// Start/Stop button. First click launches the scan loop; clicking again
// while a scan runs clears `isProcessing`, which the loop checks each
// iteration to stop early.
startBtn.onclick = async () => {
  if (isProcessing) {
    // Acts as the Stop button while a scan is in flight.
    isProcessing = false;
    return;
  }
  const file = videoInput.files[0];
  if (!file || !detector) return;
  // Reset all per-run state and switch the UI into "scanning" mode.
  isProcessing = true;
  extractedFrames = [];
  window.selectedFrames.clear(); // Reset selections
  updateActionButtons();
  resultsEl.innerHTML = '';
  if (emptyState) emptyState.classList.add('hidden');
  document.getElementById('progress-container').classList.remove('hidden');
  if (scannerLine) scannerLine.style.display = "block";
  startBtn.innerHTML = '<i class="fas fa-stop"></i> Stop Analysis';
  startBtn.classList.replace('bg-indigo-600', 'bg-red-600');
  const url = URL.createObjectURL(file);
  videoEl.src = url;
  videoEl.onloadedmetadata = async () => {
    const ctx = previewCanvas.getContext('2d');
    previewCanvas.width = videoEl.videoWidth;
    previewCanvas.height = videoEl.videoHeight;
    const duration = videoEl.duration;
    // Snapshot all user settings once, before the loop starts.
    const step = parseFloat(document.getElementById('scan-rate').value);
    const confidence = parseFloat(document.getElementById('confidence').value);
    const extractAll = extractAllToggle.checked;
    const doAutoCrop = autoCropToggle.checked;
    const doFaceCrop = faceCropToggle.checked;
    const requireFace = requireFaceToggle.checked;
    const matchFace = targetFaceDescriptor !== null;
    let lastTime = performance.now();
    // Seek through the video in fixed `step`-second increments, drawing
    // each seeked frame onto the preview canvas for analysis.
    for (let time = 0; time < duration; time += step) {
      if (!isProcessing) break; // user pressed Stop
      videoEl.currentTime = time;
      await new Promise(r => videoEl.onseeked = r);
      ctx.drawImage(videoEl, 0, 0);
      if (extractAll) {
        // Skip detection entirely and just capture the full frame
        const fullFrameData = previewCanvas.toDataURL('image/jpeg', 0.9);
        extractedFrames.push({ data: fullFrameData, time: time, type: 'Full' });
        addFrameToUI(fullFrameData, time, 'Full', extractedFrames.length - 1);
      } else {
        detector.setOptions({ scoreThreshold: confidence });
        const results = detector.detect(previewCanvas);
        let people = results.detections.filter(d =>
          d.categories.some(c => c.categoryName === 'person')
        );
        // Each entity pairs an (optional) person box with an (optional)
        // face box, plus whether it matched the uploaded target face.
        let validEntities = [];
        if (people.length > 0) {
          if (matchFace || requireFace || doFaceCrop) {
            // Face detection is only run when a mode actually needs it.
            const faces = await window.faceapi.detectAllFaces(previewCanvas, new window.faceapi.SsdMobilenetv1Options({minConfidence: 0.3})).withFaceLandmarks().withFaceDescriptors();
            if (matchFace) {
              // Keep only faces near the target descriptor (< 0.55 distance).
              const matchingFaces = faces.filter(f => window.faceapi.euclideanDistance(targetFaceDescriptor, f.descriptor) < 0.55);
              matchingFaces.forEach(f => {
                let linkedPerson = people.find(p => isFaceInBody(f.detection.box, p.boundingBox));
                validEntities.push({ personBox: linkedPerson ? linkedPerson.boundingBox : null, faceBox: f.detection.box, isMatch: true });
              });
            } else if (requireFace) {
              // Face-first: only entities with a visible face count.
              faces.forEach(f => {
                let linkedPerson = people.find(p => isFaceInBody(f.detection.box, p.boundingBox));
                validEntities.push({ personBox: linkedPerson ? linkedPerson.boundingBox : null, faceBox: f.detection.box, isMatch: false });
              });
            } else {
              // Person-first: attach a face to each body when one overlaps.
              people.forEach(p => {
                let linkedFace = faces.find(f => isFaceInBody(f.detection.box, p.boundingBox));
                validEntities.push({ personBox: p.boundingBox, faceBox: linkedFace ? linkedFace.detection.box : null, isMatch: false });
              });
            }
          } else {
            people.forEach(p => validEntities.push({ personBox: p.boundingBox, faceBox: null, isMatch: false }));
          }
        }
        if (validEntities.length > 0) {
          if (!doAutoCrop && !doFaceCrop) {
            // No cropping requested: store the entire frame once.
            const fullFrameData = previewCanvas.toDataURL('image/jpeg', 0.9);
            extractedFrames.push({ data: fullFrameData, time: time, type: 'Full' });
            addFrameToUI(fullFrameData, time, 'Full', extractedFrames.length - 1);
          } else {
            validEntities.forEach(entity => {
              if (doAutoCrop && entity.personBox) {
                // Crop the body box with 15% padding, clamped to the canvas.
                const box = entity.personBox;
                const padX = box.width * 0.15;
                const padY = box.height * 0.15;
                const cX = Math.max(0, box.originX - padX);
                const cY = Math.max(0, box.originY - padY);
                const cW = Math.min(previewCanvas.width - cX, box.width + padX * 2);
                const cH = Math.min(previewCanvas.height - cY, box.height + padY * 2);
                const cropCanvas = document.createElement('canvas');
                cropCanvas.width = cW;
                cropCanvas.height = cH;
                cropCanvas.getContext('2d').drawImage(previewCanvas, cX, cY, cW, cH, 0, 0, cW, cH);
                const frameData = cropCanvas.toDataURL('image/jpeg', 0.9);
                extractedFrames.push({ data: frameData, time: time, type: 'Body' });
                addFrameToUI(frameData, time, 'Body', extractedFrames.length - 1);
              }
              if (doFaceCrop && entity.faceBox) {
                // Export a 512x512 square crop centered on the face at 2x
                // its size, black-padded where the source runs off-canvas.
                const fBox = entity.faceBox;
                const size = Math.max(fBox.width, fBox.height) * 2.0;
                const centerX = fBox.x + fBox.width / 2;
                const centerY = fBox.y + fBox.height / 2;
                const sX = Math.max(0, centerX - size / 2);
                const sY = Math.max(0, centerY - size / 2);
                const sW = Math.min(previewCanvas.width - sX, size);
                const sH = Math.min(previewCanvas.height - sY, size);
                const faceCanvas = document.createElement('canvas');
                faceCanvas.width = 512;
                faceCanvas.height = 512;
                const fCtx = faceCanvas.getContext('2d');
                fCtx.fillStyle = '#000000';
                fCtx.fillRect(0, 0, 512, 512);
                const scale = 512 / size;
                const dX = (sX - (centerX - size/2)) * scale;
                const dY = (sY - (centerY - size/2)) * scale;
                const dW = sW * scale;
                const dH = sH * scale;
                fCtx.drawImage(previewCanvas, sX, sY, sW, sH, dX, dY, dW, dH);
                const faceData = faceCanvas.toDataURL('image/jpeg', 0.95);
                extractedFrames.push({ data: faceData, time: time, type: 'Face' });
                addFrameToUI(faceData, time, 'Face', extractedFrames.length - 1);
              }
            });
          }
          // Draw debug overlays (boxes and target-match label) on the
          // live preview canvas.
          validEntities.forEach(entity => {
            if (entity.personBox) {
              ctx.strokeStyle = entity.isMatch ? '#c084fc' : '#818cf8';
              ctx.lineWidth = 4;
              ctx.strokeRect(entity.personBox.originX, entity.personBox.originY, entity.personBox.width, entity.personBox.height);
              if (entity.isMatch) {
                ctx.fillStyle = '#c084fc';
                ctx.font = '20px Arial';
                ctx.fillText("TARGET MATCH", entity.personBox.originX, entity.personBox.originY - 10);
              }
            }
            if (entity.faceBox) {
              ctx.strokeStyle = '#34d399';
              ctx.lineWidth = 2;
              ctx.strokeRect(entity.faceBox.x, entity.faceBox.y, entity.faceBox.width, entity.faceBox.height);
            }
          });
        }
      } // <-- End of else statement wrapping the detection logic
      // Per-tick progress + throughput telemetry.
      const pct = Math.min(100, Math.round((time / duration) * 100));
      progressBar.style.width = `${pct}%`;
      progressPercent.innerText = `${pct}%`;
      statsText.innerText = `Extracted ${extractedFrames.length} specific instances.`;
      const now = performance.now();
      const fps = Math.round(1000 / (now - lastTime));
      document.getElementById('fps-counter').innerText = `${fps} SEEK/S`;
      lastTime = now;
    }
    cleanup();
  };
};
function isFaceInBody(faceBox, bodyBox) {
  // A face belongs to a body when the face's center point falls inside
  // the body's bounding box (inclusive edges). Missing boxes never match.
  if (!faceBox || !bodyBox) return false;
  const cx = faceBox.x + faceBox.width / 2;
  const cy = faceBox.y + faceBox.height / 2;
  const insideX = cx >= bodyBox.originX && cx <= bodyBox.originX + bodyBox.width;
  const insideY = cy >= bodyBox.originY && cy <= bodyBox.originY + bodyBox.height;
  return insideX && insideY;
}
// Appends one extracted-frame thumbnail card to the results strip.
//   src   - data-URL of the captured image
//   time  - capture timestamp in seconds (shown in the T+ badge)
//   type  - 'Face' | 'Body' | anything else (full frame); drives size and badge colour
//   index - position in extractedFrames; used for the element id and selection toggling
function addFrameToUI(src, time, type, index) {
    // Per-type presentation for the thumbnail card.
    let dims, badge;
    if (type === 'Face') {
        dims = "w-32 h-32";
        badge = 'bg-emerald-600';
    } else if (type === 'Body') {
        dims = "h-40 w-auto min-w-[100px]";
        badge = 'bg-indigo-600';
    } else {
        dims = "w-64 h-auto aspect-video";
        badge = 'bg-slate-600';
    }
    const card = document.createElement('div');
    card.className = `frame-wrapper group relative bg-slate-900 rounded-xl overflow-hidden border border-white/5 hover:border-indigo-500/50 transition-all shadow-xl flex-shrink-0 cursor-pointer ${dims}`;
    card.id = `frame-wrapper-${index}`;
    card.onclick = () => window.toggleFrameSelection(index);
    card.innerHTML = `
<img src="${src}" class="w-full h-full object-contain bg-black/50 pointer-events-none select-none">
<!-- Overlay Checkmark for Selections -->
<div class="selection-overlay absolute inset-0 bg-indigo-500/20 opacity-0 transition-opacity flex items-center justify-center pointer-events-none">
<div class="bg-indigo-500 rounded-full w-8 h-8 flex items-center justify-center text-white shadow-lg">
<i class="fas fa-check"></i>
</div>
</div>
<!-- Action Buttons -->
<div class="absolute top-2 left-2 flex gap-1 z-10 opacity-0 group-hover:opacity-100 transition-opacity">
<!-- Zoom/Preview Button -->
<button class="bg-black/70 hover:bg-indigo-600 text-white p-2 rounded-lg text-xs transition-colors shadow" onclick="event.stopPropagation(); window.showPreview('${src}')" title="Preview Frame">
<i class="fas fa-search-plus"></i>
</button>
<!-- Download Button -->
<button class="bg-black/70 hover:bg-emerald-600 text-white p-2 rounded-lg text-xs transition-colors shadow" onclick="event.stopPropagation(); window.downloadSingleFrame('${src}', ${time}, '${type}', ${index})" title="Download Frame">
<i class="fas fa-download"></i>
</button>
</div>
<!-- Status Tags -->
<div class="absolute bottom-2 left-2 bg-black/60 px-2 py-0.5 rounded text-[9px] font-mono text-indigo-300 pointer-events-none">
T+ ${time.toFixed(1)}s
</div>
<div class="absolute top-2 right-2 ${badge} px-1.5 py-0.5 rounded text-[8px] font-bold text-white uppercase shadow pointer-events-none">
${type}
</div>
`;
    resultsEl.appendChild(card);
    // Keep the newest frame in view by pinning the scroll position to the bottom.
    const scroller = document.getElementById('results-container');
    if (scroller) scroller.scrollTop = scroller.scrollHeight;
}
// --- GLOBAL UI HELPERS FOR VIDEOFLOW SELECTIONS & PREVIEWS ---
// Toggles membership of a frame in the global selection set and syncs the
// thumbnail's highlight overlay and ring, then refreshes the action buttons.
window.toggleFrameSelection = function(index) {
    // Flip selection state first; the visual update below follows the set.
    const nowSelected = !window.selectedFrames.has(index);
    if (nowSelected) {
        window.selectedFrames.add(index);
    } else {
        window.selectedFrames.delete(index);
    }
    const frameDiv = document.getElementById(`frame-wrapper-${index}`);
    const overlay = frameDiv.querySelector('.selection-overlay');
    if (nowSelected) {
        overlay.classList.remove('opacity-0');
        overlay.classList.add('opacity-100');
        frameDiv.classList.add('ring-2', 'ring-indigo-500');
    } else {
        overlay.classList.add('opacity-0');
        overlay.classList.remove('opacity-100');
        frameDiv.classList.remove('ring-2', 'ring-indigo-500');
    }
    updateActionButtons();
};
// Opens the full-size preview modal showing the given image source.
window.showPreview = function(src) {
    const modalImg = document.getElementById('preview-modal-img');
    const modal = document.getElementById('video-preview-modal');
    modalImg.src = src;
    modal.classList.remove('hidden');
};
// Downloads a single frame by clicking a temporary anchor element.
// The anchor is attached to the DOM for the click and removed right after.
window.downloadSingleFrame = function(src, time, type, index) {
    const link = document.createElement('a');
    link.href = src;
    link.download = `frame_${time.toFixed(2)}s_${type}_${index}.jpg`;
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
};
// Preview-modal dismissal: the close button hides the modal, and so does a
// click on the backdrop (clicks on the image itself are ignored).
const previewModalEl = document.getElementById('video-preview-modal');
document.getElementById('close-preview').onclick = () => previewModalEl.classList.add('hidden');
previewModalEl.onclick = (e) => {
    if (e.target === previewModalEl) previewModalEl.classList.add('hidden');
};
// Refreshes the three bulk-action buttons to match the current state:
// labels switch between "N Selected" and "All" depending on the selection,
// and all three buttons are hidden while no frames have been extracted.
function updateActionButtons() {
    const selectedCount = window.selectedFrames.size;
    if (selectedCount > 0) {
        downloadBtn.innerHTML = `<i class="fas fa-file-export"></i> Download ${selectedCount} Selected`;
        exportImmagerBtn.innerHTML = `<i class="fas fa-magic"></i> Export ${selectedCount} to Immager`;
        selectAllBtn.innerHTML = "Deselect All";
    } else {
        downloadBtn.innerHTML = `<i class="fas fa-file-export"></i> Download All`;
        exportImmagerBtn.innerHTML = `<i class="fas fa-magic"></i> Export All to Immager`;
        selectAllBtn.innerHTML = "Select All";
    }
    // Visibility depends only on whether any frames exist at all.
    const noFrames = extractedFrames.length === 0;
    [downloadBtn, exportImmagerBtn, selectAllBtn].forEach(
        btn => btn.classList.toggle('hidden', noFrames)
    );
}
// Select-all / deselect-all toggle: when anything is selected, clear the
// whole selection; otherwise select every extracted frame. Afterwards every
// thumbnail's highlight is re-synced and the action buttons refreshed.
selectAllBtn.onclick = () => {
    const selectEverything = window.selectedFrames.size === 0;
    window.selectedFrames.clear();
    if (selectEverything) {
        extractedFrames.forEach((_, i) => window.selectedFrames.add(i));
    }
    extractedFrames.forEach((_, i) => {
        const wrapper = document.getElementById(`frame-wrapper-${i}`);
        if (!wrapper) return;
        const overlay = wrapper.querySelector('.selection-overlay');
        const isSelected = window.selectedFrames.has(i);
        // classList.toggle with a force flag mirrors the add/remove pairs.
        overlay.classList.toggle('opacity-0', !isSelected);
        overlay.classList.toggle('opacity-100', isSelected);
        wrapper.classList.toggle('ring-2', isSelected);
        wrapper.classList.toggle('ring-indigo-500', isSelected);
    });
    updateActionButtons();
};
function dataURLtoFile(dataurl, filename) {
let arr = dataurl.split(','), mime = arr[0].match(/:(.*?);/)[1],
bstr = atob(arr[1]), n = bstr.length, u8arr = new Uint8Array(n);
while(n--){
u8arr[n] = bstr.charCodeAt(n);
}
return new File([u8arr], filename, {type:mime});
}
// Converts the selected frames (or every frame when nothing is selected)
// into File objects, hands them to the Immager React app via the
// SEND_TO_IMMAGER custom event, and switches to the Immager view.
exportImmagerBtn.onclick = () => {
    const framesToExport = window.selectedFrames.size > 0
        ? Array.from(window.selectedFrames).map(i => extractedFrames[i])
        : extractedFrames;
    if (framesToExport.length === 0) return;
    const originalText = exportImmagerBtn.innerHTML;
    exportImmagerBtn.innerHTML = '<i class="fas fa-spinner animate-spin"></i> Transferring...';
    exportImmagerBtn.disabled = true;
    // setTimeout lets the spinner paint before the synchronous base64 decoding work.
    setTimeout(() => {
        try {
            const files = framesToExport.map((f, i) => {
                const filename = `videoflow_${f.time.toFixed(2)}s_${f.type}_${i}.jpg`;
                return dataURLtoFile(f.data, filename);
            });
            // Dispatch event to React Immager
            window.dispatchEvent(new CustomEvent('SEND_TO_IMMAGER', { detail: files }));
            // Navigate to Immager View automatically
            switchApp('immager');
        } finally {
            // Always restore the button, even if conversion or dispatch throws,
            // so the UI cannot get stuck disabled in the "Transferring" state.
            exportImmagerBtn.innerHTML = originalText;
            exportImmagerBtn.disabled = false;
        }
    }, 100);
};
// Restores the idle UI after an extraction run ends: resets the start
// button, hides the scanner line, clears the frame selection, shows the
// empty state when nothing was found, and refreshes the action buttons.
function cleanup() {
    isProcessing = false;
    startBtn.innerHTML = '<i class="fas fa-microchip"></i> Start AI Extraction';
    startBtn.classList.replace('bg-red-600', 'bg-indigo-600');
    if (scannerLine) scannerLine.style.display = "none";
    statusLabel.innerText = "Process Complete";
    window.selectedFrames.clear(); // Reset Selection
    if (extractedFrames.length === 0) {
        if (emptyState) emptyState.classList.remove('hidden');
        statsText.innerText = "Scan complete. No matching frames found.";
    }
    updateActionButtons();
}
// Zips the selected frames (or all frames when nothing is selected) with
// JSZip and triggers a download of the archive. The button is disabled and
// shows a spinner while zipping; it is always restored in the finally block.
downloadBtn.onclick = async () => {
    const framesToExport = window.selectedFrames.size > 0
        ? Array.from(window.selectedFrames).map(i => extractedFrames[i])
        : extractedFrames;
    if (framesToExport.length === 0) return;
    const originalText = downloadBtn.innerHTML;
    downloadBtn.innerHTML = '<i class="fas fa-spinner animate-spin"></i> Zipping...';
    downloadBtn.disabled = true;
    const zip = new window.JSZip();
    framesToExport.forEach((f, index) => {
        // Strip the data-URL prefix; JSZip expects the bare base64 payload.
        const base64Data = f.data.replace(/^data:image\/(png|jpg|jpeg);base64,/, "");
        zip.file(`frame_${f.time.toFixed(2)}s_${f.type}_${index}.jpg`, base64Data, {base64: true});
    });
    try {
        const content = await zip.generateAsync({type: "blob"});
        const url = URL.createObjectURL(content);
        const a = document.createElement('a');
        a.href = url;
        a.download = `HumanFrames_Extracted.zip`;
        // Attach the anchor for the click (matches downloadSingleFrame).
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        // Release the blob URL once the download has started; without this
        // every export leaks the full zip blob for the lifetime of the page.
        setTimeout(() => URL.revokeObjectURL(url), 1000);
    } catch (err) {
        console.error("Zipping failed", err);
    } finally {
        downloadBtn.innerHTML = originalText;
        downloadBtn.disabled = false;
    }
};
</script>
<!-- ========================================== -->
<!-- DYNAMIC PWA REGISTRATION (SINGLE FILE) -->
<!-- ========================================== -->
<script>
(function() {
// Single-file PWA bootstrap: builds the web-app manifest and a minimal
// service worker entirely in memory (blob URLs) so the page needs no
// external assets to be installable.
// 1. Generate & Inject Manifest dynamically
const manifestObj = {
"name": "AI Extraction Studio",
"short_name": "AI Studio",
"start_url": ".",
"display": "standalone",
"background_color": "#000000",
"theme_color": "#4f46e5",
"description": "Image cropping and video extraction AI tools running locally in your browser.",
"icons": [{
"src": "data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 512 512'><rect width='512' height='512' rx='120' fill='%234f46e5'/><path d='M150 350 L250 150 L350 350 Z' fill='white'/></svg>",
"sizes": "512x512",
"type": "image/svg+xml",
"purpose": "any maskable"
}]
};
// The manifest blob URL is intentionally never revoked: the injected
// <link rel="manifest"> must stay resolvable for the lifetime of the page.
const manifestBlob = new Blob([JSON.stringify(manifestObj)], {type: 'application/json'});
const manifestUrl = URL.createObjectURL(manifestBlob);
const link = document.createElement('link');
link.rel = 'manifest';
link.href = manifestUrl;
document.head.appendChild(link);
// 2. Generate & Register Service Worker for offline/PWA install capability
// The generated worker activates immediately (skipWaiting/clients.claim) and
// its fetch handler is a network pass-through with a 503 fallback when offline.
const swCode = `
const CACHE_NAME = 'ai-studio-v1';
self.addEventListener('install', (e) => {
self.skipWaiting();
});
self.addEventListener('activate', (e) => {
self.clients.claim();
});
self.addEventListener('fetch', (e) => {
// Pass-through fetch to allow models and CDNs to load normally
e.respondWith(fetch(e.request).catch(() => new Response('Offline Mode Active', {status: 503})));
});
`;
const swBlob = new Blob([swCode], {type: 'application/javascript'});
const swUrl = URL.createObjectURL(swBlob);
// NOTE(review): major browsers (Chromium, Firefox) reject service-worker
// scripts served from blob: URLs, so this register() call likely lands in
// the .catch below — confirm in the target browsers; a same-origin sw.js
// file is the reliable alternative for offline/install support.
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register(swUrl)
.then(reg => console.log('Single-File PWA Service Worker Registered!'))
.catch(err => console.error('SW Registration Failed:', err));
}
})();
</script>
</body>
</html>