// --- Module imports ---
import React, { useState, useCallback, useMemo, useEffect, useRef } from 'react';
import type { MediaFile } from './types';
import { GenerationStatus } from './types';
import FileUploader from './components/FileUploader';
import MediaItem from './components/MediaItem';
import { generateCaption, refineCaption, checkCaptionQuality } from './services/geminiService';
import { generateCaptionQwen, refineCaptionQwen, checkQualityQwen } from './services/qwenService';
import { generateCaptionGrok, refineCaptionGrok, checkQualityGrok } from './services/grokService';
import { sendComfyPrompt } from './services/comfyService';
import { DownloadIcon, SparklesIcon, WandIcon, LoaderIcon, CopyIcon, UploadCloudIcon, XIcon, CheckCircleIcon, AlertTriangleIcon, StopIcon, TrashIcon } from './components/Icons';
import { DEFAULT_COMFY_WORKFLOW } from './constants/defaultWorkflow';

// Ambient declaration for the build-time injected environment object
// (the app does not assume Node typings are installed).
declare const process: { env: { API_KEY?: string; [key: string]: string | undefined; } };

declare global {
  // Bridge object injected by the AI Studio host when the app runs embedded there.
  interface AIStudio {
    // FIX: bare `Promise` is invalid TypeScript (TS2314 — the generic requires a
    // type argument; it was stripped during extraction). The resolved value is
    // passed directly to a boolean state setter (`.then(setHasSelectedKey)` in the
    // mount effect), so this must be Promise<boolean>.
    hasSelectedApiKey: () => Promise<boolean>;
    // Awaited only for its side effect (opening the key picker), so Promise<void>.
    openSelectKey: () => Promise<void>;
  }
  interface Window {
    // Presumably loaded via a <script> tag at runtime; no typings available — TODO confirm.
    JSZip: any;
    aistudio?: AIStudio;
  }
}

// Supported caption back-ends and the host OS flavors used when generating shell commands.
type ApiProvider = 'gemini' | 'qwen' | 'grok';
type OSType = 'windows' | 'linux';

// Model pickers, one list per provider: `id` is the API model identifier, `name` is the UI label.
const GEMINI_MODELS = [
  { id: 'gemini-3-pro-preview', name: 'Gemini 3 Pro (High Quality)' },
  { id: 'gemini-3-flash-preview', name: 'Gemini 3 Flash (Fast)' },
  { id: 'gemini-2.5-pro-preview-09-2025', name: 'Gemini 2.5 Pro (Multimodal)' },
  { id: 'gemini-2.5-flash-native-audio-preview-09-2025', name: 'Gemini 2.5 Flash (Multimedia Speed)' }
];
const QWEN_MODELS = [
  { id: 'thesby/Qwen3-VL-8B-NSFW-Caption-V4.5', name: 'Thesby Qwen 3 VL 8B NSFW Caption V4.5' },
  { id: 'huihui-ai/Huihui-Qwen3-VL-8B-Instruct-abliterated', name: 'Huihui Qwen 3 VL 8B Abliterated (Uncensored)' },
  { id: 'Qwen/Qwen3-VL-8B-Instruct-FP8', name: 'Qwen 3 VL 8B FP8' },
];
const GROK_MODELS = [
  { id: 'grok-2-vision-1212', name: 'Grok 2 Vision (12-12)' },
  { id: 'grok-vision-beta', name: 'Grok Vision Beta' }
];
// NOTE: this `const` keyword deliberately ends the line — its binding,
// DEFAULT_BULK_INSTRUCTIONS, begins on the next source line.
const
DEFAULT_BULK_INSTRUCTIONS = `Dont use ambiguous language "perhaps" for example. Describe EVERYTHING visible: characters, clothing, actions, background, objects, lighting, and camera angle. Refrain from using generic phrases like "character, male, figure of" and use specific terminology: "woman, girl, boy, man". Do not mention the art style.`; const DEFAULT_REFINEMENT_INSTRUCTIONS = `Refine the caption to be more descriptive and cinematic. Ensure all colors and materials are mentioned.`; const App: React.FC = () => { // --- STATE --- const [mediaFiles, setMediaFiles] = useState([]); const [triggerWord, setTriggerWord] = useState('MyStyle'); const [apiProvider, setApiProvider] = useState('gemini'); const [geminiApiKey, setGeminiApiKey] = useState(process.env.API_KEY || ''); const [geminiModel, setGeminiModel] = useState(GEMINI_MODELS[0].id); const [hasSelectedKey, setHasSelectedKey] = useState(false); // xAI Grok Options const [grokApiKey, setGrokApiKey] = useState(''); const [grokModel, setGrokModel] = useState(GROK_MODELS[0].id); // Qwen Options const [qwenEndpoint, setQwenEndpoint] = useState(''); const [useCustomQwenModel, setUseCustomQwenModel] = useState(false); const [customQwenModelId, setCustomQwenModelId] = useState(''); const [qwenModel, setQwenModel] = useState(QWEN_MODELS[0].id); const [qwenOsType, setQwenOsType] = useState(() => navigator.userAgent.includes("Windows") ? 'windows' : 'linux'); const [qwenInstallDir, setQwenInstallDir] = useState(() => navigator.userAgent.includes("Windows") ? 
'C:\\AI\\qwen_local' : '/home/user/ai/qwen_local'); const [qwenMaxTokens, setQwenMaxTokens] = useState(8192); const [qwen8Bit, setQwen8Bit] = useState(false); const [qwenEager, setQwenEager] = useState(false); const [qwenVideoFrameCount, setQwenVideoFrameCount] = useState(8); // Offline Local Snapshot Options const [useOfflineSnapshot, setUseOfflineSnapshot] = useState(false); const [snapshotPath, setSnapshotPath] = useState(''); const [virtualModelName, setVirtualModelName] = useState('thesby/Qwen3-VL-8B-NSFW-Caption-V4.5'); // ComfyUI Options const [isComfyEnabled, setIsComfyEnabled] = useState(false); const [comfyUrl, setComfyUrl] = useState('http://localhost:5000'); const [comfyWorkflow, setComfyWorkflow] = useState(DEFAULT_COMFY_WORKFLOW); const [comfyWorkflowName, setComfyWorkflowName] = useState('Default Workflow'); const [comfySeed, setComfySeed] = useState(-1); const [comfySteps, setComfySteps] = useState(4); const [activePreviewId, setActivePreviewId] = useState(null); // Secure Bridge Options const [useSecureBridge, setUseSecureBridge] = useState(false); const [isFirstTimeBridge, setIsFirstTimeBridge] = useState(false); const [bridgeOsType, setBridgeOsType] = useState(() => navigator.userAgent.includes("Windows") ? 'windows' : 'linux'); const [bridgeInstallPath, setBridgeInstallPath] = useState(() => navigator.userAgent.includes("Windows") ? 
'C:\\AI\\bridge' : '/home/user/ai/bridge'); // Queue and Performance const [useRequestQueue, setUseRequestQueue] = useState(true); const [concurrentTasks, setConcurrentTasks] = useState(1); const [isQueueRunning, setIsQueueRunning] = useState(false); // Dataset / Instructions const [bulkGenerationInstructions, setBulkGenerationInstructions] = useState(DEFAULT_BULK_INSTRUCTIONS); const [bulkRefinementInstructions, setBulkRefinementInstructions] = useState(DEFAULT_REFINEMENT_INSTRUCTIONS); const [autofitTextareas, setAutofitTextareas] = useState(false); const [showSideBySidePreview, setShowSideBySidePreview] = useState(false); const [datasetPrefix, setDatasetPrefix] = useState('item'); const [isCharacterTaggingEnabled, setIsCharacterTaggingEnabled] = useState(false); const [characterShowName, setCharacterShowName] = useState(''); const [isExporting, setIsExporting] = useState(false); const abortControllerRef = useRef(new AbortController()); // --- EFFECTS --- useEffect(() => { if (window.aistudio) { window.aistudio.hasSelectedApiKey().then(setHasSelectedKey); } const isHttps = window.location.protocol === 'https:'; if (!qwenEndpoint) { setQwenEndpoint(isHttps ? 
'' : 'http://localhost:8000/v1'); } }, [qwenEndpoint]); // Handle Modal Keyboard Navigation useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { if (!activePreviewId) return; if (e.key === 'ArrowRight') handleNextPreview(); if (e.key === 'ArrowLeft') handlePrevPreview(); if (e.key === 'Escape') setActivePreviewId(null); }; window.addEventListener('keydown', handleKeyDown); return () => window.removeEventListener('keydown', handleKeyDown); }, [activePreviewId, mediaFiles]); // --- MEMOIZED VALUES --- const hasValidConfig = useMemo(() => { if (apiProvider === 'gemini') return !!geminiApiKey; if (apiProvider === 'grok') return !!grokApiKey; return qwenEndpoint !== ''; }, [apiProvider, geminiApiKey, grokApiKey, qwenEndpoint]); const selectedFiles = useMemo(() => { return (mediaFiles || []).filter(mf => mf.isSelected); }, [mediaFiles]); const currentPreviewItem = useMemo(() => (mediaFiles || []).find(m => m.id === activePreviewId), [mediaFiles, activePreviewId]); const qwenEffectiveModel = useMemo(() => { if (useOfflineSnapshot) return virtualModelName; return useCustomQwenModel ? customQwenModelId : qwenModel; }, [useOfflineSnapshot, virtualModelName, useCustomQwenModel, customQwenModelId, qwenModel]); const qwenStartCommand = useMemo(() => { const isWin = qwenOsType === 'windows'; const path = qwenInstallDir.replace(/[\\/]+$/, ''); // Model logic for command const modelToLoad = useOfflineSnapshot ? snapshotPath : (useCustomQwenModel ? customQwenModelId : qwenModel); const activate = isWin ? `venv\\Scripts\\activate` : `source venv/bin/activate`; const python = isWin ? `python` : `python3`; const offlineEnv = isWin ? 
`set HF_HUB_OFFLINE=1` : `export HF_HUB_OFFLINE=1`; let args = `--model "${modelToLoad}" --max-model-len ${qwenMaxTokens}`; if (useOfflineSnapshot) { args += ` --served-model-name "${virtualModelName}"`; } if (qwen8Bit) args += ` --load-format bitsandbytes --quantization bitsandbytes`; if (qwenEager) args += ` --enforce-eager`; const baseCmd = isWin ? `cd /d "${path}" && ${useOfflineSnapshot ? `${offlineEnv} && ` : ''}${activate} && ${python} -m vllm.entrypoints.openai.api_server ${args}` : `cd "${path}" && ${useOfflineSnapshot ? `${offlineEnv} && ` : ''}${activate} && ${python} -m vllm.entrypoints.openai.api_server ${args}`; return baseCmd; }, [qwenOsType, qwenInstallDir, useCustomQwenModel, customQwenModelId, qwenModel, qwenMaxTokens, qwen8Bit, qwenEager, useOfflineSnapshot, snapshotPath, virtualModelName]); const bridgeStartCommand = useMemo(() => { const isWindows = bridgeOsType === 'windows'; const path = bridgeInstallPath.replace(/[\\/]+$/, ''); const activateCmd = isWindows ? `call venv\\Scripts\\activate` : `source venv/bin/activate`; const pipCmd = `pip install flask flask-cors requests`; const setupCmd = isWindows ? `python -m venv venv && ${activateCmd} && ${pipCmd}` : `python3 -m venv venv && ${activateCmd} && ${pipCmd}`; return isWindows ? `cd /d "${path}" && ${isFirstTimeBridge ? `${setupCmd} && ` : ''}${activateCmd} && python bridge.py` : `cd "${path}" && ${isFirstTimeBridge ? `${setupCmd} && ` : ''}${activateCmd} && python3 bridge.py`; }, [bridgeInstallPath, bridgeOsType, isFirstTimeBridge]); const isTunnelRequired = useMemo(() => { return window.location.protocol === 'https:' && (qwenEndpoint.includes('localhost') || qwenEndpoint.includes('127.0.0.1')); }, [qwenEndpoint]); // --- HANDLERS --- const handleSelectApiKey = async () => { if (window.aistudio) { await window.aistudio.openSelectKey(); setHasSelectedKey(true); } }; const updateFile = useCallback((id: string, updates: Partial) => { setMediaFiles(prev => (prev || []).map(mf => (mf.id === id ? 
{ ...mf, ...updates } : mf))); }, []); const handleFilesAdded = useCallback(async (files: File[]) => { const mediaFilesList = files.filter(file => file.type.startsWith('image/') || file.type.startsWith('video/')); const textFilesList = files.filter(file => file.name.toLowerCase().endsWith('.txt')); // Create a map of filename (no extension) to the text file object for quick lookup const textFilesMap = new Map(); textFilesList.forEach(f => { const baseName = f.name.substring(0, f.name.lastIndexOf('.')); textFilesMap.set(baseName.toLowerCase(), f); }); const newMediaFiles = await Promise.all(mediaFilesList.map(async (file) => { const baseName = file.name.substring(0, file.name.lastIndexOf('.')); let initialCaption = ''; const matchedTxtFile = textFilesMap.get(baseName.toLowerCase()); if (matchedTxtFile) { try { initialCaption = await matchedTxtFile.text(); } catch (e) { console.error(`Failed to read caption for ${file.name}`, e); } } return { id: `${file.name}-${Math.random()}`, file, previewUrl: URL.createObjectURL(file), caption: initialCaption.trim(), status: GenerationStatus.IDLE, isSelected: false, customInstructions: '', comfyStatus: 'idle' } as MediaFile; })); setMediaFiles(prev => [...(prev || []), ...newMediaFiles]); }, []); const handleCheckQuality = useCallback(async (id: string) => { const fileToProcess = (mediaFiles || []).find(mf => mf.id === id); if (!hasValidConfig || !fileToProcess || !fileToProcess.caption) return; updateFile(id, { status: GenerationStatus.CHECKING, errorMessage: undefined }); try { let score = 0; if (apiProvider === 'gemini') { score = await checkCaptionQuality(fileToProcess.file, fileToProcess.caption, abortControllerRef.current.signal, geminiApiKey, geminiModel); } else if (apiProvider === 'grok') { score = await checkQualityGrok(grokApiKey, grokModel, fileToProcess.file, fileToProcess.caption, qwenVideoFrameCount, abortControllerRef.current.signal); } else { score = await checkQualityQwen('', qwenEndpoint, qwenEffectiveModel, 
fileToProcess.file, fileToProcess.caption, qwenVideoFrameCount, abortControllerRef.current.signal); } updateFile(id, { qualityScore: score, status: GenerationStatus.SUCCESS }); } catch (err: any) { if (err.name === 'AbortError' || err.message === 'AbortError') { updateFile(id, { status: GenerationStatus.IDLE, errorMessage: "Stopped by user" }); } else { updateFile(id, { status: GenerationStatus.ERROR, errorMessage: err.message }); } } }, [mediaFiles, apiProvider, qwenEndpoint, qwenEffectiveModel, qwenVideoFrameCount, grokApiKey, grokModel, hasValidConfig, updateFile, geminiApiKey, geminiModel]); const handleGenerateCaption = useCallback(async (id: string, itemInstructions?: string) => { const fileToProcess = (mediaFiles || []).find(mf => mf.id === id); if (!hasValidConfig || !fileToProcess) return; updateFile(id, { status: GenerationStatus.GENERATING, errorMessage: undefined, qualityScore: undefined }); const combinedInstructions = `${bulkGenerationInstructions}\n\n${itemInstructions || ''}`.trim(); try { let caption = ''; if (apiProvider === 'gemini') { caption = await generateCaption(fileToProcess.file, triggerWord, combinedInstructions, isCharacterTaggingEnabled, characterShowName, abortControllerRef.current.signal, geminiApiKey, geminiModel); } else if (apiProvider === 'grok') { caption = await generateCaptionGrok(grokApiKey, grokModel, fileToProcess.file, triggerWord, combinedInstructions, isCharacterTaggingEnabled, characterShowName, qwenVideoFrameCount, abortControllerRef.current.signal); } else { caption = await generateCaptionQwen('', qwenEndpoint, qwenEffectiveModel, fileToProcess.file, triggerWord, combinedInstructions, isCharacterTaggingEnabled, characterShowName, qwenVideoFrameCount, abortControllerRef.current.signal); } updateFile(id, { caption, status: GenerationStatus.SUCCESS }); } catch (err: any) { if (err.name === 'AbortError' || err.message === 'AbortError') { updateFile(id, { status: GenerationStatus.IDLE, errorMessage: "Stopped by user" }); } 
else { updateFile(id, { status: GenerationStatus.ERROR, errorMessage: err.message }); } } }, [mediaFiles, triggerWord, apiProvider, qwenEndpoint, qwenEffectiveModel, qwenVideoFrameCount, grokApiKey, grokModel, bulkGenerationInstructions, isCharacterTaggingEnabled, characterShowName, hasValidConfig, updateFile, geminiApiKey, geminiModel]); const handleRefineCaptionItem = useCallback(async (id: string, itemInstructions?: string) => { const fileToProcess = (mediaFiles || []).find(mf => mf.id === id); if (!hasValidConfig || !fileToProcess || !fileToProcess.caption) return; updateFile(id, { status: GenerationStatus.GENERATING, errorMessage: undefined }); const combinedInstructions = `${bulkRefinementInstructions}\n\n${itemInstructions || ''}`.trim(); try { let caption = ''; if (apiProvider === 'gemini') { caption = await refineCaption(fileToProcess.file, fileToProcess.caption, combinedInstructions, abortControllerRef.current.signal, geminiApiKey, geminiModel); } else if (apiProvider === 'grok') { caption = await refineCaptionGrok(grokApiKey, grokModel, fileToProcess.file, fileToProcess.caption, combinedInstructions, qwenVideoFrameCount, abortControllerRef.current.signal); } else { caption = await refineCaptionQwen('', qwenEndpoint, qwenEffectiveModel, fileToProcess.file, fileToProcess.caption, combinedInstructions, qwenVideoFrameCount, abortControllerRef.current.signal); } updateFile(id, { caption, status: GenerationStatus.SUCCESS }); } catch (err: any) { if (err.name === 'AbortError' || err.message === 'AbortError') { updateFile(id, { status: GenerationStatus.IDLE, errorMessage: "Stopped by user" }); } else { updateFile(id, { status: GenerationStatus.ERROR, errorMessage: err.message }); } } }, [mediaFiles, apiProvider, qwenEndpoint, qwenEffectiveModel, qwenVideoFrameCount, grokApiKey, grokModel, bulkRefinementInstructions, hasValidConfig, updateFile, geminiApiKey, geminiModel]); // --- QUEUE CONTROLLER --- const runTasksInQueue = async (tasks: (() => Promise)[]) => { 
setIsQueueRunning(true); const pool = new Set>(); for (const task of tasks) { if (abortControllerRef.current.signal.aborted) break; const promise = task(); pool.add(promise); promise.finally(() => pool.delete(promise)); if (pool.size >= concurrentTasks) { await Promise.race(pool); } } await Promise.all(pool); setIsQueueRunning(false); }; const handleBulkGenerate = () => { const tasks = selectedFiles.map(file => () => handleGenerateCaption(file.id, file.customInstructions)); if (useRequestQueue) { runTasksInQueue(tasks); } else { tasks.forEach(t => t()); } }; const handleBulkRefine = () => { const tasks = selectedFiles.map(file => () => handleRefineCaptionItem(file.id, file.customInstructions)); if (useRequestQueue) { runTasksInQueue(tasks); } else { tasks.forEach(t => t()); } }; const handleBulkQualityCheck = () => { const tasks = selectedFiles.map(file => () => handleCheckQuality(file.id)); if (useRequestQueue) { runTasksInQueue(tasks); } else { tasks.forEach(t => t()); } }; const handleClearWorkflow = useCallback(() => { setComfyWorkflow(DEFAULT_COMFY_WORKFLOW); setComfyWorkflowName('Default Workflow'); }, []); const handleComfyPreview = useCallback(async (id: string) => { const item = (mediaFiles || []).find(m => m.id === id); if (!item || !comfyWorkflow || !comfyUrl) return; updateFile(id, { comfyStatus: 'generating', comfyErrorMessage: undefined }); try { const previewUrl = await sendComfyPrompt(comfyUrl, comfyWorkflow, item.caption, comfySeed, comfySteps, useSecureBridge, abortControllerRef.current.signal); updateFile(id, { comfyPreviewUrl: previewUrl, comfyStatus: 'success' }); } catch (err: any) { if (err.name === 'AbortError' || err.message === 'Aborted') { updateFile(id, { comfyStatus: 'idle', comfyErrorMessage: "Stopped" }); } else { updateFile(id, { comfyStatus: 'error', comfyErrorMessage: err.message }); } } }, [mediaFiles, comfyWorkflow, comfyUrl, comfySeed, comfySteps, useSecureBridge, updateFile]); const handleBulkPreview = () => { 
selectedFiles.forEach(file => handleComfyPreview(file.id)); }; const handleDeleteSelected = useCallback(() => { setMediaFiles(prev => { const remaining = (prev || []).filter(mf => !mf.isSelected); return remaining || []; }); }, []); const handleStopTasks = () => { abortControllerRef.current.abort(); abortControllerRef.current = new AbortController(); setIsQueueRunning(false); setMediaFiles(prev => (prev || []).map(mf => { if (mf.status === GenerationStatus.GENERATING || mf.status === GenerationStatus.CHECKING) { return { ...mf, status: GenerationStatus.IDLE, errorMessage: "Stopped by user" }; } if (mf.comfyStatus === 'generating') { return { ...mf, comfyStatus: 'idle', comfyErrorMessage: "Stopped" }; } return mf; })); }; const handleExportDataset = useCallback(async () => { if (selectedFiles.length === 0) return; const JSZip = (window as any).JSZip; if (!JSZip) return alert("JSZip not loaded."); setIsExporting(true); try { const zip = new JSZip(); const prefix = datasetPrefix.trim() || 'item'; selectedFiles.forEach((mf, idx) => { const fileExt = mf.file.name.split('.').pop() || 'dat'; const finalName = `${prefix}_${idx + 1}`; zip.file(`${finalName}.${fileExt}`, mf.file); zip.file(`${finalName}.txt`, mf.caption || ""); }); const content = await zip.generateAsync({ type: 'blob' }); const link = document.createElement('a'); link.href = URL.createObjectURL(content); link.download = `lora_dataset_${new Date().getTime()}.zip`; link.click(); } catch (err: any) { alert("Export failed: " + err.message); } finally { setIsExporting(false); } }, [selectedFiles, datasetPrefix]); const handleNextPreview = useCallback(() => { if (!activePreviewId || (mediaFiles || []).length <= 1) return; const currentIndex = mediaFiles.findIndex(m => m.id === activePreviewId); const nextIndex = (currentIndex + 1) % mediaFiles.length; setActivePreviewId(mediaFiles[nextIndex].id); }, [activePreviewId, mediaFiles]); const handlePrevPreview = useCallback(() => { if (!activePreviewId || (mediaFiles 
|| []).length <= 1) return; const currentIndex = mediaFiles.findIndex(m => m.id === activePreviewId); const prevIndex = (currentIndex - 1 + mediaFiles.length) % mediaFiles.length; setActivePreviewId(mediaFiles[prevIndex].id); }, [activePreviewId, mediaFiles]); const downloadQwenSetupScript = () => { const isWin = qwenOsType === 'windows'; const content = isWin ? `@echo off\nSETLOCAL EnableDelayedExpansion\necho [LoRA Caption Assistant] Starting Local Qwen Setup for Windows...\n\n:: Check for Python\npython --version >nul 2>&1\nif %errorlevel% neq 0 (\n echo [ERROR] Python not found! Please install Python 3.10+ from python.org\n pause\n exit /b\n)\n\necho [1/3] Creating Virtual Environment...\npython -m venv venv\nif %errorlevel% neq 0 (\n echo [ERROR] Failed to create venv.\n pause\n exit /b\n)\n\necho [2/3] Activating Environment and Upgrading Pip...\ncall venv\\Scripts\\activate\npython -m pip install --upgrade pip\n\necho [3/3] Installing vLLM and Dependencies...\necho vLLM natively on Windows is Experimental. Using WSL2 is highly recommended.\necho Attempting installation of bitsandbytes and requirements...\npip install bitsandbytes requests\n:: Note: Users often need specific wheels for vLLM on Windows or WSL2.\necho To run vLLM on Windows, please follow the official guide for WSL2.\necho This script sets up the local Python environment for bridging.\npause` : `#!/bin/bash\npython3 -m venv venv\nsource venv/bin/activate\npip install vllm bitsandbytes\necho Setup Complete.`; const filename = isWin ? 'setup_qwen.bat' : 'setup_qwen.sh'; const blob = new Blob([content], { type: 'text/plain' }); const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = filename; a.click(); URL.revokeObjectURL(url); }; const downloadBridgeSetupScript = () => { const isWin = bridgeOsType === 'windows'; const content = isWin ? 
`@echo off\nSETLOCAL EnableDelayedExpansion\necho [LoRA Caption Assistant] Starting Secure Bridge Setup for Windows...\n\n:: Check for Python\npython --version >nul 2>&1\nif %errorlevel% neq 0 (\n echo [ERROR] Python not found! Please install Python 3.10+ from python.org\n pause\n exit /b\n)\n\necho [1/3] Creating Virtual Environment...\npython -m venv venv\nif %errorlevel% neq 0 (\n echo [ERROR] Failed to create venv.\n pause\n exit /b\n)\n\necho [2/3] Activating Environment...\ncall venv\\Scripts\\activate\n\necho [3/3] Installing Bridge Dependencies...\npip install flask flask-cors requests\nif %errorlevel% neq 0 (\n echo [ERROR] Installation failed.\n pause\n exit /b\n)\n\necho Bridge Setup Complete. You can now download bridge.py and run it using the command shown in the app.\npause` : `#!/bin/bash\npython3 -m venv venv\nsource venv/bin/activate\npip install flask flask-cors requests\necho Bridge Setup Complete.`; const filename = isWin ? 'setup_bridge.bat' : 'setup_bridge.sh'; const blob = new Blob([content], { type: 'text/plain' }); const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = filename; a.click(); URL.revokeObjectURL(url); }; const downloadBridgeScript = () => { const code = `import requests\nfrom flask import Flask, request, Response\nfrom flask_cors import CORS\napp = Flask(__name__)\nCORS(app)\nTARGET = "http://127.0.0.1:8188"\n@app.route('/', defaults={'path': ''}, methods=['GET','POST','PUT','DELETE','PATCH','OPTIONS'])\n@app.route('/', methods=['GET','POST','PUT','DELETE','PATCH','OPTIONS'])\ndef proxy(path):\n url = f"{TARGET}/{path}"\n headers = {k:v for k,v in request.headers.items() if k.lower() not in ['host', 'origin', 'referer']}\n resp = requests.request(method=request.method, url=url, headers=headers, data=request.get_data(), params=request.args, stream=True)\n return Response(resp.content, resp.status_code, [(n,v) for n,v in resp.headers.items() if n.lower() not in 
['content-encoding','content-length','transfer-encoding','connection']])\nif __name__ == '__main__': app.run(port=5000, host='0.0.0.0')`; const blob = new Blob([code], { type: 'text/x-python' }); const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = 'bridge.py'; a.click(); URL.revokeObjectURL(url); }; // --- RENDER --- return (
{/* PREVIEW MODAL */} {activePreviewId && currentPreviewItem && (
setActivePreviewId(null)}>
e.stopPropagation()}>

{(mediaFiles || []).findIndex(m => m.id === activePreviewId) + 1} of {mediaFiles.length}

{currentPreviewItem.file.name}

{currentPreviewItem.file.type.startsWith('video/') ?
{currentPreviewItem.comfyPreviewUrl ? :
No Preview Rendered
} {currentPreviewItem.comfyStatus === 'generating' &&
Rendering via ComfyUI...
}
ComfyUI Render