import React, { useEffect, useState, useCallback } from "react";
import {
  ArrowLeft,
  Play,
  Pause,
  SkipBack,
  SkipForward,
  Plus,
  RefreshCw,
  Check,
  Loader2,
  ImageIcon,
  Monitor,
  Edit3,
  Wand2,
  Save,
  X,
  Settings,
  FileText,
  Sparkles,
  AlertCircle,
  Download,
  Tv,
  Smartphone,
  Presentation,
  Camera,
  Palette,
  Star,
  Lock,
  Shield,
  Film,
  MoreHorizontal,
  Sliders,
  ChevronDown,
} from "lucide-react";
import { useTVModeStore } from "./studio/stores/tvModeStore";
import type { TVScene } from "./studio/stores/tvModeStore";
import { TVModeContainer } from "./studio/components/TVMode/TVModeContainer";
import CreatorStudioSettings, {
  CREATOR_STUDIO_PARAM_DEFAULTS,
  type CreatorStudioGenerationParams,
} from "./CreatorStudioSettings";

// Types

/** Lifecycle state of a single scene's media generation. */
type SceneStatus = "pending" | "generating" | "ready" | "error";

/** Output platform / aspect-ratio preset for a project. */
type PlatformPreset = "youtube_16_9" | "shorts_9_16" | "slides_16_9";

type ContentRating = "sfw" | "mature";

/** One scene of a project: narration plus generated media URLs (camelCase UI shape). */
type Scene = {
  id: string;
  videoId: string;
  idx: number;
  narration: string;
  imagePrompt: string;
  negativePrompt: string;
  imageUrl: string | null;
  videoUrl: string | null;
  audioUrl: string | null;
  status: SceneStatus;
  durationSec: number;
  createdAt: number;
  updatedAt: number;
};

/** A Creator Studio project (video or slideshow). */
type Project = {
  id: string;
  title: string;
  logline: string;
  status: "draft" | "in_review" | "approved" | "archived";
  platformPreset: PlatformPreset;
  contentRating: ContentRating;
  tags?: string[];
  createdAt: number;
  updatedAt: number;
  metadata?: {
    story_outline?: StoryOutline;
    generationMode?: "video" | "slideshow";
  };
};

/** One scene as described by the AI story outline (snake_case: backend shape). */
type SceneOutline = {
  scene_number: number;
  title: string;
  description: string;
  narration: string;
  image_prompt: string;
  negative_prompt: string;
  duration_sec: number;
};

/** Full AI-generated story outline, including the five-part story arc. */
type StoryOutline = {
  title: string;
  logline: string;
  visual_style: string;
  tone: string;
  story_arc: {
    beginning: string;
    rising_action: string;
    climax: string;
    falling_action: string;
    resolution: string;
  };
  scenes: SceneOutline[];
};

/** A selectable model (LLM, image, or video) reported by the backend. */
type AvailableModel = {
  id: string;
  name: string;
  provider?: string;
};

interface CreatorStudioEditorProps {
  projectId: string;
  backendUrl: string;
  apiKey?: string;
  onExit: () => void;
  autoGenerateFirst?: boolean;
  targetSceneCount?: number;
  defaultLLMModel?: string;
  imageProvider?: string;
  imageModel?: string;
  imageWidth?: number;
  imageHeight?: number;
  imageSteps?: number;
  imageCfg?: number;
  /** Video model for AI video generation */
  videoModel?: string;
  /** Enable video generation after image generation */
  enableVideoGeneration?: boolean;
}

/**
 * CreatorStudioEditor - Professional editor for Creator Studio projects
 * Styled like Play Story mode but enhanced for creators
 */
export function CreatorStudioEditor({
  projectId,
  backendUrl,
  apiKey,
  onExit,
  autoGenerateFirst = false,
  targetSceneCount = 8,
  defaultLLMModel = "",
  imageProvider = "comfyui",
  imageModel,
  imageWidth = 1344,
  imageHeight = 768,
  imageSteps,
  imageCfg,
  videoModel,
  enableVideoGeneration = false,
}: CreatorStudioEditorProps) {
  const authKey = (apiKey || "").trim();
  const [hasAutoTriggered, setHasAutoTriggered] = useState(false);
  const [hasAutoGeneratedOutline, setHasAutoGeneratedOutline] = useState(false);

  // TV Mode store
  const tvModeActive = useTVModeStore((s) => s.isActive);
  const enterTVMode = useTVModeStore((s) => s.enterTVMode);
  const updateSceneImageByIdx = useTVModeStore((s) => s.updateSceneImageByIdx);

  // State
  // NOTE(review): explicit type arguments added — a bare `useState(null)` infers a
  // permanent `null` type and `useState([])` infers `never[]` under strict mode.
  const [project, setProject] = useState<Project | null>(null);
  const [scenes, setScenes] = useState<Scene[]>([]);
  const [loading, setLoading] = useState(true);

  // Story outline state
  const [storyOutline, setStoryOutline] = useState<StoryOutline | null>(null);
  const [isGeneratingOutline, setIsGeneratingOutline] = useState(false);
  const [showOutlinePanel, setShowOutlinePanel] = useState(false);

  // Scene editor state
  const [showSceneEditor, setShowSceneEditor] = useState(false);
  const [editingScene, setEditingScene] = useState<Scene | null>(null);
  const [editNarration, setEditNarration] = useState("");
  const [editImagePrompt, setEditImagePrompt] = useState("");
  const [editNegativePrompt, setEditNegativePrompt] = useState("");
const [isSavingScene, setIsSavingScene] = useState(false); // Model selection state const [availableLLMModels, setAvailableLLMModels] = useState([]); const [availableImageModels, setAvailableImageModels] = useState([]); const [selectedLLMModel, setSelectedLLMModel] = useState(defaultLLMModel); const [selectedImageModel, setSelectedImageModel] = useState(imageModel || ""); const [selectedVideoModel, setSelectedVideoModel] = useState(videoModel || ""); const [settingsLLMModel, setSettingsLLMModel] = useState(defaultLLMModel); const [error, setError] = useState(null); const [currentSceneIndex, setCurrentSceneIndex] = useState(0); const [isPlaying, setIsPlaying] = useState(false); const [isTTSSpeaking, setIsTTSSpeaking] = useState(false); const [isSaving, setIsSaving] = useState(false); const [lastSaved, setLastSaved] = useState(null); const [isGeneratingScene, setIsGeneratingScene] = useState(false); const [isGeneratingImage, setIsGeneratingImage] = useState(false); const [isGeneratingVideo, setIsGeneratingVideo] = useState(false); const [hoveredSceneIdx, setHoveredSceneIdx] = useState(null); const [canPlayWebm, setCanPlayWebm] = useState(true); // Batch generation state (generates all scenes from outline) const [isBatchGenerating, setIsBatchGenerating] = useState(false); const [batchProgress, setBatchProgress] = useState<{ current: number; total: number; phase: 'scene' | 'image' | 'video' }>({ current: 0, total: 0, phase: 'scene' }); // Project Settings Modal state const [showSettingsModal, setShowSettingsModal] = useState(false); const [settingsTitle, setSettingsTitle] = useState(""); const [settingsLogline, setSettingsLogline] = useState(""); const [settingsPlatform, setSettingsPlatform] = useState("youtube_16_9"); const [settingsGoal, setSettingsGoal] = useState<"Entertain" | "Educate" | "Inspire">("Educate"); const [settingsVisualStyle, setSettingsVisualStyle] = useState<"Cinematic" | "Digital Art" | "Anime">("Cinematic"); const [settingsTones, setSettingsTones] 
= useState(["Documentary", "Calm"]); const [settingsSceneCount, setSettingsSceneCount] = useState(8); const [settingsSceneDuration, setSettingsSceneDuration] = useState(5); const [settingsLockIdentity, setSettingsLockIdentity] = useState(true); const [settingsContentRating, setSettingsContentRating] = useState("sfw"); const [settingsEnableVideo, setSettingsEnableVideo] = useState(false); // Enable video generation capability const [isSavingSettings, setIsSavingSettings] = useState(false); // Model override state for Project Settings const [settingsImageModel, setSettingsImageModel] = useState(""); const [settingsVideoModel, setSettingsVideoModel] = useState(""); const [availableVideoModels, setAvailableVideoModels] = useState([]); const [loadingModels, setLoadingModels] = useState(false); // Generation parameters state (advanced customization) const [genParams, setGenParams] = useState(CREATOR_STUDIO_PARAM_DEFAULTS); // Project-level capability: can this project generate videos? // IMPORTANT: do NOT rely only on enableVideoGeneration (wizard-only, transient) // This derives video capability from project metadata/tags for existing projects const projectWantsVideo = Boolean( project?.metadata?.generationMode === "video" || project?.tags?.includes("mode:video") || project?.tags?.includes("projectType:video") || project?.tags?.includes("projectType:video_series") || enableVideoGeneration === true // backward compatibility with wizard prop ); // API helpers const fetchApi = useCallback( async (path: string): Promise => { const url = `${backendUrl.replace(/\/+$/, "")}${path}`; const res = await fetch(url, { method: "GET", headers: { "Content-Type": "application/json", ...(authKey ? { "x-api-key": authKey } : {}), }, }); if (!res.ok) { const text = await res.text().catch(() => ""); throw new Error(`HTTP ${res.status}${text ? 
`: ${text}` : ""}`); } return (await res.json()) as T; }, [backendUrl, authKey] ); const postApi = useCallback( async (path: string, body: any): Promise => { const url = `${backendUrl.replace(/\/+$/, "")}${path}`; const res = await fetch(url, { method: "POST", headers: { "Content-Type": "application/json", ...(authKey ? { "x-api-key": authKey } : {}), }, body: JSON.stringify(body), }); if (!res.ok) { const text = await res.text().catch(() => ""); throw new Error(`HTTP ${res.status}${text ? `: ${text}` : ""}`); } return (await res.json()) as T; }, [backendUrl, authKey] ); const patchApi = useCallback( async (path: string, body: any): Promise => { const url = `${backendUrl.replace(/\/+$/, "")}${path}`; const res = await fetch(url, { method: "PATCH", headers: { "Content-Type": "application/json", ...(authKey ? { "x-api-key": authKey } : {}), }, body: JSON.stringify(body), }); if (!res.ok) { const text = await res.text().catch(() => ""); throw new Error(`HTTP ${res.status}${text ? `: ${text}` : ""}`); } return (await res.json()) as T; }, [backendUrl, authKey] ); const deleteApi = useCallback( async (path: string): Promise => { const url = `${backendUrl.replace(/\/+$/, "")}${path}`; const res = await fetch(url, { method: "DELETE", headers: { "Content-Type": "application/json", ...(authKey ? { "x-api-key": authKey } : {}), }, }); if (!res.ok) { const text = await res.text().catch(() => ""); throw new Error(`HTTP ${res.status}${text ? `: ${text}` : ""}`); } return (await res.json()) as T; }, [backendUrl, authKey] ); // ---------- API <-> UI normalization helpers ---------- // Backend may return snake_case (image_url/video_url). UI uses camelCase (imageUrl/videoUrl). const normalizeScene = useCallback((raw: any): Scene => { return { id: raw.id, videoId: raw.videoId ?? raw.video_id ?? "", idx: raw.idx ?? raw.index ?? raw.scene_index ?? 0, narration: raw.narration ?? "", imagePrompt: raw.imagePrompt ?? raw.image_prompt ?? "", negativePrompt: raw.negativePrompt ?? 
raw.negative_prompt ?? "", imageUrl: raw.imageUrl ?? raw.image_url ?? null, videoUrl: raw.videoUrl ?? raw.video_url ?? null, audioUrl: raw.audioUrl ?? raw.audio_url ?? null, status: raw.status ?? "pending", durationSec: raw.durationSec ?? raw.duration_sec ?? 5, createdAt: raw.createdAt ?? raw.created_at ?? 0, updatedAt: raw.updatedAt ?? raw.updated_at ?? 0, } as Scene; }, []); const normalizeScenes = useCallback((arr: any[]): Scene[] => { if (!Array.isArray(arr)) return []; return arr.map(normalizeScene).sort((a, b) => a.idx - b.idx); }, [normalizeScene]); // Convert UI patch (camelCase) to backend-friendly payload (include snake_case too) const toScenePatch = useCallback((patch: any) => { const out: any = { ...patch }; if ("imageUrl" in out) out.image_url = out.imageUrl; if ("videoUrl" in out) out.video_url = out.videoUrl; if ("audioUrl" in out) out.audio_url = out.audioUrl; if ("imagePrompt" in out) out.image_prompt = out.imagePrompt; if ("negativePrompt" in out) out.negative_prompt = out.negativePrompt; if ("durationSec" in out) out.duration_sec = out.durationSec; return out; }, []); // Authoritative refresh from backend (source of truth) const refreshScenes = useCallback(async () => { try { const scenesRes = await fetchApi<{ scenes: any[] }>(`/studio/videos/${projectId}/scenes`); setScenes(normalizeScenes((scenesRes as any).scenes)); } catch (e) { // Non-critical - log but don't alert console.warn('[CreatorStudioEditor] Failed to refresh scenes:', e); } }, [fetchApi, projectId, normalizeScenes]); // Proxy video URL through backend for correct Content-Type headers // This ensures WebM videos play correctly in browsers const proxyVideoUrl = useCallback((rawUrl: string | null | undefined): string | null => { if (!rawUrl) return null; // Only proxy ComfyUI localhost URLs if (rawUrl.startsWith('http://localhost:8188/') || rawUrl.startsWith('http://127.0.0.1:8188/')) { return `${backendUrl.replace(/\/+$/, '')}/studio/media?url=${encodeURIComponent(rawUrl)}`; } // 
Already proxied or external URL - return as-is return rawUrl; }, [backendUrl]); // Detect when "videoUrl" is actually an animated image (e.g. .webp, .gif) // Some Comfy workflows output animated WebP instead of WebM when ffmpeg is unavailable const isAnimatedImageUrl = useCallback((u: string | null | undefined): boolean => { if (!u) return false; const s = u.toLowerCase(); // Covers direct URLs and ComfyUI view?filename=... patterns return s.includes(".webp") || s.includes(".gif"); }, []); // Detect if URL is a WebM video const isWebmUrl = useCallback((u: string | null | undefined): boolean => { if (!u) return false; return u.toLowerCase().includes(".webm"); }, []); // Sync outline with current scenes (keeps outline in sync with actual scene data) const syncOutlineWithScenes = useCallback(async () => { if (!projectId) return; try { console.log('[CreatorStudioEditor] Syncing outline with scenes...'); const data = await postApi<{ ok: boolean; outline: any; scene_count: number }>( `/studio/videos/${projectId}/sync-outline`, {} ); if (data.ok && data.outline) { setStoryOutline(data.outline); console.log(`[CreatorStudioEditor] Outline synced: ${data.scene_count} scenes`); } } catch (e: any) { console.warn('[CreatorStudioEditor] Failed to sync outline:', e.message); // Non-critical - don't alert user } }, [projectId, postApi]); // Generate AI-powered story outline const generateStoryOutline = useCallback(async () => { if (!project || isGeneratingOutline) return; setIsGeneratingOutline(true); try { console.log('[CreatorStudioEditor] Generating story outline...'); const data = await postApi<{ ok: boolean; outline: StoryOutline; model_used: string }>( `/studio/videos/${projectId}/generate-outline`, { target_scenes: targetSceneCount, scene_duration: 5, ollama_model: selectedLLMModel || undefined, } ); if (data.ok && data.outline) { setStoryOutline(data.outline); console.log('[CreatorStudioEditor] Story outline generated:', data.outline.title); } } catch (e: any) { 
console.error('[CreatorStudioEditor] Failed to generate outline:', e); alert(`Failed to generate outline: ${e.message}`); } finally { setIsGeneratingOutline(false); } }, [project, projectId, targetSceneCount, selectedLLMModel, isGeneratingOutline, postApi]); // Load existing story outline const loadStoryOutline = useCallback(async () => { try { const data = await fetchApi<{ ok: boolean; outline: StoryOutline | null }>( `/studio/videos/${projectId}/outline` ); if (data.ok && data.outline) { setStoryOutline(data.outline); } } catch (e) { console.log('[CreatorStudioEditor] No existing outline found'); } }, [projectId, fetchApi]); // Open scene editor const openSceneEditor = useCallback((scene: Scene) => { setEditingScene(scene); setEditNarration(scene.narration || ""); setEditImagePrompt(scene.imagePrompt || ""); setEditNegativePrompt(scene.negativePrompt || ""); setShowSceneEditor(true); }, []); // Save scene edits const saveSceneEdits = useCallback(async () => { if (!editingScene) return; setIsSavingScene(true); try { await patchApi(`/studio/videos/${projectId}/scenes/${editingScene.id}`, { narration: editNarration, imagePrompt: editImagePrompt, negativePrompt: editNegativePrompt, }); setScenes((prev) => prev.map((s) => s.id === editingScene.id ? 
{ ...s, narration: editNarration, imagePrompt: editImagePrompt, negativePrompt: editNegativePrompt } : s ) ); setLastSaved(new Date()); setShowSceneEditor(false); setEditingScene(null); } catch (e: any) { console.error('[CreatorStudioEditor] Failed to save scene:', e); alert(`Failed to save scene: ${e.message}`); } finally { setIsSavingScene(false); } }, [editingScene, editNarration, editImagePrompt, editNegativePrompt, projectId, patchApi]); // Parse tags from project const parseTagsFromProject = useCallback((proj: Project) => { const tags = proj.tags || []; let goal: "Entertain" | "Educate" | "Inspire" = "Educate"; let visualStyle: "Cinematic" | "Digital Art" | "Anime" = "Cinematic"; let tones: string[] = []; let lockIdentity = true; let sceneCount = 8; let sceneDuration = 5; tags.forEach((tag) => { if (tag.startsWith("goal:")) { const g = tag.replace("goal:", ""); if (g === "entertain") goal = "Entertain"; else if (g === "educate") goal = "Educate"; else if (g === "inspire") goal = "Inspire"; } else if (tag.startsWith("visual:")) { const v = tag.replace("visual:", "").replace(/_/g, " "); if (v.toLowerCase() === "cinematic") visualStyle = "Cinematic"; else if (v.toLowerCase() === "digital art") visualStyle = "Digital Art"; else if (v.toLowerCase() === "anime") visualStyle = "Anime"; } else if (tag.startsWith("tone:")) { const t = tag.replace("tone:", "").replace(/_/g, " "); const capitalizedTone = t.charAt(0).toUpperCase() + t.slice(1); tones.push(capitalizedTone); } else if (tag === "lock:identity") { lockIdentity = true; } else if (tag.startsWith("scenes:")) { sceneCount = parseInt(tag.replace("scenes:", ""), 10) || 8; } else if (tag.startsWith("duration:")) { sceneDuration = parseInt(tag.replace("duration:", ""), 10) || 5; } }); return { goal, visualStyle, tones, lockIdentity, sceneCount, sceneDuration }; }, []); // Parse generation params from project tags const parseGenParamsFromTags = useCallback((tags: string[] | undefined | null): 
CreatorStudioGenerationParams => { const t = tags || []; const next = { ...CREATOR_STUDIO_PARAM_DEFAULTS }; const get = (k: string) => t.find(x => x.startsWith(`gen:${k}=`))?.split("=")[1]; next.enabled = get("enabled") === "1"; next.steps = Number(get("steps") ?? next.steps); next.cfgScale = Number(get("cfg") ?? next.cfgScale); next.creativity = Number(get("creativity") ?? next.creativity); next.lockSeed = get("seedlock") === "1"; next.seed = Number(get("seed") ?? next.seed); return next; }, []); // Open settings modal const openSettingsModal = useCallback(async () => { if (!project) return; setSettingsTitle(project.title || ""); setSettingsLogline(project.logline || ""); setSettingsPlatform(project.platformPreset || "youtube_16_9"); setSettingsContentRating(project.contentRating || "sfw"); const parsed = parseTagsFromProject(project); setSettingsGoal(parsed.goal); setSettingsVisualStyle(parsed.visualStyle); setSettingsTones(parsed.tones.length > 0 ? parsed.tones : ["Documentary", "Calm"]); setSettingsSceneCount(parsed.sceneCount); setSettingsSceneDuration(parsed.sceneDuration); setSettingsLockIdentity(parsed.lockIdentity); // Set the LLM model from current selection or project tags const tags = (project as any).tags || []; const llmTag = tags.find((t: string) => t.startsWith("llm:")); setSettingsLLMModel(llmTag ? 
llmTag.replace("llm:", "") : selectedLLMModel); // Initialize video generation enabled state from tags or enableVideoGeneration prop const hasVideoMode = tags.includes("mode:video") || tags.includes("projectType:video") || tags.includes("projectType:video_series") || project?.metadata?.generationMode === "video" || enableVideoGeneration === true; setSettingsEnableVideo(hasVideoMode); // Initialize generation params from project tags const parsedGenParams = parseGenParamsFromTags(tags); // Also restore negative prompt from localStorage try { const key = `creatorstudio:genparams:${projectId}:neg`; const stored = localStorage.getItem(key); if (stored) { const neg = JSON.parse(stored); parsedGenParams.useCustomNegativePrompt = Boolean(neg?.use); parsedGenParams.customNegativePrompt = String(neg?.text || ""); } } catch {} setGenParams(parsedGenParams); // Initialize image/video model overrides from project tags or props const imageModelTag = tags.find((t: string) => t.startsWith("imageModel:")); setSettingsImageModel(imageModelTag?.replace("imageModel:", "") || imageModel || ""); const videoModelTag = tags.find((t: string) => t.startsWith("videoModel:")); setSettingsVideoModel(videoModelTag?.replace("videoModel:", "") || videoModel || ""); // Fetch available models to ensure the dropdowns are populated setLoadingModels(true); try { // Fetch LLM, Image, and Video models in parallel const [llmData, imgData, vidData] = await Promise.all([ fetchApi<{ models: { id: string; name?: string }[] }>('/models?provider=ollama'), fetchApi<{ models: string[] }>('/models?provider=comfyui&model_type=image'), fetchApi<{ models: string[] }>('/models?provider=comfyui&model_type=video'), ]); if (llmData.models) { const models = llmData.models.map(m => ({ id: m.id, name: m.name || m.id })); setAvailableLLMModels(models); } if (imgData.models) { const models = imgData.models.map(m => ({ id: m, name: m })); setAvailableImageModels(models); } if (vidData.models) { const models = 
vidData.models.map(m => ({ id: m, name: m })); setAvailableVideoModels(models); } } catch (e) { console.log('[CreatorStudioEditor] Failed to fetch models for settings:', e); } finally { setLoadingModels(false); } setShowSettingsModal(true); }, [project, projectId, parseTagsFromProject, parseGenParamsFromTags, selectedLLMModel, imageModel, videoModel, enableVideoGeneration, fetchApi]); // Toggle tone in settings const toggleSettingsTone = useCallback((tone: string) => { setSettingsTones((prev) => { if (prev.includes(tone)) { return prev.filter((t) => t !== tone); } return [...prev, tone]; }); }, []); // Build tags from settings const buildTagsFromSettings = useCallback(() => { const tags: string[] = []; // Video generation mode - critical for "Make Video" button visibility tags.push(`mode:${settingsEnableVideo ? "video" : "slideshow"}`); if (settingsGoal) tags.push(`goal:${settingsGoal.toLowerCase()}`); if (settingsVisualStyle) tags.push(`visual:${settingsVisualStyle.toLowerCase().replace(/ /g, "_")}`); settingsTones.forEach((t) => tags.push(`tone:${t.toLowerCase().replace(/ /g, "_")}`)); if (settingsLockIdentity) tags.push("lock:identity"); tags.push(`scenes:${settingsSceneCount}`); tags.push(`duration:${settingsSceneDuration}`); if (settingsLLMModel) tags.push(`llm:${settingsLLMModel}`); // Model overrides - allows per-project model selection if (settingsImageModel) tags.push(`imageModel:${settingsImageModel}`); if (settingsVideoModel) tags.push(`videoModel:${settingsVideoModel}`); return tags; }, [settingsEnableVideo, settingsGoal, settingsVisualStyle, settingsTones, settingsLockIdentity, settingsSceneCount, settingsSceneDuration, settingsLLMModel, settingsImageModel, settingsVideoModel]); // Build tags including generation params const buildGenTags = useCallback((p: CreatorStudioGenerationParams) => { // Start with base settings tags, filter out any existing gen:* tags const tags = buildTagsFromSettings().filter(t => !t.startsWith("gen:")); // Add generation 
params tags.push(`gen:enabled=${p.enabled ? "1" : "0"}`); tags.push(`gen:steps=${p.steps}`); tags.push(`gen:cfg=${p.cfgScale}`); tags.push(`gen:creativity=${p.creativity}`); tags.push(`gen:seedlock=${p.lockSeed ? "1" : "0"}`); tags.push(`gen:seed=${p.seed}`); return tags; }, [buildTagsFromSettings]); // Save project settings const saveProjectSettings = useCallback(async () => { if (!project) return; setIsSavingSettings(true); try { const tags = buildGenTags(genParams); await patchApi(`/studio/videos/${projectId}`, { title: settingsTitle.trim(), logline: settingsLogline.trim(), platformPreset: settingsPlatform, contentRating: settingsContentRating, tags, }); // Persist negative prompt locally (can be long, so not in tags) try { const key = `creatorstudio:genparams:${projectId}:neg`; localStorage.setItem(key, JSON.stringify({ use: genParams.useCustomNegativePrompt, text: genParams.customNegativePrompt, })); } catch {} // Update local project state setProject((prev) => prev ? { ...prev, title: settingsTitle.trim(), logline: settingsLogline.trim(), platformPreset: settingsPlatform, contentRating: settingsContentRating, tags, } : null); // Update selected models to match settings setSelectedLLMModel(settingsLLMModel); if (settingsImageModel) setSelectedImageModel(settingsImageModel); if (settingsVideoModel) setSelectedVideoModel(settingsVideoModel); setLastSaved(new Date()); setShowSettingsModal(false); console.log('[CreatorStudioEditor] Project settings saved'); } catch (e: any) { console.error('[CreatorStudioEditor] Failed to save settings:', e); alert(`Failed to save settings: ${e.message}`); } finally { setIsSavingSettings(false); } }, [project, projectId, settingsTitle, settingsLogline, settingsPlatform, settingsContentRating, settingsLLMModel, settingsImageModel, settingsVideoModel, genParams, buildGenTags, patchApi]); // Delete scene const deleteScene = useCallback(async (sceneId: string) => { if (scenes.length <= 1) { alert("Cannot delete the last scene."); 
return; } if (!window.confirm("Delete this scene? This cannot be undone.")) { return; } try { console.log('[CreatorStudioEditor] Deleting scene:', sceneId); await deleteApi<{ ok: boolean }>(`/studio/videos/${projectId}/scenes/${sceneId}`); const deletedIndex = scenes.findIndex((s) => s.id === sceneId); setScenes((prev) => { const newScenes = prev .filter((s) => s.id !== sceneId) .map((s, i) => ({ ...s, idx: i })); return newScenes; }); if (deletedIndex >= 0 && deletedIndex <= currentSceneIndex) { setCurrentSceneIndex((prev) => Math.max(0, prev - 1)); } setLastSaved(new Date()); console.log('[CreatorStudioEditor] Scene deleted successfully'); } catch (e: any) { console.error('[CreatorStudioEditor] Failed to delete scene:', e); alert(`Failed to delete scene: ${e.message}`); } }, [projectId, scenes, currentSceneIndex, deleteApi]); // Fetch available models const fetchAvailableModels = useCallback(async () => { try { const llmData = await fetchApi<{ models: { id: string; name?: string }[] }>( '/models?provider=ollama' ); if (llmData.models) { const models = llmData.models.map(m => ({ id: m.id, name: m.name || m.id })); setAvailableLLMModels(models); // Auto-select a model if none selected if (!selectedLLMModel && models.length > 0) { // Prefer llama3:8b if available, otherwise first model const preferred = models.find(m => m.id === "llama3:8b") || models[0]; setSelectedLLMModel(preferred.id); setSettingsLLMModel(preferred.id); } } } catch (e) { console.log('[CreatorStudioEditor] Failed to fetch LLM models:', e); } try { const imgData = await fetchApi<{ models: string[] }>( '/models?provider=comfyui&model_type=image' ); if (imgData.models) { setAvailableImageModels(imgData.models.map(m => ({ id: m, name: m }))); } } catch (e) { console.log('[CreatorStudioEditor] Failed to fetch image models:', e); } }, [fetchApi, selectedLLMModel]); // Convert Creator Studio scene to TV Mode scene format const sceneToTVScene = useCallback((scene: Scene): TVScene => { return { idx: 
scene.idx, narration: scene.narration || "", image_prompt: scene.imagePrompt || "", negative_prompt: scene.negativePrompt || "", duration_s: scene.durationSec || 5, tags: {}, image_url: scene.imageUrl || null, status: scene.status === "ready" ? "ready" : "pending", imageStatus: scene.imageUrl ? "ready" : "pending", }; }, []); // Enter TV Mode with current scenes const handleEnterTVMode = useCallback(() => { if (!project || scenes.length === 0) return; const tvScenes = scenes.map(sceneToTVScene); enterTVMode(projectId, project.title, tvScenes, currentSceneIndex); }, [project, projectId, scenes, currentSceneIndex, enterTVMode, sceneToTVScene]); // Editor playback with TTS - speak scene narration when playing useEffect(() => { if (!isPlaying || scenes.length === 0) { // Stop any ongoing speech when not playing if (window.SpeechService?.stopSpeaking) { window.SpeechService.stopSpeaking(); } setIsTTSSpeaking(false); return; } const currentScene = scenes[currentSceneIndex]; if (!currentScene?.narration) { // No narration, advance to next scene after a short delay const timer = setTimeout(() => { if (currentSceneIndex < scenes.length - 1) { setCurrentSceneIndex((i) => i + 1); } else { setIsPlaying(false); // End of scenes } }, 2000); return () => clearTimeout(timer); } // Speak the current scene's narration const svc = window.SpeechService; if (!svc?.speak) { // Fallback if no TTS - use fixed timer const timer = setTimeout(() => { if (currentSceneIndex < scenes.length - 1) { setCurrentSceneIndex((i) => i + 1); } else { setIsPlaying(false); } }, 5000); return () => clearTimeout(timer); } console.log(`[Editor] Speaking scene ${currentSceneIndex + 1} narration...`); setIsTTSSpeaking(true); svc.speak(currentScene.narration, { onStart: () => setIsTTSSpeaking(true), onEnd: () => { setIsTTSSpeaking(false); // Auto-advance to next scene after narration finishes if (currentSceneIndex < scenes.length - 1) { setCurrentSceneIndex((i) => i + 1); } else { setIsPlaying(false); // End of 
scenes } }, onError: () => { setIsTTSSpeaking(false);
      // On error, advance anyway after delay
      setTimeout(() => { if (currentSceneIndex < scenes.length - 1) { setCurrentSceneIndex((i) => i + 1); } else { setIsPlaying(false); } }, 2000); }, });
    return () => { svc.stopSpeaking?.(); };
  }, [isPlaying, currentSceneIndex, scenes]);

  // Generate image for a scene
  // Posts an "imagine" request to the backend /chat endpoint, then persists the
  // returned image URL on the scene (PATCH) and mirrors it into local state.
  // `force` bypasses the isGeneratingImage re-entrancy guard (retry paths).
  const generateImageForScene = useCallback(
    async (sceneId: string, imagePrompt: string, force: boolean = false) => {
      if (isGeneratingImage && !force) {
        console.log('[CreatorStudioEditor] Already generating image, skipping');
        return;
      }
      setIsGeneratingImage(true);
      console.log('[CreatorStudioEditor] Generating image for scene:', sceneId);
      try {
        // NOTE(review): 'comfyui' is mapped to 'ollama' for the /chat provider
        // field — presumably the backend routes image work separately; confirm.
        const llmProvider = imageProvider === 'comfyui' ? 'ollama' : imageProvider;
        // Apply generation parameters if enabled
        const effectiveSteps = genParams.enabled ? genParams.steps : imageSteps;
        const effectiveCfg = genParams.enabled ? genParams.cfgScale : imageCfg;
        // Get scene-level negative prompt if available
        const scene = scenes.find(s => s.id === sceneId);
        const sceneNeg = scene?.negativePrompt || "";
        // Custom negative prompt from settings is joined onto the scene's own.
        const combinedNegativePrompt = (genParams.enabled && genParams.useCustomNegativePrompt && genParams.customNegativePrompt.trim()) ? [sceneNeg, genParams.customNegativePrompt.trim()].filter(Boolean).join(", ") : sceneNeg || undefined;
        // Send explicit width/height from wizard selection to backend
        // This ensures the resolution selected in Step 2 is actually used
        const data = await postApi<{ media?: { images?: string[] } }>(
          '/chat',
          {
            message: `imagine ${imagePrompt}`,
            mode: 'imagine',
            provider: llmProvider,
            imgModel: selectedImageModel || imageModel || undefined,
            // Pass explicit resolution from wizard (if available)
            // Backend will use these values directly instead of computing from aspect ratio
            ...(imageWidth ? { imgWidth: imageWidth } : {}),
            ...(imageHeight ? { imgHeight: imageHeight } : {}),
            imgSteps: effectiveSteps,
            imgCfg: effectiveCfg,
            negativePrompt: combinedNegativePrompt,
            imgSeed: genParams.enabled && genParams.lockSeed ? genParams.seed : undefined,
            creativity: genParams.enabled ? genParams.creativity : undefined,
            promptRefinement: false,
          }
        );
        const imageUrl = data?.media?.images?.[0];
        if (imageUrl) {
          console.log('[CreatorStudioEditor] Image generated:', imageUrl);
          // Persist first, then update local state so UI reflects saved data.
          await patchApi(`/studio/videos/${projectId}/scenes/${sceneId}`, toScenePatch({ imageUrl, status: 'ready', }));
          setScenes((prev) => prev.map((s) => s.id === sceneId ? { ...s, imageUrl, status: 'ready' as SceneStatus } : s ) );
          setLastSaved(new Date());
        } else {
          console.warn('[CreatorStudioEditor] No image returned from backend');
        }
      } catch (e: any) {
        console.error('[CreatorStudioEditor] Failed to generate image:', e);
      } finally {
        setIsGeneratingImage(false);
      }
    },
    [projectId, imageProvider, imageModel, selectedImageModel, imageWidth, imageHeight, imageSteps, imageCfg, postApi, patchApi, isGeneratingImage, toScenePatch, genParams, scenes]
  );

  // Generate video for a scene (converts image to video)
  // Sends an "animate" request for an existing scene image; on success stores
  // the (proxied) video URL on the scene. Guarded by isGeneratingVideo.
  const generateVideoForScene = useCallback(
    async (sceneId: string, imageUrl: string, prompt: string) => {
      if (isGeneratingVideo) {
        console.log('[CreatorStudioEditor] Already generating video, skipping');
        return;
      }
      if (!imageUrl) {
        console.warn('[CreatorStudioEditor] No image URL to animate');
        return;
      }
      setIsGeneratingVideo(true);
      console.log('[CreatorStudioEditor] Generating video for scene:', sceneId);
      try {
        // IMPORTANT:
        // Backend animate mode detects the reference image via URL in the message (in this project version).
        // Backend also expects vidModel (not videoModel).
        const data = await postApi<{ media?: any }>(
          '/chat',
          {
            message: `${prompt || 'Animate this scene with subtle motion'} ${imageUrl}`,
            mode: 'animate',
            provider: 'ollama',
            vidModel: selectedVideoModel || videoModel || undefined,
            // Apply generation parameters if enabled
            vidSeed: genParams.enabled && genParams.lockSeed ? genParams.seed : undefined,
            vidSteps: genParams.enabled ? genParams.steps : undefined,
            vidCfg: genParams.enabled ? genParams.cfgScale : undefined,
            creativity: genParams.enabled ? genParams.creativity : undefined,
          }
        );
        // Backend returns media.video_url (NOT always media.videos[]).
        const rawVideoUrl = data?.media?.video_url || data?.media?.videos?.[0] || null;
        if (rawVideoUrl) {
          // Proxy the URL for correct Content-Type headers (WebM playback)
          const proxiedVideoUrl = proxyVideoUrl(rawVideoUrl) || rawVideoUrl;
          console.log('[CreatorStudioEditor] Video generated:', rawVideoUrl, '-> proxied:', proxiedVideoUrl);
          await patchApi(`/studio/videos/${projectId}/scenes/${sceneId}`, toScenePatch({ videoUrl: proxiedVideoUrl, status: 'ready', }));
          setScenes((prev) => prev.map((s) => s.id === sceneId ? { ...s, videoUrl: proxiedVideoUrl, status: 'ready' as SceneStatus } : s ) );
          setLastSaved(new Date());
        } else {
          console.warn('[CreatorStudioEditor] No video returned from backend');
        }
      } catch (e: any) {
        console.error('[CreatorStudioEditor] Failed to generate video:', e);
      } finally {
        setIsGeneratingVideo(false);
      }
    },
    [projectId, videoModel, selectedVideoModel, postApi, patchApi, isGeneratingVideo, toScenePatch, proxyVideoUrl, genParams]
  );

  // Remove video and fall back to image-only for a scene
  // Asks the user to confirm, then clears videoUrl on the backend and locally.
  const removeVideoForScene = useCallback(async (sceneId: string) => {
    if (!window.confirm("Remove the video for this scene and keep the image?")) return;
    try {
      await patchApi(`/studio/videos/${projectId}/scenes/${sceneId}`, toScenePatch({ videoUrl: null, status: 'ready' }));
      setScenes(prev => prev.map(s => (s.id === sceneId ?
{ ...s, videoUrl: null, status: 'ready' as SceneStatus } : s)));
      setLastSaved(new Date());
    } catch (e: any) {
      console.error('[CreatorStudioEditor] Failed to remove video:', e);
      alert(`Failed to remove video: ${e.message}`);
    }
  }, [projectId, patchApi, toScenePatch]);

  // Generate scene from outline
  // Asks the backend to materialize outline entry `sceneIndex` as a persisted
  // scene, appends it to local state, and kicks off image generation for it.
  const generateSceneFromOutline = useCallback(async (sceneIndex: number) => {
    if (!storyOutline || sceneIndex >= storyOutline.scenes.length) return;
    setIsGeneratingScene(true);
    try {
      const data = await postApi<{ ok: boolean; scene: Scene }>(
        `/studio/videos/${projectId}/scenes/generate-from-outline?scene_index=${sceneIndex}`,
        {}
      );
      if (data.ok && data.scene) {
        setScenes((prev) => [...prev, data.scene]);
        setCurrentSceneIndex(scenes.length);
        setLastSaved(new Date());
        generateImageForScene(data.scene.id, data.scene.imagePrompt);
      }
    } catch (e: any) {
      console.error('[CreatorStudioEditor] Failed to generate scene from outline:', e);
      alert(`Failed to generate scene: ${e.message}`);
    } finally {
      setIsGeneratingScene(false);
    }
  }, [projectId, storyOutline, scenes.length, postApi, generateImageForScene]);

  // Generate ALL scenes from outline in sequence (batch generation)
  //
  // Runs in up to three phases, updating `batchProgress` so the overlay UI can
  // show where we are:
  //   1. 'scene' - create every scene record from the stored outline
  //   2. 'image' - generate a still image for each created scene
  //   3. 'video' - (only when projectWantsVideo) animate each image
  // Individual failures are logged and skipped so one bad scene does not abort
  // the whole batch.
  const generateAllScenesFromOutline = useCallback(async () => {
    if (!storyOutline || !storyOutline.scenes || storyOutline.scenes.length === 0) {
      console.log('[CreatorStudioEditor] No outline scenes to generate');
      return;
    }
    const totalScenes = storyOutline.scenes.length;
    console.log(`[CreatorStudioEditor] Starting batch generation of ${totalScenes} scenes`);
    setIsBatchGenerating(true);
    setBatchProgress({ current: 0, total: totalScenes, phase: 'scene' });
    const generatedScenes: Scene[] = [];
    try {
      // Generate all scenes first (without images for speed)
      for (let i = 0; i < totalScenes; i++) {
        setBatchProgress({ current: i + 1, total: totalScenes, phase: 'scene' });
        console.log(`[CreatorStudioEditor] Generating scene ${i + 1}/${totalScenes}`);
        try {
          const data = await postApi<{ ok: boolean; scene: Scene }>(
            `/studio/videos/${projectId}/scenes/generate-from-outline?scene_index=${i}`,
            {}
          );
          if (data.ok && data.scene) {
            generatedScenes.push(data.scene);
            setScenes((prev) => [...prev, data.scene]);
            setCurrentSceneIndex(i);
          }
        } catch (sceneErr: any) {
          console.error(`[CreatorStudioEditor] Failed to generate scene ${i + 1}:`, sceneErr);
          // Continue with remaining scenes even if one fails
        }
        // Small delay between scene creations to avoid overwhelming the server
        if (i < totalScenes - 1) {
          await new Promise(resolve => setTimeout(resolve, 200));
        }
      }
      console.log(`[CreatorStudioEditor] Created ${generatedScenes.length} scenes, now generating images...`);
      // Phase 2: Generate images for all scenes
      const scenesWithImages: Array<{ scene: Scene; imageUrl: string }> = [];
      for (let i = 0; i < generatedScenes.length; i++) {
        const scene = generatedScenes[i];
        setBatchProgress({ current: i + 1, total: generatedScenes.length, phase: 'image' });
        console.log(`[CreatorStudioEditor] Generating image ${i + 1}/${generatedScenes.length}`);
        try {
          // Generate image inline (can't use generateImageForScene due to isGeneratingImage lock)
          const llmProvider = imageProvider === 'comfyui' ? 'ollama' : imageProvider;
          const data = await postApi<{ media?: { images?: string[] } }>(
            '/chat',
            {
              message: `imagine ${scene.imagePrompt}`,
              mode: 'imagine',
              provider: llmProvider,
              imgModel: selectedImageModel || imageModel || undefined,
              // FIX: honor the wizard-selected resolution exactly like the
              // single-scene path (generateImageForScene) does, instead of
              // always forcing a 16:9 aspect ratio. Fall back to the old
              // aspect-ratio hint only when no explicit size is configured.
              ...(imageWidth && imageHeight
                ? { imgWidth: imageWidth, imgHeight: imageHeight }
                : { imgAspectRatio: '16:9' }),
              imgSteps: imageSteps,
              imgCfg: imageCfg,
              promptRefinement: false,
            }
          );
          const imageUrl = data?.media?.images?.[0];
          if (imageUrl) {
            // When video generation is planned, keep the scene in 'generating'
            // until phase 3 finishes; otherwise it's ready now.
            await patchApi(`/studio/videos/${projectId}/scenes/${scene.id}`, toScenePatch({ imageUrl, status: projectWantsVideo ? 'generating' : 'ready', }));
            setScenes((prev) => prev.map((s) => s.id === scene.id ? { ...s, imageUrl, status: (projectWantsVideo ? 'generating' : 'ready') as SceneStatus } : s ) );
            scenesWithImages.push({ scene, imageUrl });
          }
        } catch (imgErr: any) {
          console.error(`[CreatorStudioEditor] Failed to generate image for scene ${i + 1}:`, imgErr);
          // Continue with remaining images
        }
      }
      // Phase 3: Generate videos from images (if enabled)
      if (projectWantsVideo && scenesWithImages.length > 0) {
        console.log(`[CreatorStudioEditor] Phase 3: Generating ${scenesWithImages.length} videos...`);
        for (let i = 0; i < scenesWithImages.length; i++) {
          const { scene, imageUrl } = scenesWithImages[i];
          setBatchProgress({ current: i + 1, total: scenesWithImages.length, phase: 'video' });
          console.log(`[CreatorStudioEditor] Generating video ${i + 1}/${scenesWithImages.length}`);
          try {
            // Use the animate endpoint to generate video from image
            // IMPORTANT: Include imageUrl in message and use vidModel (not videoModel)
            const data = await postApi<{ media?: any }>(
              '/chat',
              { message: `${scene.imagePrompt} ${imageUrl}`, mode: 'animate', provider: 'ollama', vidModel: selectedVideoModel || videoModel || undefined, }
            );
            // Backend returns media.video_url (NOT always media.videos[])
            const rawVideoUrl = data?.media?.video_url || data?.media?.videos?.[0] || null;
            if (rawVideoUrl) {
              // Proxy the URL for correct Content-Type headers (WebM playback)
              const proxiedVideoUrl = proxyVideoUrl(rawVideoUrl) || rawVideoUrl;
              await patchApi(`/studio/videos/${projectId}/scenes/${scene.id}`, toScenePatch({ videoUrl: proxiedVideoUrl, status: 'ready', }));
              setScenes((prev) => prev.map((s) => s.id === scene.id ? { ...s, videoUrl: proxiedVideoUrl, status: 'ready' as SceneStatus } : s ) );
            } else {
              // No video generated, mark scene as ready anyway (image only)
              await patchApi(`/studio/videos/${projectId}/scenes/${scene.id}`, toScenePatch({ status: 'ready', }));
              setScenes((prev) => prev.map((s) => s.id === scene.id ? { ...s, status: 'ready' as SceneStatus } : s ) );
            }
          } catch (vidErr: any) {
            console.error(`[CreatorStudioEditor] Failed to generate video for scene ${i + 1}:`, vidErr);
            // Mark scene as ready anyway (fallback to image only)
            try {
              await patchApi(`/studio/videos/${projectId}/scenes/${scene.id}`, toScenePatch({ status: 'ready' }));
              setScenes((prev) => prev.map((s) => s.id === scene.id ? { ...s, status: 'ready' as SceneStatus } : s ) );
            } catch {}
          }
        }
      }
      setLastSaved(new Date());
      setCurrentSceneIndex(0); // Go back to first scene
      console.log('[CreatorStudioEditor] Batch generation complete!');
      // Sync outline with actual scene data to keep them in sync
      await syncOutlineWithScenes();
      // Final refresh so UI shows all persisted videoUrl values
      await refreshScenes();
    } catch (e: any) {
      console.error('[CreatorStudioEditor] Batch generation failed:', e);
    } finally {
      setIsBatchGenerating(false);
      setBatchProgress({ current: 0, total: 0, phase: 'scene' });
    }
  }, [storyOutline, projectId, postApi, patchApi, imageProvider, imageModel, selectedImageModel, imageWidth, imageHeight, imageSteps, imageCfg, projectWantsVideo, videoModel, selectedVideoModel, syncOutlineWithScenes, toScenePatch, refreshScenes, proxyVideoUrl]);

  // Load project and scenes
  useEffect(() => {
    async function loadData() {
      setLoading(true);
      setError(null);
      try {
        const [projectRes, scenesRes] = await Promise.all([
          fetchApi<{ video: Project }>(`/studio/videos/${projectId}`),
          fetchApi<{ scenes: any[] }>(`/studio/videos/${projectId}/scenes`),
        ]);
        setProject(projectRes.video);
        setScenes(normalizeScenes((scenesRes as any).scenes));
        // Extract LLM model from project tags if not already set
        const tags = (projectRes.video as any).tags || [];
        const llmTag = tags.find((t: string) => t.startsWith("llm:"));
        if (llmTag && !defaultLLMModel) {
          const modelFromTag = llmTag.replace("llm:", "");
          setSelectedLLMModel(modelFromTag);
          setSettingsLLMModel(modelFromTag);
        }
      } catch (e: any) {
        setError(e.message || String(e));
      } finally {
        setLoading(false);
      }
    }
loadData();
  }, [projectId, fetchApi, defaultLLMModel, normalizeScenes]);

  // Load available models and existing outline on mount
  useEffect(() => {
    fetchAvailableModels();
    loadStoryOutline();
  }, [fetchAvailableModels, loadStoryOutline]);

  // Detect whether the browser can play WebM videos
  // Uses HTMLMediaElement.canPlayType probes; any non-empty answer counts.
  useEffect(() => {
    try {
      const v = document.createElement("video");
      const ok = Boolean(
        v.canPlayType('video/webm; codecs="vp8, vorbis"') ||
        v.canPlayType('video/webm; codecs="vp9"') ||
        v.canPlayType("video/webm")
      );
      setCanPlayWebm(ok);
      console.log('[CreatorStudioEditor] WebM playback support:', ok);
    } catch {
      setCanPlayWebm(false);
    }
  }, []);

  // Auto-generate outline when project is newly created
  // One-shot: hasAutoGeneratedOutline latches so this fires at most once.
  useEffect(() => {
    if (
      autoGenerateFirst &&
      !hasAutoGeneratedOutline &&
      !loading &&
      project &&
      !storyOutline &&
      !isGeneratingOutline &&
      selectedLLMModel // Wait until we have a model selected
    ) {
      console.log('[CreatorStudioEditor] Auto-generating story outline for new project');
      setHasAutoGeneratedOutline(true);
      generateStoryOutline();
    }
  }, [autoGenerateFirst, hasAutoGeneratedOutline, loading, project, storyOutline, isGeneratingOutline, selectedLLMModel, generateStoryOutline]);

  // Current scene
  const currentScene = scenes[currentSceneIndex] || null;

  // Extract visual style from project tags
  // Tags of the form "visual:some_style" map to "some style"; default "cinematic".
  const getVisualStyle = useCallback(() => {
    if (!project) return "cinematic";
    const tags = (project as any).tags || [];
    const visualTag = tags.find((t: string) => t.startsWith("visual:"));
    if (visualTag) {
      const style = visualTag.replace("visual:", "").replace(/_/g, " ");
      return style;
    }
    return "cinematic";
  }, [project]);

  // Extract tone from project tags
  // Tags of the form "tone:x_y" map to "x y"; defaults to ["documentary"].
  const getTones = useCallback(() => {
    if (!project) return ["documentary"];
    const tags = (project as any).tags || [];
    const tones = tags.filter((t: string) => t.startsWith("tone:")).map((t: string) => t.replace("tone:", "").replace(/_/g, " ") );
    return tones.length > 0 ? tones : ["documentary"];
  }, [project]);

  // Generate AI-powered scene with better prompts
  // Prefers the first story-outline entry for narration/prompts; otherwise
  // builds prompts from project title/logline plus visual-style and tone tags.
  // Persists the scene, then fires off image generation for it.
  const generateFirstSceneWithAI = useCallback(async () => {
    if (!project || isGeneratingScene) return;
    setIsGeneratingScene(true);
    try {
      let narration: string;
      let imagePrompt: string;
      let negativePrompt: string = "blurry, low quality, text, watermark, ugly, deformed, disfigured, bad anatomy, worst quality, low resolution, duplicate, clone, multiple people, two heads, two faces, split image, extra limbs";
      if (storyOutline && storyOutline.scenes && storyOutline.scenes.length > 0) {
        const outlineScene = storyOutline.scenes[0];
        narration = outlineScene.narration;
        imagePrompt = outlineScene.image_prompt;
        negativePrompt = outlineScene.negative_prompt || negativePrompt;
        console.log('[CreatorStudioEditor] Using story outline for first scene');
      } else {
        const visualStyle = getVisualStyle();
        const tones = getTones();
        const toneDesc = tones.join(", ");
        narration = `The story begins. ${project.logline || `Welcome to "${project.title}".`}`;
        imagePrompt = `${visualStyle} style, ${project.logline || project.title}, opening scene, establishing shot, ${toneDesc} mood, high quality, detailed, 4k, masterpiece`;
      }
      const data = await postApi<{ scene: Scene }>(
        `/studio/videos/${projectId}/scenes`,
        { narration, imagePrompt, negativePrompt, durationSec: 5.0, }
      );
      setScenes((prev) => [...prev, data.scene]);
      setCurrentSceneIndex(0);
      setLastSaved(new Date());
      console.log('[CreatorStudioEditor] Auto-generating image for first scene');
      generateImageForScene(data.scene.id, data.scene.imagePrompt);
    } catch (e: any) {
      console.error('[CreatorStudioEditor] Failed to create scene:', e);
      alert(`Failed to create scene: ${e.message}`);
    } finally {
      setIsGeneratingScene(false);
    }
  }, [project, projectId, isGeneratingScene, postApi, getVisualStyle, getTones, storyOutline, generateImageForScene]);

  // Auto-generate ALL scenes after outline is generated (batch generation)
  useEffect(() => {
    if (
      autoGenerateFirst &&
!hasAutoTriggered &&
      !loading &&
      project &&
      storyOutline &&
      storyOutline.scenes &&
      storyOutline.scenes.length > 0 &&
      scenes.length === 0 &&
      !isGeneratingScene &&
      !isBatchGenerating
    ) {
      console.log('[CreatorStudioEditor] Auto-generating ALL scenes from outline');
      setHasAutoTriggered(true);
      generateAllScenesFromOutline();
    }
  }, [autoGenerateFirst, hasAutoTriggered, loading, project, storyOutline, scenes.length, isGeneratingScene, isBatchGenerating, generateAllScenesFromOutline]);

  // Generate first scene (non-AI fallback)
  // Builds a minimal scene straight from project title/logline, no LLM involved.
  const generateFirstScene = useCallback(async () => {
    if (!project || isGeneratingScene) return;
    setIsGeneratingScene(true);
    try {
      const data = await postApi<{ scene: Scene }>(
        `/studio/videos/${projectId}/scenes`,
        { narration: `Opening scene for "${project.title}"`, imagePrompt: `${project.logline || project.title}, cinematic, high quality, detailed`, negativePrompt: "blurry, low quality, text, watermark", durationSec: 5.0, }
      );
      setScenes((prev) => [...prev, data.scene]);
      setCurrentSceneIndex(0);
      setLastSaved(new Date());
    } catch (e: any) {
      alert(`Failed to create scene: ${e.message}`);
    } finally {
      setIsGeneratingScene(false);
    }
  }, [project, projectId, isGeneratingScene, postApi]);

  // Generate next scene with AI-powered prompts (uses backend outline for reliability)
  // Resolution order: (1) next outline entry from the database, (2) AI
  // continuation from previous scenes, (3) template fallback built from
  // project settings. Each success path appends the scene, starts image
  // generation, and re-syncs the outline.
  const generateNextScene = useCallback(async () => {
    if (!project || isGeneratingScene) return;
    setIsGeneratingScene(true);
    try {
      const nextSceneIndex = scenes.length;
      // First, try to generate from outline via backend (reads outline from database - most reliable)
      try {
        const data = await postApi<{ ok: boolean; scene: Scene; from_outline?: boolean }>(
          `/studio/videos/${projectId}/scenes/generate-from-outline?scene_index=${nextSceneIndex}`,
          {}
        );
        if (data.ok && data.scene) {
          setScenes((prev) => [...prev, data.scene]);
          setCurrentSceneIndex(nextSceneIndex);
          setLastSaved(new Date());
          console.log(`[CreatorStudioEditor] Generated scene ${nextSceneIndex + 1} from outline`);
          generateImageForScene(data.scene.id, data.scene.imagePrompt);
          // Sync outline with new scene
          syncOutlineWithScenes();
          return;
        }
      } catch (outlineErr: any) {
        // Outline not available or scene index out of range - try AI continuation
        console.log('[CreatorStudioEditor] No outline scene available, trying AI continuation:', outlineErr.message);
      }
      // Second: Try AI-powered continuation based on previous scenes
      if (scenes.length > 0) {
        try {
          console.log('[CreatorStudioEditor] Generating AI continuation from previous context...');
          const contData = await postApi<{ ok: boolean; scene: Scene; from_continuation?: boolean }>(
            `/studio/videos/${projectId}/scenes/generate-continuation`,
            {}
          );
          if (contData.ok && contData.scene) {
            setScenes((prev) => [...prev, contData.scene]);
            setCurrentSceneIndex(nextSceneIndex);
            setLastSaved(new Date());
            console.log(`[CreatorStudioEditor] Generated scene ${nextSceneIndex + 1} via AI continuation`);
            generateImageForScene(contData.scene.id, contData.scene.imagePrompt);
            // Sync outline with new scene
            syncOutlineWithScenes();
            return;
          }
        } catch (contErr: any) {
          console.log('[CreatorStudioEditor] AI continuation failed, using fallback:', contErr.message);
        }
      }
      // Final fallback: Generate a scene based on project settings (no outline, no AI)
      const sceneNum = scenes.length + 1;
      const visualStyle = getVisualStyle();
      const tones = getTones();
      const toneDesc = tones.join(", ");
      const narration = `Scene ${sceneNum}. ${project.logline || `The story of "${project.title}" continues...`}`;
      const imagePrompt = `${visualStyle} style, ${project.logline || project.title}, scene ${sceneNum}, ${toneDesc} mood, high quality, detailed, 4k, masterpiece`;
      const negativePrompt = "blurry, low quality, text, watermark, ugly, deformed, disfigured, bad anatomy, worst quality, low resolution, duplicate, clone, multiple people, two heads, two faces, split image, extra limbs";
      const data = await postApi<{ scene: Scene }>(
        `/studio/videos/${projectId}/scenes`,
        { narration, imagePrompt, negativePrompt, durationSec: 5.0, }
      );
      setScenes((prev) => [...prev, data.scene]);
      setCurrentSceneIndex(scenes.length);
      setLastSaved(new Date());
      console.log('[CreatorStudioEditor] Generated scene with fallback content:', sceneNum);
      generateImageForScene(data.scene.id, data.scene.imagePrompt);
      // Sync outline with new scene
      syncOutlineWithScenes();
    } catch (e: any) {
      alert(`Failed to create scene: ${e.message}`);
    } finally {
      setIsGeneratingScene(false);
    }
  }, [project, projectId, scenes.length, isGeneratingScene, postApi, getVisualStyle, getTones, generateImageForScene, syncOutlineWithScenes]);

  // Generate next scene for TV Mode (uses backend outline for reliability)
  // Same three-step resolution as generateNextScene, but returns a TVScene
  // (or null on failure) for the TV-mode store instead of driving editor state.
  const generateNextForTVMode = useCallback(async () => {
    if (!project || isGeneratingScene) return null;
    try {
      const nextSceneIndex = scenes.length;
      // First, try to generate from outline via backend
      try {
        const data = await postApi<{ ok: boolean; scene: Scene; from_outline?: boolean }>(
          `/studio/videos/${projectId}/scenes/generate-from-outline?scene_index=${nextSceneIndex}`,
          {}
        );
        if (data.ok && data.scene) {
          setScenes((prev) => [...prev, data.scene]);
          console.log(`[CreatorStudioEditor] TV Mode: Generated scene ${nextSceneIndex + 1} from outline`);
          return sceneToTVScene(data.scene);
        }
      } catch (outlineErr: any) {
        console.log('[CreatorStudioEditor] TV Mode: No outline available, trying AI continuation');
      }
      // Second: Try AI-powered continuation based on previous scenes
      if (scenes.length > 0) {
        try {
          console.log('[CreatorStudioEditor] TV Mode: Generating AI continuation from previous context...');
          const contData = await postApi<{ ok: boolean; scene: Scene; from_continuation?: boolean }>(
            `/studio/videos/${projectId}/scenes/generate-continuation`,
            {}
          );
          if (contData.ok && contData.scene) {
            setScenes((prev) => [...prev, contData.scene]);
            console.log(`[CreatorStudioEditor] TV Mode: Generated scene ${nextSceneIndex + 1} via AI continuation`);
            return sceneToTVScene(contData.scene);
          }
        } catch (contErr: any) {
          console.log('[CreatorStudioEditor] TV Mode: AI continuation failed, using fallback:', contErr.message);
        }
      }
      // Final fallback: Generate scene based on project settings
      const sceneNum = scenes.length + 1;
      const visualStyle = getVisualStyle();
      const tones = getTones();
      const toneDesc = tones.join(", ");
      const narration = `Scene ${sceneNum}. ${project.logline || `The story of "${project.title}" continues...`}`;
      const imagePrompt = `${visualStyle} style, ${project.logline || project.title}, scene ${sceneNum}, ${toneDesc} mood, high quality, detailed, 4k, masterpiece`;
      const negativePrompt = "blurry, low quality, text, watermark, ugly, deformed, disfigured, bad anatomy, worst quality, low resolution, duplicate, clone, multiple people, two heads, two faces, split image, extra limbs";
      const data = await postApi<{ scene: Scene }>(
        `/studio/videos/${projectId}/scenes`,
        { narration, imagePrompt, negativePrompt, durationSec: 5.0, }
      );
      setScenes((prev) => [...prev, data.scene]);
      return sceneToTVScene(data.scene);
    } catch (e: any) {
      console.error('[CreatorStudioEditor] Failed to generate scene for TV mode:', e);
      return null;
    }
  }, [project, projectId, scenes.length, isGeneratingScene, postApi, getVisualStyle, getTones, sceneToTVScene]);

  // Ensure image for TV Mode scene
  const ensureImageForTVMode = useCallback((tvScene: TVScene) => {
    const scene = scenes.find(s => s.idx === tvScene.idx);
    if (!scene) return;
    if (!tvScene.image_url && !tvScene.image) {
      // NOTE(review): `scenes` below is the array captured when this callback
      // was created; the setScenes(...) performed inside generateImageForScene
      // is not visible to this .then() closure, so `updatedScene?.imageUrl`
      // may still be null here — verify the TV store actually receives the
      // freshly generated image URL.
      generateImageForScene(scene.id, scene.imagePrompt).then(() => {
        const updatedScene = scenes.find(s => s.idx === tvScene.idx);
        if (updatedScene?.imageUrl) {
          updateSceneImageByIdx(tvScene.idx, updatedScene.imageUrl);
        }
      });
    }
  }, [scenes, generateImageForScene, updateSceneImageByIdx]);

  // Status badge color
  // Maps a project status to Tailwind classes + human label for the header badge.
  const getStatusBadge = (status: string) => {
    switch (status) {
      case "draft": return { bg: "bg-amber-500/20", text: "text-amber-300", label: "Draft" };
      case "approved": return { bg: "bg-emerald-500/20", text: "text-emerald-300", label: "Finished" };
      case "in_review": return { bg: "bg-cyan-500/20", text: "text-cyan-300", label: "In Review" };
      case "archived": return { bg: "bg-slate-500/20", text: "text-slate-300", label: "Archived" };
      default: return { bg: "bg-slate-500/20", text: "text-slate-300", label: status };
    }
  };

  // Scene status indicator
  const SceneStatusIndicator = ({ status }: { status: SceneStatus }) => {
    switch (status) {
      case 'generating': return (
); case 'ready': return null; case 'error': return (
); case 'pending': default: return (
); } }; // Loading state if (loading) { return (
Loading project...
); } // Error state if (error || !project) { return (
Failed to load project
{error || "Project not found"}
); } const statusBadge = getStatusBadge(project.status); return (
{/* Batch Generation Progress Overlay */} {isBatchGenerating && (
{/* Animated Icon */}
{batchProgress.phase === 'scene' ? ( ) : batchProgress.phase === 'image' ? ( ) : ( )}
{/* Title */}

{batchProgress.phase === 'scene' ? 'Creating Scenes' : batchProgress.phase === 'image' ? 'Generating Images' : 'Generating Videos'}

{/* Progress Text */}

{batchProgress.phase === 'scene' ? `Building scene ${batchProgress.current} of ${batchProgress.total}...` : batchProgress.phase === 'image' ? `Generating image ${batchProgress.current} of ${batchProgress.total}...` : `Generating video ${batchProgress.current} of ${batchProgress.total}...` }

{/* Progress Bar */}
0 ? `${(batchProgress.current / batchProgress.total) * 100}%` : '0%' }} />
{batchProgress.phase === 'scene' ? (projectWantsVideo ? 'Phase 1/3: Creating scenes' : 'Phase 1/2: Creating scenes') : batchProgress.phase === 'image' ? (projectWantsVideo ? 'Phase 2/3: Generating images' : 'Phase 2/2: Generating images') : 'Phase 3/3: Generating videos' }
{/* Tip */}

{batchProgress.phase === 'video' ? 'Video generation may take several minutes per scene' : 'This may take a few minutes depending on your hardware' }

)} {/* Header - Compact & Cinematic */}

{project.title}

{scenes.length} scene{scenes.length !== 1 ? "s" : ""} • Creator Studio
{/* Status Badge */} {statusBadge.label} {/* Save Indicator */}
{isSaving ? ( <> Saving... ) : lastSaved ? ( <> Saved ) : null}
{/* Project Settings Button */} {/* Story Outline Button */} {/* Export Button */}
{/* Scene Chips Rail - Like Play Story */} {scenes.length > 0 && (
{scenes.map((scene, idx) => { const isActive = idx === currentSceneIndex; const hasImage = Boolean(scene.imageUrl); const hasVideo = Boolean(scene.videoUrl); const isHovered = hoveredSceneIdx === idx; const showDelete = isHovered && scenes.length > 1; return (
setHoveredSceneIdx(idx)} onMouseLeave={() => setHoveredSceneIdx(null)} > {/* Delete button */} {showDelete && ( )}
); })} {/* Add Scene Chip */}
)} {/* Main Content */} {scenes.length === 0 ? ( // Empty State - Cinematic

Create Your First Scene

Your project is ready. Generate a scene to start bringing your story to life with AI-powered visuals.

Powered by AI • Based on your project settings

) : ( // Preview + Actions - Cinematic Layout
{/* Preview Panel - Dominant */}
{/* Background gradient */}
{/* Main preview area */}
{currentScene?.videoUrl ? ( /* Video/Animation Preview - when scene has generated media */
{isAnimatedImageUrl(currentScene.videoUrl) ? ( /* Animated WebP/GIF - render as (some Comfy workflows output this) */ {`Scene ) : isWebmUrl(currentScene.videoUrl) ? ( /* WebM video - render as
) : currentScene?.imageUrl ? ( /* Image Preview - when scene has image but no video */
{`Scene {/* Top-right overlay controls - glass style */}
{/* More options */} {/* Regenerate image */}
{/* Bottom-right Make Video CTA - only show if project wants video and scene has no video yet */} {projectWantsVideo && !currentScene.videoUrl && !isGeneratingImage && !isGeneratingVideo && (
)} {/* Generating image overlay */} {isGeneratingImage && (
Generating image...
)} {/* Generating video overlay */} {isGeneratingVideo && (
Converting to video... This may take a few minutes
)}
) : ( /* Empty state when no image */
{isGeneratingImage ? ( <>

Generating image...

{currentScene?.imagePrompt && (

{currentScene.imagePrompt}

)} ) : ( <>

No image for this scene

)}
)}
{/* Narration subtitle overlay */} {currentScene?.narration && (

{currentScene.narration}

)}
{/* Action Bar - Like Play Story */}
{/* Left: Playback controls */}
{/* Center: Scene progress bar */}
{scenes.map((_, i) => (
{/* Right: Actions */}
{/* Edit Scene Button */} {/* Generate Next Scene */} {/* TV Mode */}
)} {/* Scene Editor Modal */} {showSceneEditor && editingScene && (
setShowSceneEditor(false)} />
{/* Modal Header */}

Edit Scene {editingScene.idx + 1}

Update narration and prompts

{/* Modal Content */}
{/* Narration */}