import { useState, useRef, useCallback, useEffect } from "react";
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { motion, AnimatePresence } from "framer-motion";
import {
  Video,
  Loader2,
  Download,
  Trash2,
  Sparkles,
  Lock,
  Globe,
  Play,
  Upload,
  X,
  ImagePlus,
  CheckCircle2,
  AlertCircle,
  Clock,
} from "lucide-react";
import { Button } from "@/components/ui/button";
import { Textarea } from "@/components/ui/textarea";
import { Badge } from "@/components/ui/badge";
import { Progress } from "@/components/ui/progress";
import { useToast } from "@/hooks/use-toast";
import { useLang } from "@/contexts/LanguageContext";
import { useAuth } from "@/contexts/AuthContext";
import {
  AlertDialog,
  AlertDialogAction,
  AlertDialogCancel,
  AlertDialogContent,
  AlertDialogDescription,
  AlertDialogFooter,
  AlertDialogHeader,
  AlertDialogTitle,
  AlertDialogTrigger,
} from "@/components/ui/alert-dialog";

// App base path with any trailing slash removed; prefixes every API route below.
const BASE = import.meta.env.BASE_URL.replace(/\/$/, "");

// ── Types ─────────────────────────────────────────────────────────────────────

/** One generated video, as returned by the /generate and /history APIs. */
interface VideoRecord {
  id: number;
  videoUrl: string;
  thumbnailUrl: string | null;
  prompt: string;
  negativePrompt: string | null;
  model: string;
  aspectRatio: string;
  resolution: string;
  duration: number;
  hasRefImage: boolean;
  isPrivate: boolean;
  userId: number | null;
  createdAt: string;
}

/** User-selected reference image: raw base64 payload plus a data-URL preview. */
interface RefImage { base64: string; mime: string; preview: string }

type VideoModel = "grok-3" | "veo-3-fast";

/** Generation options sent in the body of POST /api/videos/generate. */
interface VideoOptions {
  model: VideoModel;
  aspectRatio: string;
  resolution: string;
  duration: number;
  negativePrompt: string;
  enhancePrompt: boolean;
}

type GenPhase =
  | "idle"
  | "submitting" // POSTing to /generate
  | "turnstile" // fetching turnstile token
  | "connecting" // connecting to geminigen.ai
  | "generating" // AI generating (status 1 events)
  | "done"
  | "error";

/** Snapshot of an in-flight (or finished) generation, driven by SSE events. */
interface GenState {
  phase: GenPhase;
  message: string;
  elapsedMs: number;
  progress: number | null; // 0-100 from geminigen.ai
  result: VideoRecord | null;
  errorMsg: string | null;
}

// ── API helpers ───────────────────────────────────────────────────────────────

/**
 * Kick off a server-side video generation task.
 *
 * @returns the task id to follow via the SSE progress endpoint.
 * @throws Error with the server-provided message (or `HTTP <status>`) on failure.
 */
async function initiateVideoGeneration(
  prompt: string,
  isPrivate: boolean,
  videoOpts: VideoOptions,
  refImageBase64?: string,
  refImageMime?: string,
): Promise<{ taskId: string }> {
  const resp = await fetch(`${BASE}/api/videos/generate`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    credentials: "include",
    body: JSON.stringify({
      prompt,
      isPrivate,
      model: videoOpts.model,
      aspectRatio: videoOpts.aspectRatio,
      resolution: videoOpts.resolution,
      duration: videoOpts.duration,
      // Empty string means "no negative prompt" — omit the field entirely.
      negativePrompt: videoOpts.negativePrompt || undefined,
      enhancePrompt: videoOpts.enhancePrompt,
      referenceImageBase64: refImageBase64,
      referenceImageMime: refImageMime,
    }),
  });
  if (!resp.ok) {
    // Body may not be JSON on proxy/server errors; fall back to the status code.
    const err = await resp.json().catch(() => ({ message: `HTTP ${resp.status}` }));
    throw new Error(err.message || `HTTP ${resp.status}`);
  }
  return resp.json();
}

/** Fetch the 50 most recent videos for the current session. */
async function fetchVideoHistory(): Promise<{ videos: VideoRecord[] }> {
  const resp = await fetch(`${BASE}/api/videos/history?limit=50`, { credentials: "include" });
  if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
  return resp.json();
}

/** Delete a video by id. Resolves with no value; throws on a non-2xx response. */
async function deleteVideo(id: number): Promise<void> {
  const resp = await fetch(`${BASE}/api/videos/${id}`, { method: "DELETE", credentials: "include" });
  if (!resp.ok) throw new Error(`HTTP ${resp.status}`);
}

// ── useVideoGeneration hook ───────────────────────────────────────────────────

/**
 * Drives one video generation: POSTs the request, then follows the task over
 * an SSE stream (`/api/videos/progress/:taskId`), exposing a `GenState` the UI
 * can render. `onSuccess` fires once with the completed `VideoRecord`.
 */
function useVideoGeneration(onSuccess: (v: VideoRecord) => void) {
  const { t } = useLang();
  const [state, setState] = useState<GenState>({
    phase: "idle",
    message: "",
    elapsedMs: 0,
    progress: null,
    result: null,
    errorMsg: null,
  });
  const esRef = useRef<EventSource | null>(null);
  const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const startTimeRef = useRef(0);
  // Track whether an error was already received via onmessage, so onerror doesn't override it
  const terminalPhaseRef = useRef(false);

  const stopTimer = useCallback(() => {
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
  }, []);

  // Close the SSE stream (if open) and stop the elapsed-time ticker.
  const stopSSE = useCallback(() => {
    if (esRef.current) {
      esRef.current.close();
      esRef.current = null;
    }
    stopTimer();
  }, [stopTimer]);

  // Restart the 1s elapsed-time ticker from "now".
  const startTimer = useCallback(() => {
    startTimeRef.current = Date.now();
    timerRef.current = setInterval(() => {
      setState((s) => ({ ...s, elapsedMs: Date.now() - startTimeRef.current }));
    }, 1000);
  }, []);

  const generate = useCallback(async (
    prompt: string,
    isPrivate: boolean,
    videoOpts: VideoOptions,
    refImageBase64?: string,
    refImageMime?: string,
  ) => {
    // A new run supersedes any previous one.
    stopSSE();
    terminalPhaseRef.current = false;
    setState({ phase: "submitting", message: t.videoPhaseSubmitting, elapsedMs: 0, progress: null, result: null, errorMsg: null });
    startTimer();

    let taskId: string;
    try {
      const result = await initiateVideoGeneration(prompt, isPrivate, videoOpts, refImageBase64, refImageMime);
      taskId = result.taskId;
    } catch (err) {
      stopTimer();
      setState((s) => ({
        ...s,
        phase: "error",
        message: "",
        errorMsg: err instanceof Error ? err.message : String(err),
      }));
      return;
    }

    // Open SSE connection to /progress/:taskId
    const es = new EventSource(`${BASE}/api/videos/progress/${taskId}`, { withCredentials: true });
    esRef.current = es;

    es.onmessage = (ev) => {
      try {
        const data = JSON.parse(ev.data) as {
          type: string;
          message?: string;
          status?: number;
          progress?: number;
          video?: VideoRecord;
          errorCode?: string;
        };
        if (data.type === "start") {
          // The server's start message tells us which sub-phase we are in.
          setState((s) => ({
            ...s,
            phase: data.message?.includes("Turnstile") ? "turnstile"
              : (data.message?.includes("連接") || data.message?.includes("connect")) ? "connecting"
              : "submitting",
            message: data.message || "",
          }));
        } else if (data.type === "progress") {
          setState((s) => ({
            ...s,
            phase: "generating",
            message: data.message || t.videoPhaseGenerating,
            progress: typeof data.progress === "number" ? data.progress : s.progress,
          }));
        } else if (data.type === "complete" && data.video) {
          terminalPhaseRef.current = true;
          stopSSE();
          setState((s) => ({
            ...s,
            phase: "done",
            message: t.videoDone,
            result: data.video!,
          }));
          onSuccess(data.video);
        } else if (data.type === "error") {
          terminalPhaseRef.current = true;
          stopSSE();
          setState((s) => ({
            ...s,
            phase: "error",
            message: "",
            errorMsg: data.message || t.videoGenFailed,
          }));
        } else if (data.type === "done") {
          // Stream closed by server (task was already in terminal state).
          // Use stopSSE() rather than es.close() so the elapsed-time interval
          // is stopped and esRef cleared — es.close() alone leaked the timer.
          stopSSE();
        }
      } catch { /* ignore parse errors */ }
    };

    es.onerror = () => {
      if (esRef.current === es) {
        // If we already received a terminal event (error/complete) via onmessage,
        // don't overwrite the state with the generic "stream disconnected" message.
        if (terminalPhaseRef.current) {
          stopSSE();
          return;
        }
        stopSSE();
        setState((s) => {
          if (s.phase !== "done" && s.phase !== "error") {
            return {
              ...s,
              phase: "error",
              message: "",
              errorMsg: t.videoConnectionFailed,
            };
          }
          return s;
        });
      }
    };
  }, [stopSSE, startTimer, stopTimer, onSuccess, t]);

  // Abort any in-flight run and return to the idle state.
  const reset = useCallback(() => {
    stopSSE();
    setState({ phase: "idle", message: "", elapsedMs: 0, progress: null, result: null, errorMsg: null });
  }, [stopSSE]);

  // Cleanup on unmount
  useEffect(() => () => { stopSSE(); }, [stopSSE]);

  return { state, generate, reset };
}

// ── ElapsedTimer component ────────────────────────────────────────────────────

/** Format elapsed milliseconds as `Mm Ss` (or just `Ss` under a minute). */
function formatMs(ms: number): string {
  const s = Math.floor(ms / 1000);
  const m = Math.floor(s / 60);
  const sec = s % 60;
  return m > 0 ? `${m}m ${sec}s` : `${sec}s`;
}

// ── VideoCard component ───────────────────────────────────────────────────────
function VideoCard({ vid, onDelete }: { vid: VideoRecord; onDelete: (id: number) => void }) { const { t } = useLang(); return (
// NOTE(review): the JSX markup from here through the end of GeneratingPanel
// appears to have been destroyed by an HTML-tag stripper — element tags are
// missing and only text children / embedded expressions remain. Recover the
// original markup from version control rather than hand-reconstructing it.
//
// VideoCard body: card preview, prompt text, model/duration/ratio/resolution
// badges, and a delete-confirmation AlertDialog wired to onDelete(vid.id).

{vid.prompt}

{vid.model} {vid.duration}s {vid.aspectRatio && {vid.aspectRatio}} {vid.resolution && {vid.resolution}}
{t.videoDelete} {vid.prompt.slice(0, 100)} {t.poolCancel} onDelete(vid.id)} className="bg-destructive hover:bg-destructive/90"> {t.videoDelete}
// End of VideoCard. GeneratingPanel (below) renders the in-progress UI: it
// maps each phase to a fixed bar value and, while "generating", maps the
// server's 0-100 progress into the 25-95 band of the bar. Its JSX interior
// (lines below) is likewise stripped of markup.
); } // ── GeneratingPanel component ───────────────────────────────────────────────── function GeneratingPanel({ state }: { state: GenState }) { const { t } = useLang(); const phaseProgress: Record = { idle: 0, submitting: 5, turnstile: 12, connecting: 20, generating: 25, done: 100, error: 0, }; // When generating: map geminigen.ai's 0-100 into the 25-95 range of our UI bar // so early phases (submitting/connecting) stay visible before generation starts. let progress: number; if (state.phase === "generating" && state.progress !== null) { progress = 25 + Math.round(state.progress * 0.70); // 25%–95% } else { progress = phaseProgress[state.phase]; } return (

{state.phase === "submitting" ? t.videoPhaseSubmitting : state.phase === "turnstile" ? t.videoPhaseGettingCaptcha : state.phase === "connecting" ? t.videoPhaseConnecting : state.phase === "generating" ? t.videoPhaseGenerating : t.videoPhaseProcessing}

{state.message && (

{state.message}

)}
{state.elapsedMs > 0 && (
{formatMs(state.elapsedMs)}
)}
// Phase checklist: passed steps get a check icon, the active step a spinner.
// NOTE(review): `Record` above and below lost its type arguments to the same
// tag-stripper (presumably Record<GenPhase, number> / Record<GenPhase, string>)
// — confirm against version control.
{(["submitting", "turnstile", "connecting", "generating"] as GenPhase[]).map((ph) => { const labels: Record = { submitting: t.videoPhaseSubmitLabel, turnstile: t.videoPhaseTurnstileLabel, connecting: t.videoPhaseConnectLabel, generating: t.videoPhaseGenerateLabel, }; const passed = phaseProgress[state.phase] > phaseProgress[ph]; const active = state.phase === ph; return (
{passed ? : active ? :
} {labels[ph]}
); })}
{state.phase === "generating" && (

{t.videoGeneratingHint}

)}
)} ); } // ── Main VideoPage ──────────────────────────────────────────────────────────── export function VideoPage() { const { t } = useLang(); const { toast } = useToast(); const queryClient = useQueryClient(); const { isSignedIn } = useAuth(); const [prompt, setPrompt] = useState(""); const [isPrivate, setIsPrivate] = useState(false); const [referenceImage, setReferenceImage] = useState(null); const fileInputRef = useRef(null); // ── Model selection ── const [selectedModel, setSelectedModel] = useState("grok-3"); // ── Video generation options ── const [aspectRatio, setAspectRatio] = useState("16:9"); const [resolution, setResolution] = useState("480p"); const [duration, setDuration] = useState(6); const [negativePrompt, setNegativePrompt] = useState(""); const [enhancePrompt, setEnhancePrompt] = useState(true); const [showAdvanced, setShowAdvanced] = useState(false); // When switching to Veo, enforce its constraints const handleModelChange = (model: VideoModel) => { setSelectedModel(model); if (model === "veo-3-fast") { setDuration(8); if (aspectRatio !== "16:9" && aspectRatio !== "9:16") setAspectRatio("16:9"); } else { setDuration(6); } }; const HISTORY_KEY = ["video-history"]; const { data: historyData, isLoading: historyLoading } = useQuery({ queryKey: HISTORY_KEY, queryFn: fetchVideoHistory, staleTime: 30_000, }); const handleSuccess = useCallback((video: VideoRecord) => { toast({ title: t.videoGenSuccess }); queryClient.invalidateQueries({ queryKey: HISTORY_KEY }); setPrompt(""); setReferenceImage(null); }, [toast, queryClient, t]); const { state: genState, generate, reset } = useVideoGeneration(handleSuccess); const isGenerating = genState.phase !== "idle" && genState.phase !== "done" && genState.phase !== "error"; const handleFileUpload = useCallback((file: File) => { if (!file.type.startsWith("image/")) { toast({ title: t.errorFormatTitle, description: t.errorFormatDesc, variant: "destructive" }); return; } const reader = new FileReader(); 
reader.onload = (e) => { const dataUrl = e.target?.result as string; const base64 = dataUrl.split(",")[1]; setReferenceImage({ base64, mime: file.type, preview: dataUrl }); }; reader.readAsDataURL(file); }, [t, toast]); const handleDrop = useCallback((e: React.DragEvent) => { e.preventDefault(); const file = e.dataTransfer.files[0]; if (file) handleFileUpload(file); }, [handleFileUpload]); const { mutate: removeVideo } = useMutation({ mutationFn: deleteVideo, onSuccess: () => { toast({ title: t.videoDeleteSuccess }); queryClient.invalidateQueries({ queryKey: HISTORY_KEY }); }, onError: () => { toast({ title: t.videoDeleteFailed, variant: "destructive" }); }, }); const videos = historyData?.videos ?? []; return (
{/* Header */}

{t.videoTitle}

{t.videoSubtitle}

{/* Generator Card */}
{/* ── Model Picker ── */}

{t.videoModel}

{([ { value: "grok-3" as VideoModel, label: "Grok-3", sub: t.videoModelGrokSub, desc: t.videoModelGrokDesc, color: "text-violet-400", border: "border-violet-500/40", bg: "bg-violet-500/10", }, { value: "veo-3-fast" as VideoModel, label: "Veo 3.1 Fast", sub: t.videoModelVeoSub, desc: t.videoModelVeoDesc, color: "text-sky-400", border: "border-sky-500/40", bg: "bg-sky-500/10", }, ]).map((m) => { const active = selectedModel === m.value; return ( ); })}
{selectedModel === "veo-3-fast" && (

{t.videoVeoTimingNote}

)}
{referenceImage && ( {t.videoModeImage} )}