Spaces:
Sleeping
Sleeping
import { useCallback, useEffect, useRef, useState } from "react";

import { Clock, Download, Pause, Play, Square, Video } from "lucide-react";
import { toast } from "sonner";

import { Button } from "@/components/ui/button";
import { Card } from "@/components/ui/card";
import { Slider } from "@/components/ui/slider";
/** A single captured frame of the time-lapse sequence. */
interface TimelapseFrame {
  /** Milliseconds elapsed since recording started. */
  timestamp: number;
  /** Serialized canvas snapshot (string from onRequestSnapshot) — presumably a data URL; confirm against the canvas provider. */
  canvasData: string;
}

/** Callbacks the recorder uses to capture and restore the host canvas. */
interface TimelapseRecorderProps {
  /** Returns the current canvas contents serialized as a string, or null when unavailable. */
  onRequestSnapshot: () => string | null;
  /** Restores the canvas from a previously captured snapshot string. */
  onLoadSnapshot: (data: string) => void;
}
| export const TimelapseRecorder = ({ onRequestSnapshot, onLoadSnapshot }: TimelapseRecorderProps) => { | |
| const [isRecording, setIsRecording] = useState(false); | |
| const [isPlaying, setIsPlaying] = useState(false); | |
| const [frames, setFrames] = useState<TimelapseFrame[]>([]); | |
| const [currentFrame, setCurrentFrame] = useState(0); | |
| const [recordingDuration, setRecordingDuration] = useState(0); | |
| const recordingIntervalRef = useRef<NodeJS.Timeout | null>(null); | |
| const playbackIntervalRef = useRef<NodeJS.Timeout | null>(null); | |
| const recordingStartTimeRef = useRef<number>(0); | |
| const mediaRecorderRef = useRef<MediaRecorder | null>(null); | |
| const recordedChunksRef = useRef<Blob[]>([]); | |
| const startRecording = useCallback(() => { | |
| setIsRecording(true); | |
| setFrames([]); | |
| setCurrentFrame(0); | |
| recordingStartTimeRef.current = Date.now(); | |
| // Capture frames every 500ms | |
| recordingIntervalRef.current = setInterval(() => { | |
| const snapshot = onRequestSnapshot(); | |
| if (snapshot) { | |
| const timestamp = Date.now() - recordingStartTimeRef.current; | |
| setFrames(prev => [...prev, { timestamp, canvasData: snapshot }]); | |
| setRecordingDuration(timestamp); | |
| } | |
| }, 500); | |
| toast.success("Enregistrement démarré"); | |
| }, [onRequestSnapshot]); | |
| const stopRecording = useCallback(() => { | |
| if (recordingIntervalRef.current) { | |
| clearInterval(recordingIntervalRef.current); | |
| recordingIntervalRef.current = null; | |
| } | |
| setIsRecording(false); | |
| toast.success(`Enregistrement terminé: ${frames.length} frames capturées`); | |
| }, [frames.length]); | |
| const playTimelapse = useCallback(() => { | |
| if (frames.length === 0) { | |
| toast.error("Aucune séquence enregistrée"); | |
| return; | |
| } | |
| setIsPlaying(true); | |
| setCurrentFrame(0); | |
| let frameIndex = 0; | |
| playbackIntervalRef.current = setInterval(() => { | |
| if (frameIndex >= frames.length) { | |
| setIsPlaying(false); | |
| if (playbackIntervalRef.current) { | |
| clearInterval(playbackIntervalRef.current); | |
| } | |
| toast.success("Lecture terminée"); | |
| return; | |
| } | |
| onLoadSnapshot(frames[frameIndex].canvasData); | |
| setCurrentFrame(frameIndex); | |
| frameIndex++; | |
| }, 500); | |
| }, [frames, onLoadSnapshot]); | |
| const pauseTimelapse = useCallback(() => { | |
| if (playbackIntervalRef.current) { | |
| clearInterval(playbackIntervalRef.current); | |
| playbackIntervalRef.current = null; | |
| } | |
| setIsPlaying(false); | |
| }, []); | |
| const seekToFrame = useCallback((frameIndex: number) => { | |
| if (frameIndex >= 0 && frameIndex < frames.length) { | |
| setCurrentFrame(frameIndex); | |
| onLoadSnapshot(frames[frameIndex].canvasData); | |
| } | |
| }, [frames, onLoadSnapshot]); | |
| const exportAsVideo = useCallback(async () => { | |
| if (frames.length === 0) { | |
| toast.error("Aucune séquence à exporter"); | |
| return; | |
| } | |
| try { | |
| toast.loading("Préparation de l'export vidéo..."); | |
| // Create a temporary canvas for video export | |
| const canvas = document.createElement('canvas'); | |
| canvas.width = 800; | |
| canvas.height = 600; | |
| const ctx = canvas.getContext('2d'); | |
| if (!ctx) { | |
| toast.error("Erreur lors de la création du canvas"); | |
| return; | |
| } | |
| // Setup MediaRecorder | |
| const stream = canvas.captureStream(30); | |
| const mediaRecorder = new MediaRecorder(stream, { | |
| mimeType: 'video/webm;codecs=vp9', | |
| videoBitsPerSecond: 2500000 | |
| }); | |
| recordedChunksRef.current = []; | |
| mediaRecorder.ondataavailable = (event) => { | |
| if (event.data.size > 0) { | |
| recordedChunksRef.current.push(event.data); | |
| } | |
| }; | |
| mediaRecorder.onstop = () => { | |
| const blob = new Blob(recordedChunksRef.current, { type: 'video/webm' }); | |
| const url = URL.createObjectURL(blob); | |
| const a = document.createElement('a'); | |
| a.href = url; | |
| a.download = `experience_timelapse_${Date.now()}.webm`; | |
| a.click(); | |
| URL.revokeObjectURL(url); | |
| toast.success("Vidéo exportée avec succès"); | |
| }; | |
| mediaRecorder.start(); | |
| // Render each frame | |
| for (let i = 0; i < frames.length; i++) { | |
| const img = new Image(); | |
| await new Promise((resolve) => { | |
| img.onload = () => { | |
| ctx.clearRect(0, 0, canvas.width, canvas.height); | |
| ctx.drawImage(img, 0, 0); | |
| resolve(null); | |
| }; | |
| img.src = frames[i].canvasData; | |
| }); | |
| // Wait to maintain frame rate | |
| await new Promise(resolve => setTimeout(resolve, 500)); | |
| } | |
| mediaRecorder.stop(); | |
| } catch (error) { | |
| console.error("Export error:", error); | |
| toast.error("Erreur lors de l'export vidéo"); | |
| } | |
| }, [frames]); | |
| const formatDuration = (ms: number) => { | |
| const seconds = Math.floor(ms / 1000); | |
| const minutes = Math.floor(seconds / 60); | |
| const remainingSeconds = seconds % 60; | |
| return `${minutes}:${remainingSeconds.toString().padStart(2, '0')}`; | |
| }; | |
| return ( | |
| <Card className="p-4 space-y-4"> | |
| <div className="flex items-center gap-2"> | |
| <Video className="w-5 h-5 text-primary" /> | |
| <h3 className="font-semibold text-lg">Enregistrement Time-lapse</h3> | |
| </div> | |
| <div className="space-y-3"> | |
| {/* Recording Controls */} | |
| <div className="flex gap-2"> | |
| {!isRecording ? ( | |
| <Button onClick={startRecording} size="sm" disabled={isPlaying}> | |
| <Clock className="w-4 h-4 mr-2" /> | |
| Démarrer l'enregistrement | |
| </Button> | |
| ) : ( | |
| <Button onClick={stopRecording} variant="destructive" size="sm"> | |
| <Square className="w-4 h-4 mr-2" /> | |
| Arrêter | |
| </Button> | |
| )} | |
| </div> | |
| {/* Recording Status */} | |
| {isRecording && ( | |
| <div className="flex items-center gap-2 text-sm text-muted-foreground animate-pulse"> | |
| <div className="w-2 h-2 bg-red-500 rounded-full" /> | |
| Enregistrement en cours... {formatDuration(recordingDuration)} - {frames.length} frames | |
| </div> | |
| )} | |
| {/* Playback Controls */} | |
| {frames.length > 0 && !isRecording && ( | |
| <> | |
| <div className="space-y-2"> | |
| <div className="flex gap-2"> | |
| {!isPlaying ? ( | |
| <Button onClick={playTimelapse} size="sm" variant="secondary"> | |
| <Play className="w-4 h-4 mr-2" /> | |
| Lire | |
| </Button> | |
| ) : ( | |
| <Button onClick={pauseTimelapse} size="sm" variant="secondary"> | |
| <Pause className="w-4 h-4 mr-2" /> | |
| Pause | |
| </Button> | |
| )} | |
| <Button onClick={exportAsVideo} size="sm" variant="outline"> | |
| <Download className="w-4 h-4 mr-2" /> | |
| Exporter vidéo | |
| </Button> | |
| </div> | |
| {/* Timeline Slider */} | |
| <div className="space-y-1"> | |
| <Slider | |
| value={[currentFrame]} | |
| max={frames.length - 1} | |
| step={1} | |
| onValueChange={(value) => seekToFrame(value[0])} | |
| disabled={isPlaying} | |
| className="w-full" | |
| /> | |
| <div className="flex justify-between text-xs text-muted-foreground"> | |
| <span>Frame {currentFrame + 1} / {frames.length}</span> | |
| <span>{formatDuration(frames[currentFrame]?.timestamp || 0)}</span> | |
| </div> | |
| </div> | |
| </div> | |
| </> | |
| )} | |
| </div> | |
| </Card> | |
| ); | |
| }; | |