/**
 * FFmpeg-based video I/O for server-side watermark processing.
 *
 * Spawns FFmpeg subprocesses to decode/encode raw YUV420p frames.
 */
import { spawn, type ChildProcess } from 'node:child_process';
import { Readable, Writable } from 'node:stream';
/** Video metadata, as reported by ffprobe (see {@link probeVideo}). */
export interface VideoInfo {
  /** Frame width in pixels. */
  width: number;
  /** Frame height in pixels. */
  height: number;
  /** Frames per second, parsed from the stream's rational r_frame_rate. */
  fps: number;
  /** Container/stream duration in seconds. */
  duration: number;
  /** Estimated frame count: ceil(fps * duration). */
  totalFrames: number;
}
| /** | |
| * Probe video file for metadata using ffprobe | |
| */ | |
| export async function probeVideo(inputPath: string): Promise<VideoInfo> { | |
| return new Promise((resolve, reject) => { | |
| const proc = spawn('ffprobe', [ | |
| '-v', 'quiet', | |
| '-print_format', 'json', | |
| '-show_streams', | |
| '-show_format', | |
| inputPath, | |
| ]); | |
| let stdout = ''; | |
| let stderr = ''; | |
| proc.stdout.on('data', (d: Buffer) => (stdout += d.toString())); | |
| proc.stderr.on('data', (d: Buffer) => (stderr += d.toString())); | |
| proc.on('close', (code) => { | |
| if (code !== 0) { | |
| reject(new Error(`ffprobe failed (${code}): ${stderr}`)); | |
| return; | |
| } | |
| try { | |
| const info = JSON.parse(stdout); | |
| const videoStream = info.streams?.find((s: { codec_type: string }) => s.codec_type === 'video'); | |
| if (!videoStream) throw new Error('No video stream found'); | |
| const [num, den] = (videoStream.r_frame_rate || '30/1').split('/').map(Number); | |
| const fps = den ? num / den : 30; | |
| const duration = parseFloat(info.format?.duration || videoStream.duration || '0'); | |
| const totalFrames = Math.ceil(fps * duration); | |
| resolve({ | |
| width: videoStream.width, | |
| height: videoStream.height, | |
| fps, | |
| duration, | |
| totalFrames, | |
| }); | |
| } catch (e) { | |
| reject(new Error(`Failed to parse ffprobe output: ${e}`)); | |
| } | |
| }); | |
| }); | |
| } | |
| /** | |
| * Frame reader: decodes a video file to raw Y planes | |
| * Yields one Y plane (Uint8Array of width*height) per frame | |
| */ | |
| export async function* readYPlanes( | |
| inputPath: string, | |
| width: number, | |
| height: number | |
| ): AsyncGenerator<Uint8Array> { | |
| const frameSize = width * height; // Y plane only | |
| const yuvFrameSize = frameSize * 3 / 2; // YUV420p: Y + U/4 + V/4 | |
| const proc = spawn('ffmpeg', [ | |
| '-i', inputPath, | |
| '-f', 'rawvideo', | |
| '-pix_fmt', 'yuv420p', | |
| '-v', 'error', | |
| 'pipe:1', | |
| ], { stdio: ['ignore', 'pipe', 'pipe'] }); | |
| let buffer = Buffer.alloc(0); | |
| for await (const chunk of proc.stdout as AsyncIterable<Buffer>) { | |
| buffer = Buffer.concat([buffer, chunk]); | |
| while (buffer.length >= yuvFrameSize) { | |
| // Extract Y plane (first width*height bytes of YUV420p frame) | |
| const yPlane = new Uint8Array(buffer.subarray(0, frameSize)); | |
| yield yPlane; | |
| buffer = buffer.subarray(yuvFrameSize); | |
| } | |
| } | |
| } | |
| /** | |
| * Create a write pipe to FFmpeg for encoding watermarked frames | |
| * Returns a writable stream that accepts YUV420p frame data | |
| */ | |
| export function createEncoder( | |
| outputPath: string, | |
| width: number, | |
| height: number, | |
| fps: number, | |
| crf: number = 18 | |
| ): { stdin: Writable; process: ChildProcess } { | |
| const proc = spawn('ffmpeg', [ | |
| '-y', | |
| '-f', 'rawvideo', | |
| '-pix_fmt', 'yuv420p', | |
| '-s', `${width}x${height}`, | |
| '-r', String(fps), | |
| '-i', 'pipe:0', | |
| '-c:v', 'libx264', | |
| '-crf', String(crf), | |
| '-preset', 'medium', | |
| '-pix_fmt', 'yuv420p', | |
| '-v', 'error', | |
| outputPath, | |
| ], { stdio: ['pipe', 'ignore', 'pipe'] }); | |
| return { stdin: proc.stdin, process: proc }; | |
| } | |
| /** | |
| * Read all YUV420p frames from video and provide full frame buffers | |
| * (Y, U, V planes) for pass-through of chroma channels | |
| */ | |
| export async function* readYuvFrames( | |
| inputPath: string, | |
| width: number, | |
| height: number | |
| ): AsyncGenerator<{ y: Uint8Array; u: Uint8Array; v: Uint8Array }> { | |
| const ySize = width * height; | |
| const uvSize = (width / 2) * (height / 2); | |
| const frameSize = ySize + 2 * uvSize; | |
| const proc = spawn('ffmpeg', [ | |
| '-i', inputPath, | |
| '-f', 'rawvideo', | |
| '-pix_fmt', 'yuv420p', | |
| '-v', 'error', | |
| 'pipe:1', | |
| ], { stdio: ['ignore', 'pipe', 'pipe'] }); | |
| let buffer = Buffer.alloc(0); | |
| for await (const chunk of proc.stdout as AsyncIterable<Buffer>) { | |
| buffer = Buffer.concat([buffer, chunk]); | |
| while (buffer.length >= frameSize) { | |
| const y = new Uint8Array(buffer.subarray(0, ySize)); | |
| const u = new Uint8Array(buffer.subarray(ySize, ySize + uvSize)); | |
| const v = new Uint8Array(buffer.subarray(ySize + uvSize, frameSize)); | |
| yield { y, u, v }; | |
| buffer = buffer.subarray(frameSize); | |
| } | |
| } | |
| } | |