// remote-rdr / server-plugins / split-render.js
import _ from 'lodash'
import fs from 'fs'
import { Plugin } from './plugin.js';
import { exec, spawn } from 'child_process'
import _path from 'path';
import { PerformanceRecorder } from 'common-utils';
/**
 * SplitRenderPlugin must be used with a transparent background:
 * Remotion renders the captions on a black background, and this
 * plugin then composites the transparent caption video over the
 * actual media files, combining them into the final output.
 */
export class SplitRenderPlugin extends Plugin {
  /**
   * @param {string} name - Plugin name, forwarded to the base Plugin.
   * @param {object} options - Plugin options, forwarded to the base Plugin.
   */
  constructor(name, options) {
    super(name, options);
  }

  /**
   * Pre-render hook: forces a transparent ProRes 4444 render (captions only)
   * and stashes each scene's media paths in `_mediaAbsPaths` so Remotion does
   * not render the media itself. The stashed paths are restored in
   * applyPostrender, where the media is composited back under the captions.
   *
   * @param {object} originalManuscript - Mutated in place (buildParams + transcript).
   * @param {string} jobId - Render job id (unused here, kept for the Plugin contract).
   */
  async applyPrerender(originalManuscript, jobId) {
    _.set(
      originalManuscript,
      'meta.generationConfig.extras.buildParams',
      `--codec=prores --prores-profile=4444 --pixel-format=yuva444p10le --transparent --image-format=png --concurrency=2 --hardware-acceleration=if-possible --enable-multiprocess-on-linux --offthreadvideo-cache-size-in-bytes=8589934592`
    );
    originalManuscript.transcript.forEach(element => {
      element._mediaAbsPaths = _.cloneDeep(element.mediaAbsPaths);
      element.mediaAbsPaths = [];
    });
  }

  /**
   * Post-render hook: probes the rendered caption overlay for its duration,
   * builds one ffmpeg command that concatenates every scene's background
   * media, keys out the overlay's black background, and composites the
   * overlay on top. Writes `out/final_<jobId>.mp4` and deletes the
   * intermediate overlay render on success.
   *
   * @param {object} originalManuscript - Transcript scenes; `_mediaAbsPaths` is restored here.
   * @param {string} jobId - Used to name the final output file.
   * @param {string[]} outFiles - Candidate render outputs (.webm/.mp4/.mov).
   * @returns {Promise<string>} Status message; rejects on probe/ffmpeg failure.
   */
  async applyPostrender(originalManuscript, jobId, outFiles) {
    // new Promise is required here to adapt the callback-based exec/spawn APIs.
    return new Promise((resolve, reject) => {
      const outFile = outFiles.find(f => f.includes('.webm')) || outFiles.find(f => f.includes('.mp4')) || outFiles.find(f => f.includes('.mov'));
      if (!outFile || !fs.existsSync(outFile)) return resolve('No output file found');
      const perf = new PerformanceRecorder();
      // 1) Probe overlay duration (master duration)
      const getVideoDuration = `ffprobe -v quiet -show_entries format=duration -of csv=p=0 "${outFile}"`;
      exec(getVideoDuration, (error, stdout) => {
        if (error) {
          console.error(`[SplitRenderPlugin] Error getting overlay duration: ${error.message}`);
          return reject(error);
        }
        const overlayDuration = parseFloat(stdout.trim());
        // Guard against an empty/garbled probe result: a NaN here would
        // otherwise propagate into every `-t` argument and break ffmpeg.
        if (!Number.isFinite(overlayDuration) || overlayDuration <= 0) {
          const err = new Error(`[SplitRenderPlugin] Invalid overlay duration from ffprobe: "${stdout.trim()}"`);
          console.error(err.message);
          return reject(err);
        }
        console.log(`[SplitRenderPlugin] Overlay duration: ${overlayDuration} seconds`);
        // Build tokenized ffmpeg args (DON'T concat strings; keep tokens to preserve paths)
        const ffmpegArgs = [];
        const bgInputIndices = [];
        let inputIndex = 0;
        originalManuscript.transcript.forEach(scene => {
          const durationInSeconds = scene.durationInSeconds;
          // Restore the media paths stashed by applyPrerender. Guard against
          // a missing stash (postrender without prerender, or run twice).
          scene.mediaAbsPaths = _.cloneDeep(scene._mediaAbsPaths) ?? scene.mediaAbsPaths ?? [];
          delete scene._mediaAbsPaths;
          for (let i = 0; i < scene.mediaAbsPaths.length; i++) {
            const { path, type, dimensions, durationSec } = scene.mediaAbsPaths[i];
            const fileName = _path.basename(path);
            const publicFilePath = `public/${fileName}`;
            if (type === 'video') {
              // Filter out non-finite candidates so a media entry without a
              // durationSec cannot turn the clip duration into NaN.
              // overlayDuration is validated finite above, so min() is safe.
              const clipDuration = Math.min(...[durationSec, durationInSeconds, overlayDuration].filter(Number.isFinite));
              ffmpegArgs.push('-ss', '0', '-t', String(clipDuration), '-i', publicFilePath);
              bgInputIndices.push(inputIndex++);
            } else if (type === 'image') {
              const clipDuration = Math.min(...[durationInSeconds, overlayDuration].filter(Number.isFinite));
              // -loop 1 turns the still image into a video stream of clipDuration.
              ffmpegArgs.push('-loop', '1', '-ss', '0', '-t', String(clipDuration), '-i', publicFilePath);
              bgInputIndices.push(inputIndex++);
            }
          }
        });
        if (bgInputIndices.length === 0) {
          return resolve('No input files to process. Skipping split-render post process');
        }
        // Add the front/overlay video last; this controls duration
        ffmpegArgs.push('-ss', '0', '-t', String(overlayDuration), '-i', outFile);
        const frontIdx = inputIndex++; // last added input is the overlay
        // 2) Build filter_complex:
        //    - Concatenate all background clips -> [bg]
        //    - Key out black from overlay -> [fg]
        //    - Overlay fg on bg, keep bg playing if fg ends -> eof_action=pass
        const concatInputs = bgInputIndices.map(i => `[${i}:v]`).join('');
        const concatFilter = `${concatInputs}concat=n=${bgInputIndices.length}:v=1:a=0[bg]`;
        const chromaKey = `[${frontIdx}:v]colorkey=0x000000:0.08:0.02[fg]`;
        const overlay = `[bg][fg]overlay=0:0:format=auto:eof_action=pass`;
        const filterComplex = `${concatFilter};${chromaKey};${overlay}`;
        const finalOutFile = `out/final_${jobId}.mp4`;
        const fullArgs = [
          ...ffmpegArgs,
          '-filter_complex', filterComplex,
          '-c:v', 'libx264',
          '-pix_fmt', 'yuv420p',
          '-y', finalOutFile,
        ];
        console.log('[SplitRenderPlugin] Running ffmpeg with args:', fullArgs);
        const ffmpegProcess = spawn('ffmpeg', fullArgs);
        let stdoutBuf = '';
        let stderrBuf = '';
        ffmpegProcess.stdout.on('data', (data) => {
          const output = data.toString();
          stdoutBuf += output;
          console.log('[SplitRenderPlugin] [FFmpeg STDOUT]:', output.trim());
        });
        ffmpegProcess.stderr.on('data', (data) => {
          const output = data.toString();
          stderrBuf += output;
          console.log('[SplitRenderPlugin] [FFmpeg STDERR]:', output.trim());
        });
        ffmpegProcess.on('close', (code) => {
          if (code !== 0) {
            const fullErrorLog = `FFmpeg process failed with code ${code}\n\nSTDOUT:\n${stdoutBuf}\n\nSTDERR:\n${stderrBuf}`;
            console.error('[SplitRenderPlugin] FFmpeg failed:', fullErrorLog);
            return reject(new Error(fullErrorLog));
          }
          // Best-effort cleanup of the intermediate overlay render;
          // a failure here must not fail the whole post-render step.
          try {
            if (fs.existsSync(outFile)) fs.unlinkSync(outFile);
            console.log('[SplitRenderPlugin] Removed initial render file:', outFile);
          } catch (e) {
            console.warn('[SplitRenderPlugin] Could not remove initial render file:', e.message);
          }
          console.log('[SplitRenderPlugin] FFmpeg process completed successfully, took ', perf.elapsedString());
          console.log('[SplitRenderPlugin] Final output file:', finalOutFile);
          resolve('FFmpeg completed successfully');
        });
        ffmpegProcess.on('error', (err) => {
          console.error('[SplitRenderPlugin] FFmpeg spawn error:', err);
          reject(err);
        });
      });
    });
  }
}