// Provenance: uploaded by bigbossmonster ("Upload 24 files", commit ea81969, verified)
import { SRT_TRANSLATOR_PROMPT } from '../../../prompts/srttranslator.js';
import { tryModels, getPrompt, cleanSRTOutput, DEFAULT_SAFETY_SETTINGS } from '@/backend/services/ai/utils';
/**
* Counts the number of SRT blocks in a given text.
*/
/**
 * Counts SRT subtitle blocks by locating "numeric index line followed by a
 * timestamp line" patterns anywhere in the text.
 * @param {string|null|undefined} text - SRT-formatted text.
 * @returns {number} Number of SRT blocks detected (0 for empty/nullish input).
 */
function countSrtBlocks(text) {
  if (!text) return 0;
  const blockHeader = /^\d+\s*\r?\n\d{2}:\d{2}:\d{2},\d{3}/gm;
  const found = text.match(blockHeader);
  return found === null ? 0 : found.length;
}
/**
 * Translates SRT subtitle content via the Gemini API, processing the file in
 * fixed-size chunks with limited parallelism and an integrity-recovery retry
 * for chunks that come back with missing blocks.
 *
 * @param {string} srtContent - Raw SRT file content (CRLF or LF line endings).
 * @param {string} sourceLanguage - Language of the input subtitles.
 * @param {string} targetLanguage - Language to translate into.
 * @param {string} apiKey - API key forwarded to `tryModels`.
 * @param {boolean} [isOwnApi=false] - Caller-supplied-key flag; accepted for
 *   interface compatibility but not used in this function.
 * @returns {Promise<string>} Translated SRT content, chunks re-joined with blank lines.
 */
export async function srtTranslate(srtContent, sourceLanguage, targetLanguage, apiKey, isOwnApi = false) {
  // Model preference order: first entry is tried first by tryModels.
  const models = ['gemini-3-flash-preview', 'gemini-flash-lite-latest'];
  const finalPrompt = SRT_TRANSLATOR_PROMPT(sourceLanguage, targetLanguage);
  // Normalize line endings, then split on blank lines into subtitle blocks.
  // Nullish input degrades to an empty result instead of throwing.
  const blocks = (srtContent ?? '').replace(/\r\n/g, '\n').split(/\n\s*\n/).filter(b => b.trim().length > 0);
  if (blocks.length === 0) return '';
  const CHUNK_SIZE = 120; // Increased to 120 as requested for higher throughput
  const BATCH_SIZE = 2; // Enabled Parallel processing (2 at a time)
  const COOLDOWN = 1000; // Minimal cooldown between parallel batches
  console.log(`[AI-SRT-SAFE-MAX] Segment Blocks: ${blocks.length}. Chunks: ${Math.ceil(blocks.length/CHUNK_SIZE)}.`);
  // Pre-build chunk payloads and remember how many blocks each must contain,
  // so responses can be integrity-checked against the expected count.
  const chunkTexts = [];
  const chunkBlockCounts = [];
  for (let i = 0; i < blocks.length; i += CHUNK_SIZE) {
    const slice = blocks.slice(i, i + CHUNK_SIZE);
    chunkTexts.push(slice.join('\n\n'));
    chunkBlockCounts.push(slice.length);
  }
  const results = [];
  for (let i = 0; i < chunkTexts.length; i += BATCH_SIZE) {
    // Indices of the chunks handled in this parallel batch.
    const currentBatchIndices = Array.from({ length: Math.min(BATCH_SIZE, chunkTexts.length - i) }, (_, k) => i + k);
    const batchPromises = currentBatchIndices.map(idx =>
      tryModels(apiKey, models, async (ai, model) => {
        const inputText = chunkTexts[idx];
        const expectedCount = chunkBlockCounts[idx];
        let response = await ai.models.generateContent({
          model: model,
          contents: [{ parts: [{ text: `Translate these ${expectedCount} blocks now. Maintain all IDs:\n\n${inputText}` }] }],
          config: {
            temperature: 0.1,
            systemInstruction: finalPrompt,
            safetySettings: DEFAULT_SAFETY_SETTINGS,
            // Disable model "thinking" for speed on the first pass.
            thinkingConfig: { thinkingBudget: 0 }
          }
        });
        let translatedText = cleanSRTOutput(response.text);
        let actualCount = countSrtBlocks(translatedText);
        // --- INTEGRITY AUTO-RECOVERY ---
        // If the model dropped blocks, retry once at temperature 0 with a
        // stricter instruction (and thinking re-enabled by default).
        if (actualCount < expectedCount) {
          console.warn(`[AI-SRT] Gap detected in chunk ${idx} (${actualCount}/${expectedCount}). Retrying with max precision...`);
          response = await ai.models.generateContent({
            model: model,
            contents: [{ parts: [{ text: `CRITICAL: Do not skip lines. You MUST return exactly ${expectedCount} blocks. Translate ALL now:\n\n${inputText}` }] }],
            config: { temperature: 0.0, systemInstruction: finalPrompt, safetySettings: DEFAULT_SAFETY_SETTINGS }
          });
          translatedText = cleanSRTOutput(response.text);
          // Re-verify: a persistent gap is surfaced loudly so callers/operators
          // can spot silent subtitle loss, but the best-effort result is kept.
          actualCount = countSrtBlocks(translatedText);
          if (actualCount < expectedCount) {
            console.warn(`[AI-SRT] Chunk ${idx} still incomplete after retry (${actualCount}/${expectedCount}). Returning best-effort result.`);
          }
        }
        return translatedText;
      })
    );
    // Fail-fast: any chunk error in the batch rejects the whole translation.
    const batchResults = await Promise.all(batchPromises);
    results.push(...batchResults);
    // Throttle between batches to avoid rate limits (skip after the last one).
    if (i + BATCH_SIZE < chunkTexts.length) {
      await new Promise(r => setTimeout(r, COOLDOWN));
    }
  }
  return results.join('\n\n').trim();
}