// Babel - Main Script (Cleaned & Optimized) let mediaRecorder; let audioChunks = []; let isRecording = false; let audioContext; let analyser; let micSource; let animationId; let recognition; let streamTimeout; let globalStream = null; // 🎀 PERSISTENT MIC STREAM let isRestarting = false; // Prevent double restart logic let isProcessingAudio = false; // Prevent duplicate audio processing let detectedLanguage = null; // Store detected language let isTTSPlaying = false; // Track TTS playback state to prevent mic feedback let textProcessingTriggered = false; // Track if text was already sent (prevents double-processing) let silenceDetectionActive = true; // Control silence detection loop let currentRecognitionLang = 'fr-FR'; // Track current recognition language for duplex mode // Global mode variable window.continuousMode = false; window.lastBotAudio = null; // 🌍 Exposed for Replay Button // πŸ†” Cycle tracking to prevent ghost handler duplicates let currentCycleId = 0; // πŸ”Š SILENCE DETECTION THRESHOLDS (More sensitive to prevent hallucination) // πŸ”Š VOLUME THRESHOLD Config moved to top of file const VOLUME_THRESHOLD = 8; // Noise Gate: Very sensitive (was 10) const SILENCE_LIMIT_MS = 5000; // 5.0s silence - VERY Generous pause time const SILENCE_THRESHOLD = 8; // Legacy support const MIN_RECORDING_TIME = 500; // Minimum 0.5 second recording const MIN_SPEECH_VOLUME = 5; // Minimum average volume to consider as speech const TYPING_SPEED_MS = 25; // CHAT UI HELPERS let recentMessages = new Set(); // πŸ›‘οΈ WHISPER HALLUCINATION FILTER - Common false outputs when silence/noise const HALLUCINATION_PHRASES = [ 'thanks for watching', 'thank you for watching', 'subscribe', 'like and subscribe', 'see you next time', 'bye bye', 'goodbye', 'merci d\'avoir regardΓ©', 'merci de votre attention', 'Γ  bientΓ΄t', 'sous-titres', 'sous-titrage', 'subtitles by', 'transcribed by', 'music', 'applause', '[music]', '[applause]', '...', 'you', 'the', 'i', 'a' ]; function 
isHallucination(text) {
  // Returns true when `text` looks like a Whisper hallucination or noise
  // artifact rather than real speech (too short, a known filler phrase from
  // HALLUCINATION_PHRASES, or a trivially repeated character/word pattern).
  if (!text) return true;
  const cleaned = text.toLowerCase().trim();
  // Too short = likely noise
  if (cleaned.length < 3) return true;
  // Check against known hallucinations
  for (const phrase of HALLUCINATION_PHRASES) {
    if (cleaned === phrase || cleaned.startsWith(phrase + '.') || cleaned.startsWith(phrase + '!')) {
      console.log(`🚫 HALLUCINATION BLOCKED: "${text}"`);
      return true;
    }
  }
  // Single repeated character or word
  if (/^(.)\1*$/.test(cleaned) || /^(\w+\s*)\1+$/.test(cleaned)) {
    console.log(`🚫 REPEATED PATTERN BLOCKED: "${text}"`);
    return true;
  }
  return false;
}

// Renders one chat bubble (user or bot) into #chat-history. Filters
// hallucinations and deduplicates identical messages within a 5s window;
// for bot messages it also wires up audio playback (continued below).
function createChatMessage(role, text, audioSrc = null, info = null, lang = null) {
  const chatHistory = document.getElementById('chat-history');
  if (!chatHistory) return;
  // πŸ›‘οΈ Block hallucinations at message level too
  if (isHallucination(text)) {
    console.log(`🚫 createChatMessage: Hallucination blocked: "${text}"`);
    return;
  }
  // πŸ›‘οΈ HOLY WAR VISUAL SHIELD (DEDUPLICATION)
  // Prevent duplicate messages within 5 seconds
  const normalizedText = text.trim().toLowerCase().substring(0, 100);
  const messageHash = `${role}-${normalizedText}`;
  if (recentMessages.has(messageHash)) {
    console.log(`πŸ›‘οΈ VISUAL SHIELD: Blocked duplicate message: "${text.substring(0, 30)}..."`);
    return;
  }
  recentMessages.add(messageHash);
  setTimeout(() => recentMessages.delete(messageHash), 5000); // 5 seconds Blocking Period
  const msgDiv = document.createElement('div');
  msgDiv.className = `message ${role}-message`;
  msgDiv.style.opacity = '0'; // For animation
  // Template continues on the next source line.
  msgDiv.style.cssText = ` background: ${role === 'user' ? 'rgba(30, 30, 35, 0.8)' : 'rgba(45, 45, 52, 0.8)'}; border-radius: 16px; padding: 20px; margin-bottom: 16px; border: 1px solid ${role === 'user' ?
'rgba(60, 60, 70, 0.5)' : 'rgba(80, 80, 90, 0.5)'}; `;
  // Language Badge (Always show)
  const langBadge = document.createElement('div');
  langBadge.className = 'lang-badge';
  langBadge.style.cssText = ` display: inline-block; background: ${role === 'user' ? 'rgba(60, 60, 70, 0.6)' : 'rgba(80, 80, 90, 0.6)'}; color: ${role === 'user' ? '#a0a0a8' : '#c0c0c8'}; padding: 6px 12px; border-radius: 8px; font-size: 0.75rem; font-weight: 600; text-transform: uppercase; letter-spacing: 0.05em; margin-bottom: 12px; `;
  // Determine language display
  let langDisplay = lang || (role === 'user' ? 'Input' : 'Translation');
  langBadge.innerText = `Language: ${langDisplay}`;
  msgDiv.appendChild(langBadge);
  // Text Content - Large and Clear
  const textDiv = document.createElement('div');
  textDiv.className = 'message-content';
  textDiv.style.cssText = ` font-size: 1.25rem; line-height: 1.7; color: #ffffff; font-weight: 400; margin-top: 8px; `;
  textDiv.innerText = text;
  msgDiv.appendChild(textDiv);
  // Audio Player Integration (Only for Bot)
  // πŸ›‘οΈ FALLBACK: If audioSrc is missing, use Browser TTS!
  if (role === 'bot') {
    if (!audioSrc) {
      console.warn("⚠️ No Audio from Server (API Limit/Error). Using Browser TTS Fallback.");
      // Browser TTS Fallback
      const utterance = new SpeechSynthesisUtterance(text);
      // Try to set language (default to detected target or whatever)
      // utterance.lang = 'en-US'; // Ideally passed in info
      window.speechSynthesis.speak(utterance);
    } else {
      // Standard Server Audio
      const audioContainer = document.createElement('div');
      audioContainer.className = 'audio-container';
      audioContainer.style.marginTop = '12px';
      audioContainer.style.background = 'rgba(0,0,0,0.1)';
      audioContainer.style.borderRadius = '8px';
      audioContainer.style.padding = '8px';
      audioContainer.style.display = 'flex';
      audioContainer.style.alignItems = 'center';
      audioContainer.style.gap = '10px';
      const playBtn = document.createElement('button');
      // NOTE(review): the play/pause icon markup appears to have been stripped
      // from these innerHTML assignments (all are empty strings) — confirm.
      playBtn.innerHTML = '';
      playBtn.className = 'icon-btn'; // Re-use existing class
      playBtn.style.width = '32px';
      playBtn.style.height = '32px';
      playBtn.style.background = '#fff';
      playBtn.style.color = '#333';
      // Waveform Visual (Fake/Static for aesthetics)
      const waveDiv = document.createElement('div');
      waveDiv.style.flex = '1';
      waveDiv.style.height = '4px';
      waveDiv.style.background = 'rgba(255,255,255,0.3)';
      waveDiv.style.borderRadius = '2px';
      waveDiv.style.position = 'relative';
      const progressDiv = document.createElement('div');
      progressDiv.style.width = '0%';
      progressDiv.style.height = '100%';
      progressDiv.style.background = '#fff';
      progressDiv.style.borderRadius = '2px';
      progressDiv.style.transition = 'width 0.1s linear';
      waveDiv.appendChild(progressDiv);
      // Audio Logic
      const audio = new Audio(audioSrc);
      audio.preload = 'auto'; // Force immediate buffer
      // 🌍 Update Global Replay Reference
      window.lastBotAudio = audio;
      playBtn.onclick = () => {
        if (audio.paused) {
          audio.play();
          playBtn.innerHTML = '';
        } else {
          audio.pause();
          playBtn.innerHTML = '';
        }
      };
      // πŸ”‡ CRITICAL: Pause speech recognition when TTS starts (prevent feedback loop)
      audio.onplay = () => {
        isTTSPlaying = true;
        console.log('πŸ”Š TTS Started - Pausing speech recognition to prevent feedback');
        // Pause browser speech recognition if active
        if (recognition) {
          try {
            recognition.stop();
            console.log('⏸️ Paused speech recognition during TTS');
          } catch (e) { }
        }
        // 🎯 DUPLEX MODE: MediaRecorder keeps running (don't pause it)
        // We only pause speech recognition to avoid feedback
        console.log('πŸŽ™οΈ MediaRecorder continues running during TTS');
      };
      audio.onended = () => {
        playBtn.innerHTML = '';
        progressDiv.style.width = '0%';
        // ▢️ CRITICAL: Resume after TTS
        isTTSPlaying = false;
        console.log('βœ… TTS ended - Ready for next conversation');
        // Update status for continuous mode
        if (window.continuousMode) {
          statusText.innerText = 'πŸ’€ PrΓͺt pour la suite...';
          statusText.style.color = '#4a9b87';
          console.log('πŸ”„ Continuous mode active - system will listen automatically');
        }
      };
      // 🚨 Error handling to prevent crashes
      audio.onerror = (e) => {
        console.error('❌ TTS playback error:', e);
        isTTSPlaying = false;
        playBtn.innerHTML = '';
        if (window.continuousMode) {
          statusText.innerText = '⚠️ Erreur TTS - PrΓͺt';
          statusText.style.color = '#ff6b6b';
        }
      };
      audio.ontimeupdate = () => {
        const percent = (audio.currentTime / audio.duration) * 100;
        progressDiv.style.width = `${percent}%`;
      };
      // πŸš€ AUTO-PLAY + PRE-CHECK
      // Ensure audio is playable immediately
      audio.oncanplay = () => {
        // Ready to start
      };
      audio.oncanplaythrough = () => {
        // Fully ready
      };
      audioContainer.appendChild(playBtn);
      audioContainer.appendChild(waveDiv);
      msgDiv.appendChild(audioContainer);
      // Latency Badge - REMOVED (cleaner UI)
      // Users don't need to see engine names
      // Immediate Trigger - Will auto-pause mic via onplay handler
      const playPromise = audio.play();
      if (playPromise !== undefined) {
        playPromise.then(_ => {
          playBtn.innerHTML = '';
        }).catch(error => {
          console.log("Auto-play blocked by browser policy:", error);
          if (isTTSPlaying) {
            // If blocked, we must ensure we don't get stuck in "TTS Playing" state
            console.warn("⚠️ Autoplay blocked. Resetting state.");
            isTTSPlaying = false;
            playBtn.innerHTML = '';
          }
        });
      }
    } // End of else (Server Audio)
  } // End of if (Bot Role)
  chatHistory.appendChild(msgDiv);
  // AUTO-SCROLL: Scroll both containers to show latest message
  const scrollToBottom = () => {
    // Scroll chat history (if it becomes scrollable)
    chatHistory.scrollTo({ top: chatHistory.scrollHeight, behavior: 'smooth' });
    // πŸš€ SUGAR: Scroll the Window (Main Stage is not scrollable)
    window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
  };
  // Immediate scroll
  scrollToBottom();
  // Scroll again after animation completes
  setTimeout(scrollToBottom, 300);
  setTimeout(scrollToBottom, 600);
  // Fade In Animation
  setTimeout(() => {
    msgDiv.style.transition = 'opacity 0.3s ease, transform 0.3s ease';
    msgDiv.style.transform = 'translateY(10px)';
    requestAnimationFrame(() => {
      msgDiv.style.opacity = '1';
      msgDiv.style.transform = 'translateY(0)';
    });
  }, 50);
}
// DOM Elements - declared but not initialized yet
let recordBtn, statusText, settingsBtn, settingsModal, audioPlayer;
let originalTextField, translatedTextField, quickLangSelector, sourceLangSelector, aiModelSelector;
// πŸ”Š AUDIO UNLOCKER: Play silence on click to enable Autoplay
// Plays a near-silent oscillator for 100ms inside a user gesture so that
// later programmatic audio.play() calls are not blocked by autoplay policy.
function unlockAudioContext() {
  try {
    const ctx = new (window.AudioContext || window.webkitAudioContext)();
    const osc = ctx.createOscillator();
    const gain = ctx.createGain();
    gain.gain.value = 0.001;
    osc.connect(gain);
    gain.connect(ctx.destination);
    osc.start(0);
    setTimeout(() => { osc.stop(); ctx.close(); }, 100);
    console.log("πŸ”“ Audio Autoplay Unlocked");
  } catch (e) {
    console.log("Audio unlock not needed");
  }
}
// ============================================================
// 🎯 INITIALIZE EVERYTHING WHEN DOM IS READY
// ============================================================
function initializeApp() {
  console.log('🎯 initializeApp() called');
  // πŸš€ FULL AUTO CONFIGURATION (User Request)
  // FIXME(security): a Google API key is hardcoded below and shipped to every
  // client via localStorage. Rotate this key and move it server-side.
  if (!localStorage.getItem('googleKey')) {
    console.log('πŸ’Ž FULL AUTO: Injecting Google API Key...');
    localStorage.setItem('googleKey', 'AIzaSyDB9wiqXsy1dG9OLU9r4Tar8oDdeVy4NOQ');
  }
  // Get DOM Elements
  recordBtn = document.getElementById('record-btn');
  statusText = document.getElementById('status-placeholder');
  settingsBtn = document.getElementById('settings-trigger');
  settingsModal = document.getElementById('settings-modal');
  audioPlayer = document.getElementById('audio-player');
  originalTextField = document.getElementById('original-text');
  translatedTextField = document.getElementById('translated-text');
  quickLangSelector = document.getElementById('target-lang-quick'); // πŸ”§ FIXED: Use correct ID
  sourceLangSelector = document.getElementById('source-lang-selector');
  aiModelSelector = document.getElementById('ai-model');
  console.log('πŸ“¦ DOM Elements loaded:');
  console.log(' - recordBtn:', recordBtn ? 'βœ… FOUND' : '❌ NOT FOUND');
  console.log(' - statusText:', statusText ? 'βœ… FOUND' : '❌ NOT FOUND');
  if (!recordBtn) {
    console.error('❌❌❌ CRITICAL: record-btn NOT FOUND IN DOM! ❌❌❌');
    return;
  }
  // πŸŽ™οΈ BUTTON CLICK HANDLER
  console.log('πŸ”§ Attaching click handler...');
  recordBtn.onclick = async function (e) {
    console.log('πŸ”˜πŸ”˜πŸ”˜ BUTTON CLICKED! πŸ”˜πŸ”˜πŸ”˜');
    e.preventDefault();
    e.stopPropagation();
    // Unlock audio
    unlockAudioContext();
    if (!window.continuousMode) {
      // START
      console.log('▢️ Starting continuous mode...');
      window.continuousMode = true;
      this.classList.add('active');
      if (statusText) {
        statusText.innerText = 'Γ‰coute en continu...';
        statusText.style.color = '#4a9b87';
      }
      try {
        await listenContinuously();
      } catch (error) {
        console.error('❌ Error:', error);
        window.continuousMode = false;
        this.classList.remove('active');
        if (statusText) {
          statusText.innerText = 'Erreur: ' + error.message;
          statusText.style.color = '#ff6b6b';
        }
      }
    } else {
      // STOP
      console.log('⏹️ Stopping continuous mode...');
      window.continuousMode = false;
      this.classList.remove('active');
      this.classList.remove('active-speech'); // βœ… FIXED: CSS class name
      this.classList.remove('processing'); // βœ… FIXED: Remove processing state too
      // Stop all components
      try {
        if (mediaRecorder && mediaRecorder.state !== 'inactive') { mediaRecorder.stop(); }
        if (recognition) { recognition.stop(); recognition = null; }
        if (audioContext && audioContext.state !== 'closed') { audioContext.close(); }
      } catch (e) {
        console.warn('Cleanup warning:', e);
      }
      // 🧹 CRITICAL: Full memory cleanup to keep system fast
      console.log('🧹 Cleaning up memory and cache...');
      audioContext = null;
      analyser = null;
      micSource = null;
      mediaRecorder = null;
      audioChunks = [];
      isRecording = false;
      isProcessingAudio = false;
      // NOTE(review): speechDetected is a local of listenContinuously(), not a
      // module-level variable — this assignment creates an implicit global.
      speechDetected = false;
      textProcessingTriggered = false;
      // Clear audio buffers
      if (animationId) {
        cancelAnimationFrame(animationId);
        animationId = null;
      }
      // ⚑ Clear backend conversation cache for fresh start
      fetch('/clear_cache', { method: 'POST' })
        .then(res => res.json())
        .then(data => console.log(`βœ… Backend cache cleared: ${data.cleared} entries`))
        .catch(e => console.warn('Cache clear failed:', e));
      if (statusText) {
        statusText.innerText = 'ArrΓͺtΓ©';
        statusText.style.color = '#888';
      }
      console.log('βœ… Stopped');
      // Kill global stream on full stop
      // (the `if` condition continues on the next source line)
      if
(globalStream) {
        try {
          globalStream.getTracks().forEach(track => track.stop());
        } catch (e) { }
        globalStream = null;
      }
    }
  };
  // πŸ“± MOBILE TOUCH SUPPORT (CRITICAL FIX)
  // Desktop: onclick works
  // Mobile: Need touchstart/touchend
  let touchHandled = false;
  recordBtn.addEventListener('touchstart', (e) => {
    e.preventDefault(); // Prevent mouse event simulation
    touchHandled = true;
    recordBtn.onclick(e); // Trigger the same logic
  }, { passive: false });
  recordBtn.addEventListener('touchend', (e) => {
    e.preventDefault();
  }, { passive: false });
  // Fallback for desktop
  recordBtn.addEventListener('click', (e) => {
    if (touchHandled) {
      touchHandled = false;
      return; // Already handled by touch
    }
    // Desktop logic continues normally
  });
  // Disable context menu
  recordBtn.oncontextmenu = (e) => e.preventDefault();
  // ============================================================
  // 🌍 LANGUAGE QUICK SELECTORS - Event Handlers
  // ============================================================
  const sourceLangQuick = document.getElementById('source-lang-quick');
  const targetLangQuick = document.getElementById('target-lang-quick');
  const swapLangsBtn = document.getElementById('swap-langs');
  // 🎯 SOURCE LANGUAGE CHANGE
  if (sourceLangQuick) {
    sourceLangQuick.addEventListener('change', function () {
      const newLang = this.value;
      console.log(`πŸ”„ Source language changed to: ${newLang}`);
      // Save to localStorage for persistence
      localStorage.setItem('sourceLangQuick', newLang);
      // Update status to show change
      if (statusText) {
        statusText.innerText = `πŸ“ Source: ${this.options[this.selectedIndex].text}`;
        statusText.style.color = '#4a9b87';
        setTimeout(() => { statusText.innerText = 'PrΓͺt'; statusText.style.color = '#888'; }, 2000);
      }
      // Restart recognition with new language if currently recording
      if (window.continuousMode && recognition) {
        console.log('πŸ”„ Restarting recognition with new source language...');
        try { recognition.stop(); } catch (e) { }
        // It will auto-restart with new language via onend handler
      }
    });
    // Restore saved value
    const savedSource = localStorage.getItem('sourceLangQuick');
    if (savedSource) { sourceLangQuick.value = savedSource; }
  }
  // 🎯 TARGET LANGUAGE CHANGE
  if (targetLangQuick) {
    targetLangQuick.addEventListener('change', function () {
      const newLang = this.value;
      console.log(`🎯 Target language changed to: ${newLang}`);
      // Save to localStorage for persistence
      localStorage.setItem('targetLangQuick', newLang);
      // Also update the main selector if it exists
      if (quickLangSelector) { quickLangSelector.value = newLang; }
      // Update status to show change
      if (statusText) {
        statusText.innerText = `🎯 Cible: ${this.options[this.selectedIndex].text}`;
        statusText.style.color = '#4a9b87';
        setTimeout(() => { statusText.innerText = 'PrΓͺt'; statusText.style.color = '#888'; }, 2000);
      }
    });
    // Restore saved value
    const savedTarget = localStorage.getItem('targetLangQuick');
    if (savedTarget) { targetLangQuick.value = savedTarget; }
  }
  // πŸ”„ SWAP LANGUAGES BUTTON
  if (swapLangsBtn) {
    swapLangsBtn.addEventListener('click', function () {
      console.log('πŸ”„ Swapping languages...');
      const sourceSelect = document.getElementById('source-lang-quick');
      const targetSelect = document.getElementById('target-lang-quick');
      if (!sourceSelect || !targetSelect) {
        console.warn('Language selectors not found');
        return;
      }
      // Get current values
      const currentSource = sourceSelect.value;
      const currentTarget = targetSelect.value;
      // Map target names to source codes
      const targetToSourceMap = {
        'French': 'fr-FR', 'English': 'en-US', 'Arabic': 'ar-SA',
        'Moroccan Darija': 'ar-SA', 'Spanish': 'es-ES', 'German': 'de-DE'
      };
      // Map source codes to target names
      const sourceToTargetMap = {
        'fr-FR': 'French', 'en-US': 'English', 'ar-SA': 'Arabic',
        'es-ES': 'Spanish', 'de-DE': 'German',
        'auto': 'French' // Default when swapping from auto
      };
      // Calculate new values
      const newSourceCode = targetToSourceMap[currentTarget] || 'auto';
      // (the right-hand side continues on the next source line)
      const newTargetName = sourceToTargetMap[currentSource] ||
'French'; // Apply swap sourceSelect.value = newSourceCode; targetSelect.value = newTargetName; // Save to localStorage localStorage.setItem('sourceLangQuick', newSourceCode); localStorage.setItem('targetLangQuick', newTargetName); // Visual feedback this.style.transform = 'rotate(180deg)'; setTimeout(() => { this.style.transform = 'rotate(0deg)'; }, 300); // Update status if (statusText) { statusText.innerText = `πŸ”„ ${sourceSelect.options[sourceSelect.selectedIndex].text} ↔ ${targetSelect.options[targetSelect.selectedIndex].text}`; statusText.style.color = '#60a5fa'; setTimeout(() => { statusText.innerText = 'PrΓͺt'; statusText.style.color = '#888'; }, 2500); } console.log(`βœ… Swapped: ${currentSource} β†’ ${newTargetName}, ${currentTarget} β†’ ${newSourceCode}`); // Restart recognition if active if (window.continuousMode && recognition) { try { recognition.stop(); } catch (e) { } } }); } console.log('🌍 Language quick selectors initialized'); console.log('βœ…βœ…βœ… BUTTON HANDLER ATTACHED! βœ…βœ…βœ…'); } // Run initialization when DOM is ready if (document.readyState === 'loading') { console.log('πŸ“„ DOM not ready, waiting for DOMContentLoaded...'); document.addEventListener('DOMContentLoaded', initializeApp); } else { console.log('πŸ“„ DOM already ready, initializing now...'); initializeApp(); } // --- CONTINUOUS CONVERSATION MODE --- async function listenContinuously() { if (!window.continuousMode) { console.log("❌ listenContinuously called but window.continuousMode is false"); return; } // πŸ›‘οΈ RECURSION GUARD: If we are already recording, don't start another loop! 
if (isRecording) {
    console.log("⚠️ Already recording, skipping duplicate start request");
    return;
  }
  console.log("πŸŽ™οΈ Starting NEW listening cycle...");
  try {
    // πŸ”₯ CRITICAL: Increment cycle ID to invalidate old handlers
    currentCycleId++;
    const thisCycleId = currentCycleId;
    console.log(`πŸ†” Cycle ID: ${thisCycleId}`);
    // πŸ”₯ CRITICAL: Clean up old MediaRecorder to prevent ghost handlers
    if (mediaRecorder && mediaRecorder.state !== 'inactive') {
      try {
        console.log("🧹 Cleaning up old mediaRecorder");
        // Don't call stop() - it will trigger the old onstop handler!
        // Just abandon it and create a new one
        mediaRecorder.ondataavailable = null;
        mediaRecorder.onstop = null;
        mediaRecorder = null;
      } catch (e) {
        console.warn("Cleanup warning:", e);
      }
    }
    isRecording = true;
    audioChunks = [];
    let speechDetected = false; // Reset speech detection
    textProcessingTriggered = false; // Reset flag
    silenceDetectionActive = true; // Enable silence detection
    let stream;
    // πŸš€ REUSE STREAM IF AVAILABLE
    if (globalStream && globalStream.active) {
      console.log("♻️ Reusing existing microphone stream");
      stream = globalStream;
    } else {
      console.log("🎀 Requesting NEW microphone access...");
      stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      globalStream = stream;
      console.log("βœ… Microphone access granted");
    }
    // 🎯 REAL-TIME TRANSCRIPTION: Start immediately
    startRealTimeTranscription();
    // Setup audio analysis for silence detection
    // Reuse context if active to reduce click/pop
    if (!audioContext || audioContext.state === 'closed') {
      audioContext = new (window.AudioContext || window.webkitAudioContext)();
    }
    analyser = audioContext.createAnalyser();
    micSource = audioContext.createMediaStreamSource(stream);
    micSource.connect(analyser);
    analyser.fftSize = 256;
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    // NOTE(review): silenceStart is assigned here and again in monitorAudio,
    // but never read anywhere in this chunk — possibly vestigial.
    let silenceStart = Date.now();
    // speechDetected already declared above
    mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = e => {
      // πŸ›‘οΈ Only process if this is still the current cycle
      if (thisCycleId === currentCycleId) {
        audioChunks.push(e.data);
      } else {
        console.warn(`⚠️ Ignoring data from old cycle ${thisCycleId} (current: ${currentCycleId})`);
      }
    };
    mediaRecorder.onstop = async () => {
      // πŸ›‘οΈ CRITICAL: Ignore events from old cycles
      if (thisCycleId !== currentCycleId) {
        console.warn(`⚠️ Ignoring onstop from old cycle ${thisCycleId} (current: ${currentCycleId})`);
        return;
      }
      console.log(`πŸ›‘ Chunk finalized (Cycle ${thisCycleId}).`);
      // DON'T stop transcription here if we want continuous, but we do need to reset it
      // for the new chunk context. Actually, let's keep it simply "Running".
      // Process audio if we have valid speech
      if (audioChunks.length > 0 && speechDetected) {
        const blob = new Blob(audioChunks, { type: 'audio/wav' });
        if (blob.size > 2000) {
          // πŸš€ INSTANT RESTART: Don't wait for processing!
          // Trigger processing in background
          statusText.innerText = 'Traitement...';
          statusText.style.color = '#4a9b87';
          // We DO NOT await here. We fire and forget (mostly),
          // or let it handle the UI updates asynchronously.
          // ⚑ OPTIMIZATION: Pass 'true' to bypass redundant silence check (we already checked it!)
          // NOTE(review): processAudio is not defined in this portion of the
          // file — presumably defined later; verify.
          processAudio(blob, true).catch(e => console.error("Processing error:", e));
        }
      }
      // πŸ”„ INSTANT RESTART (Synchronized)
      // Immediately restart listening *unless* completely stopped
      if (window.continuousMode) {
        // Reset flags for next turn - NOW it is safe to reset
        speechDetected = false;
        // πŸš€ AGGRESSIVE IMMEDIATE RESTART
        // Use 0ms delay to unblock the event loop but start ASAP
        setTimeout(() => {
          if (window.continuousMode) {
            console.log("πŸ”„ Instant Restart Triggered (Parallel)");
            listenContinuously();
          }
        }, 0);
      }
      // NOTE: We do NOT close audioContext here anymore, to keep it 'warm'.
      // Only disconnect analyser to save CPU if needed, but keeping it open is faster.
try {
        if (micSource) micSource.disconnect();
        if (analyser) analyser.disconnect();
        if (animationId) cancelAnimationFrame(animationId);
      } catch (e) { }
    };
    mediaRecorder.start();
    if (animationId) cancelAnimationFrame(animationId);
    // 🎯 Continuous monitoring with better noise filtering
    let consecutiveSpeechFrames = 0;
    let consecutiveSilenceFrames = 0;
    const SPEECH_FRAMES_THRESHOLD = 3; // React faster to speech (3 frames = ~50ms)
    const SILENCE_FRAMES_THRESHOLD = 1200; // ~20.0s silence (User request: "Keep button working / Don't cut")
    // Per-frame (requestAnimationFrame) volume monitor: confirms speech after
    // SPEECH_FRAMES_THRESHOLD loud frames, stops the recorder after
    // SILENCE_FRAMES_THRESHOLD quiet frames.
    function monitorAudio() {
      if (!window.continuousMode || !isRecording) {
        console.log("πŸ›‘ Audio monitoring stopped");
        return;
      }
      // πŸ”‡ Skip monitoring while TTS is playing
      if (isTTSPlaying) {
        requestAnimationFrame(monitorAudio);
        return;
      }
      analyser.getByteFrequencyData(dataArray);
      let sum = 0;
      for (let i = 0; i < bufferLength; i++) sum += dataArray[i];
      const average = sum / bufferLength;
      // 🎯 Better noise filtering
      // Use local VOLUME_THRESHOLD if defined, else generic 10
      // NOTE(review): the module-level VOLUME_THRESHOLD (8) is NOT used here;
      // the literal 4 below is the effective noise gate.
      if (average > 4) { // Hardcoded decent threshold for consistency
        consecutiveSpeechFrames++;
        consecutiveSilenceFrames = 0;
        // Only mark as speech after consecutive loud frames (avoid false starts)
        if (consecutiveSpeechFrames >= SPEECH_FRAMES_THRESHOLD && !speechDetected) {
          speechDetected = true;
          silenceStart = Date.now();
          console.log("πŸ—£οΈ Speech confirmed (filtered noise)");
          statusText.innerText = '🎀 Enregistrement...';
          statusText.style.color = '#ff4444';
          recordBtn.classList.add('active-speech'); // βœ… FIXED: Match CSS class
        }
      } else {
        consecutiveSpeechFrames = 0;
        consecutiveSilenceFrames++;
        if (!speechDetected) {
          // Still waiting for speech
          if (!statusText.innerText.includes('Traitement')) {
            statusText.innerText = 'πŸ’€ En attente de parole...';
            statusText.style.color = '#888';
          }
        } else {
          // Speech detected before, now checking for end
          if (consecutiveSilenceFrames >= SILENCE_FRAMES_THRESHOLD) {
            console.log('🀫 Silence confirmed - ending speech');
consecutiveSpeechFrames = 0;
            consecutiveSilenceFrames = 0;
            // speechDetected = false; // ❌ DON'T RESET HERE! Needed for onstop check.
            isRecording = false;
            recordBtn.classList.remove('active-speech'); // βœ… FIXED: Match CSS class
            // Stop recorder to process
            if (mediaRecorder && mediaRecorder.state === 'recording') {
              mediaRecorder.stop();
            }
            return;
          }
        }
      }
      animationId = requestAnimationFrame(monitorAudio);
    }
    monitorAudio();
  } catch (err) {
    console.error('Erreur listenContinuously:', err);
    window.continuousMode = false;
    recordBtn.classList.remove('active');
  }
}
// --- REAL-TIME TRANSCRIPTION (Consolidated & Cleaned) ---
// This function handles browser-based speech recognition for instant feedback
// ⚠️ NOTE: Browser SpeechRecognition does NOT support Darija well - we use it only for visual feedback
let arabicModeActive = false; // Track if we're in Arabic/Darija mode
function startRealTimeTranscription() {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  // 1. Check Browser Support
  if (!SpeechRecognition) {
    console.warn("⚠️ Browser Speech Recognition not supported.");
    return;
  }
  // 2. Prevent Multiple Instances
  if (recognition) {
    try { recognition.stop(); } catch (e) { }
    recognition = null;
  }
  // 🧹 Reset global capture
  window.currentTranscript = "";
  try {
    // 3. Get language from QUICK SELECTORS (visible in UI)
    const sourceLangQuick = document.getElementById('source-lang-quick');
    const targetLangQuick = document.getElementById('target-lang-quick');
    const targetLang = targetLangQuick?.value || quickLangSelector?.value || 'French';
    let sourceLang = sourceLangQuick?.value || localStorage.getItem('sourceLangQuick') || 'auto';
    console.log(`🎯 Quick Selectors: Source=${sourceLang}, Target=${targetLang}`);
    // πŸ‡²πŸ‡¦ SMART MODE: Only activate if source is 'auto'
    if (sourceLang === 'auto') {
      // Smart detection based on target language
      if (targetLang === 'French') {
        sourceLang = 'ar-SA'; // Likely speaking Arabic/Darija
        arabicModeActive = true;
        console.log('πŸ‡²πŸ‡¦ AUTO MODE: Target=French β†’ Assuming Arabic/Darija');
      } else if (targetLang === 'Moroccan Darija' || targetLang === 'Arabic') {
        sourceLang = 'fr-FR'; // Likely speaking French
        arabicModeActive = false;
        console.log('πŸ‡«πŸ‡· AUTO MODE: Target=Arabic β†’ Assuming French');
      } else if (targetLang === 'English') {
        // Default to Arabic for Moroccan users, but check cache
        sourceLang = detectedLanguage === 'French' ? 'fr-FR' : 'ar-SA';
        arabicModeActive = sourceLang === 'ar-SA';
        console.log(`🌍 AUTO MODE: Target=English β†’ Assuming ${sourceLang}`);
      } else {
        sourceLang = 'fr-FR'; // Default fallback
        arabicModeActive = false;
      }
    } else {
      // MANUAL MODE: User selected specific source language
      arabicModeActive = sourceLang === 'ar-SA';
      console.log(`πŸ“Œ MANUAL MODE: Source=${sourceLang} (Arabic mode: ${arabicModeActive})`);
    }
    // 4. Configure Recognition with selected language
    recognition = new SpeechRecognition();
    recognition.continuous = true;
    recognition.interimResults = true;
    recognition.lang = sourceLang;
    currentRecognitionLang = sourceLang;
    console.log(`🎀 Browser Recognition: ${sourceLang} (Arabic mode: ${arabicModeActive})`);
    // 5. Event Handlers
    recognition.onstart = () => {
      console.log("βœ… Real-time transcription active");
      if (navigator.vibrate) navigator.vibrate(50); // Haptic feedback
    };
    recognition.onerror = (event) => {
      console.warn("❌ Recognition error:", event.error);
      if (event.error === 'not-allowed') {
        statusText.innerText = "⚠️ AccΓ¨s micro refusΓ©";
        statusText.style.color = "yellow";
      } else if (event.error !== 'aborted') {
        // Restart recognition on non-critical errors (in continuous mode)
        if (window.continuousMode && isRecording) {
          console.log("πŸ”„ Restarting recognition after error...");
          setTimeout(() => {
            if (window.continuousMode && isRecording) {
              try { recognition.start(); } catch (e) { }
            }
          }, 500);
        }
      }
    };
    recognition.onend = () => {
      console.log("πŸ”„ Recognition ended");
      // Auto-restart in continuous mode (unless TTS is playing)
      if (window.continuousMode && isRecording) {
        if (isTTSPlaying) {
          console.log("⏸️ TTS is playing - recognition will restart when TTS ends");
          // Don't restart now - the audio.onended handler will do it
        } else {
          console.log("πŸ”„ Auto-restarting recognition for continuous mode...");
          setTimeout(() => {
            if (window.continuousMode && isRecording && !isTTSPlaying) {
              try {
                recognition.start();
                console.log("βœ… Recognition restarted successfully");
              } catch (e) {
                console.warn("Could not restart recognition:", e);
              }
            }
          }, 300);
        }
      }
    };
    recognition.onresult = (event) => {
      let interimTranscript = '';
      let finalTranscript = '';
      for (let i = event.resultIndex; i < event.results.length; ++i) {
        const transcript = event.results[i][0].transcript;
        if (event.results[i].isFinal) {
          finalTranscript += transcript;
        } else {
          interimTranscript += transcript;
        }
      }
      // ✨ AFFICHAGE INSTANTANΓ‰ - LYRICS STYLE
      const fullText = finalTranscript || interimTranscript;
      // πŸš€ STEAL THE MICROPHONE: Store global transcript for processAudio to pick up
      if (finalTranscript.trim().length > 0) {
        window.currentTranscript = finalTranscript;
      } else if (interimTranscript.trim().length > 0) {
window.currentTranscript = interimTranscript;
      }
      if (fullText.trim().length > 0) {
        // πŸ‡²πŸ‡¦ ARABIC MODE: Browser recognition is unreliable for Arabic/Darija
        // Show the text but with a note that final transcription will be better
        if (arabicModeActive) {
          // For Arabic, only show if it looks like actual Arabic text
          const hasArabicChars = /[\u0600-\u06FF]/.test(fullText);
          if (hasArabicChars && originalTextField) {
            originalTextField.innerText = fullText;
            originalTextField.hidden = false;
            originalTextField.style.opacity = '1';
            originalTextField.style.direction = 'rtl';
            originalTextField.style.textAlign = 'right';
            originalTextField.style.fontSize = '1.3rem';
            originalTextField.style.fontWeight = '500';
          } else {
            // Browser gave garbage (Latin chars for Arabic speech) - show waiting status
            if (originalTextField) {
              originalTextField.innerText = '🎀 جاري Ψ§Ω„Ψ§Ψ³ΨͺΩ…Ψ§ΨΉ...'; // "Listening..." in Arabic
              originalTextField.style.direction = 'rtl';
              originalTextField.style.textAlign = 'right';
              originalTextField.style.opacity = '0.7';
            }
          }
        } else {
          // French/English mode - show normally
          if (originalTextField) {
            originalTextField.innerText = fullText;
            originalTextField.hidden = false;
            originalTextField.style.opacity = '1';
            originalTextField.style.direction = 'ltr';
            originalTextField.style.textAlign = 'left';
            originalTextField.style.fontSize = '1.2rem';
            originalTextField.style.fontWeight = '500';
            originalTextField.style.lineHeight = '1.6';
            originalTextField.style.fontStyle = 'normal';
            originalTextField.style.animation = 'fadeIn 0.3s ease';
          }
        }
        // Scroll to bottom
        const chatHistory = document.getElementById('chat-history');
        if (chatHistory) chatHistory.scrollTop = chatHistory.scrollHeight;
        // ✨ Recognition is ONLY for visual display
        if (finalTranscript.trim().length > 2) {
          console.log("βœ… Sentence transcribed (visual feedback only)");
        }
      }
    };
    // 6. Start
    recognition.start();
  } catch (e) {
    console.error("❌ Fatal Error starting recognition:", e);
  }
}
// πŸš€ INTELLIGENT MODE: Send Text directly (Primary trigger via isFinal)
// POSTs already-transcribed text to /process_audio for backend language
// detection, translation, and TTS; updates the translated-text display.
async function sendTextForProcessing(text) {
  if (isProcessingAudio) {
    console.log("⚠️ Already processing, skipping duplicate...");
    return;
  }
  isProcessingAudio = true;
  const targetLang = quickLangSelector?.value || 'French';
  // 🌍 Language detection is now handled by the backend with Gemini
  console.log(`πŸ“€ Sending text for processing: "${text}"`);
  statusText.innerText = 'Traduction en cours...';
  statusText.style.color = '#4a9b87';
  const payload = {
    text_input: text, // Sending TEXT, not AUDIO
    source_language: 'auto', // Let backend (Gemini) detect language
    target_language: targetLang, // Use quick-lang-selector
    model: localStorage.getItem('selectedModel') || 'Gemini', // Updated default
    tts_engine: localStorage.getItem('ttsEngine') || 'openai', // Updated default
    stt_engine: localStorage.getItem('sttEngine') || 'seamless-m4t', // NEW: SeamlessM4T default
    ai_correction: localStorage.getItem('aiCorrectionEnabled') !== 'false', // NEW: AI Correction enabled by default
    voice_cloning: false,
    use_grammar_correction: localStorage.getItem('grammarCorrectionEnabled') !== 'false',
    voice_gender_preference: localStorage.getItem('voiceGenderPreference') || 'auto'
  };
  try {
    const response = await fetch('/process_audio', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    });
    const data = await response.json();
    if (data.error) {
      console.error("❌ Processing error:", data.error);
      statusText.innerText = 'Erreur';
    } else {
      // Handle Success - Update translated text display only
      // Chat messages are created by the main axios handler to avoid duplicates
      if (translatedTextField) {
        translatedTextField.innerText = data.translated_text;
        translatedTextField.style.opacity = '1';
      }
      // 🧠 SMART MODE LATCHING: Update recognition language for the NEXT turn
      if (data.source_language_full) {
        const newLang = data.source_language_full;
        console.log(`🧠 SMART MODE: Latching onto detected language: ${newLang}`);
        // Update the global state so startRealTimeTranscription uses it
        // We map standard names to BCP-47 codes for SpeechRecognition
        const langToCode = {
          'French': 'fr-FR', 'English': 'en-US',
          'Arabic': 'ar-SA', // Default to SA for generic, or MA if available
          'Moroccan Darija': 'ar-MA', // Chrome might treat this as ar-SA or similar
          'Spanish': 'es-ES', 'German': 'de-DE', 'Italian': 'it-IT',
          'Portuguese': 'pt-PT', 'Russian': 'ru-RU', 'Japanese': 'ja-JP',
          'Korean': 'ko-KR', 'Chinese': 'zh-CN', 'Hindi': 'hi-IN'
        };
        const code = langToCode[newLang];
        if (code) {
          currentRecognitionLang = code;
          detectedLanguage = newLang; // Update global detected
          // If we are in AUTO mode, this is critical
          if (document.getElementById('source-lang-selector').value === 'auto') {
            console.log(`πŸ”„ UPDATING RECOGNITION to ${code} for next turn`);
            // Restart recognition if it's running, to apply new language
            if (recognition) {
              try { recognition.stop(); } catch (e) { }
              // It will auto-restart via the 'end' event or our continuous loop
            }
          }
        }
      }
      console.log("βœ… Text processing complete - TTS will play automatically");
      // Update status for continuous mode
      if (window.continuousMode) {
        statusText.innerText = 'πŸ”Š Lecture TTS...';
        statusText.style.color = '#4a9b87';
        console.log('πŸŽ™οΈ Continuous mode active - will resume listening after TTS');
      } else {
        statusText.innerText = 'PrΓͺt';
      }
    }
  } catch (e) {
    console.error("❌ Text processing error:", e);
    statusText.innerText = 'Erreur rΓ©seau';
  } finally {
    isProcessingAudio = false;
  }
}
// --- RECORDER LOGIC ---
// Silence Detection Config - Moved to top of file
// CONSTANTS ARE GLOBAL NOW
async function startSmartRecording() {
  try {
    console.log('🎀 STARTING RECORDING...');
    isRecording = true;
    recordBtn.classList.add('active');
    statusText.innerText = 'Γ‰coute...';
    statusText.style.color = 'white';
document.dispatchEvent(new Event('reset-ui')); originalTextField.innerText = '...'; translatedTextField.innerText = '...'; // 🎀 EXPERT MICROPHONE CONFIGURATION const stream = await navigator.mediaDevices.getUserMedia({ audio: { echoCancellation: true, // πŸ›‘οΈ Prevent Speaker Feedack noiseSuppression: true, // πŸ”‡ Remove Background Noise autoGainControl: true, // 🎚️ Normalize Volume channelCount: 1, sampleRate: 48000 } }); // 1. Setup Audio Analysis (Silence Detection) audioContext = new (window.AudioContext || window.webkitAudioContext)(); // ⚑ EXPERT: Force Active Context (Wake up the Audio Engine) if (audioContext.state === 'suspended') { await audioContext.resume(); console.log('⚑ AudioContext Force-Resumed'); } analyser = audioContext.createAnalyser(); micSource = audioContext.createMediaStreamSource(stream); micSource.connect(analyser); analyser.fftSize = 256; const bufferLength = analyser.frequencyBinCount; const dataArray = new Uint8Array(bufferLength); let silenceStart = Date.now(); // Flag to track if human speech was actually detected let smartSpeechDetected = false; function detectSilence() { if (!isRecording) return; analyser.getByteFrequencyData(dataArray); // Calculate average volume let sum = 0; for (let i = 0; i < bufferLength; i++) sum += dataArray[i]; const average = sum / bufferLength; // Visual feedback const scale = 1 + (average / 100); recordBtn.style.transform = `scale(${Math.min(scale, 1.2)})`; // UI Feedback for waiting status if (average < VOLUME_THRESHOLD && !smartSpeechDetected) { statusText.innerText = 'πŸ’€ En attente de parole...'; statusText.style.color = 'rgba(255,255,255,0.7)'; } if (average < VOLUME_THRESHOLD) { // It is silent if (Date.now() - silenceStart > SILENCE_LIMIT_MS) { // Silence limit reached! Stop! console.log("🀫 Silence limit reached."); stopSmartRecording(); return; } } else { // Sound detected! 
silenceStart = Date.now(); if (!smartSpeechDetected) { smartSpeechDetected = true; // βœ… Valid speech detected console.log("πŸ—£οΈ Speech detected!"); statusText.innerText = '🎀 Je vous Γ©coute...'; statusText.style.color = '#fff'; recordBtn.classList.add('active-speech'); } } animationId = requestAnimationFrame(detectSilence); } detectSilence(); // Start monitoring // 2. Start Speech Recognition (Instant Mode) try { startRealTimeTranscription(); } catch (e) { } // 3. Start MediaRecorder mediaRecorder = new MediaRecorder(stream); audioChunks = []; mediaRecorder.ondataavailable = e => audioChunks.push(e.data); mediaRecorder.onstop = async () => { console.log("πŸ›‘ Recorder stopped. Processing audio..."); // Clean up if (recognition) { try { recognition.stop(); } catch (e) { } } if (animationId) cancelAnimationFrame(animationId); if (micSource) micSource.disconnect(); if (audioContext) audioContext.close(); if (audioChunks.length > 0) { const blob = new Blob(audioChunks, { type: 'audio/wav' }); console.log(`πŸ“¦ Audio Data: ${blob.size} bytes`); // FORCE PROCESSING - UI Feedback statusText.innerText = 'Traitement...'; statusText.style.color = '#4a9b87'; try { await processAudio(blob); } catch (e) { console.error("Error in processAudio", e); statusText.innerText = 'Erreur'; } } else { console.error("❌ Audio was empty!"); statusText.innerText = 'Audio Vide'; } // πŸ”„ AUTO-RESTART LOOP (Crucial for Continuous Conversation) if (window.continuousMode) { // πŸ›‘ CRITICAL FIX: DO NOT RESTART IF TTS IS PLAYING! 
if (isTTSPlaying && window.lastBotAudio) { console.log("⏸️ TTS Playing - Waiting for audio to finish before restarting..."); // Chain the restart to the onended event const originalEnded = window.lastBotAudio.onended; window.lastBotAudio.onended = () => { if (originalEnded) originalEnded(); console.log("βœ… TTS Finished - Restarting conversation loop"); // Small delay to ensure clean state setTimeout(() => { if (window.continuousMode) listenContinuously(); }, 100); }; return; } else { // No audio playing, restart immediately console.log("πŸ”„ Auto-restarting conversation loop (No TTS active)..."); setTimeout(() => { if (window.continuousMode) listenContinuously(); }, 100); } } else { statusText.innerText = 'PrΓͺt'; } }; mediaRecorder.start(); console.log("🎀 Recording started (with Auto-Stop)..."); } catch (err) { console.error(err); statusText.innerText = "Erreur Micro"; isRecording = false; recordBtn.classList.remove('active'); } } function stopSmartRecording() { if (mediaRecorder && mediaRecorder.state !== 'inactive') mediaRecorder.stop(); if (recognition) { try { recognition.stop(); } catch (e) { } } isRecording = false; recordBtn.classList.remove('active'); statusText.innerText = 'RΓ©flexion...'; } // [Function setupRealTimeTranscription removed - Consolidated into startRealTimeTranscription] function debouncedStreamTranslation(text) { if (streamTimeout) clearTimeout(streamTimeout); streamTimeout = setTimeout(() => performStreamTranslation(text), 200); } async function performStreamTranslation(text) { try { const res = await axios.post('/stream_text', { text: text, target_lang: quickLangSelector?.value || 'English' }); if (res.data.translation) { translatedTextField.innerText = res.data.translation; // ✨ SHOW TRANSLATION CARD - Make it visible! 
if (res.data.translation.trim().length > 0) { translatedTextField.style.opacity = '1'; console.log('🌍 Real-time translation:', res.data.translation); } } } catch (e) { console.error("Stream Error", e); } } // Helper function to analyze audio energy and detect silence function analyzeAudioEnergy(blob) { return new Promise((resolve) => { const reader = new FileReader(); reader.readAsArrayBuffer(blob); reader.onloadend = async () => { try { const audioContext = new (window.AudioContext || window.webkitAudioContext)(); const audioBuffer = await audioContext.decodeAudioData(reader.result); // Get audio samples const channelData = audioBuffer.getChannelData(0); // Calculate RMS (Root Mean Square) energy let sum = 0; for (let i = 0; i < channelData.length; i++) { sum += channelData[i] * channelData[i]; } const rms = Math.sqrt(sum / channelData.length); // Calculate peak amplitude let peak = 0; for (let i = 0; i < channelData.length; i++) { const abs = Math.abs(channelData[i]); if (abs > peak) peak = abs; } // Duration in seconds const duration = audioBuffer.duration; console.log(`πŸ”Š Audio Analysis: RMS=${rms.toFixed(4)}, Peak=${peak.toFixed(4)}, Duration=${duration.toFixed(2)}s`); // πŸ›‘οΈ WAR MODE SENSITIVITY: Pick up even whispers (0.002) // Was 0.01 - Lowered to prevent cutting out during "Chaos" resolve({ rms, peak, duration, isSilent: rms < 0.002 && peak < 0.01 }); } catch (e) { console.error('⚠️ Audio analysis failed:', e); resolve({ rms: 0, peak: 0, duration: 0, isSilent: true }); } }; }); } async function processAudio(blob, bypassSilenceCheck = false) { // πŸš€ PREVENT DUPLICATE PROCESSING if (isProcessingAudio) { console.log('⚠️ Audio already being processed, skipping...'); return; } isProcessingAudio = true; recordBtn.classList.add('processing'); // πŸ”΅ Visual Feedback: Blue Spinner recordBtn.classList.remove('active'); // Stop Red pulse recordBtn.classList.remove('active-speech'); // Stop Green pulse // πŸ”Š CRITICAL: SILENCE DETECTION - Prevent 
hallucination // ⚑ OPTIMIZATION: If we already confirmed speech in monitorAudio, skip this heavy decoding! if (!bypassSilenceCheck) { const audioAnalysis = await analyzeAudioEnergy(blob); // Reject if audio is too quiet (silence/background noise) if (audioAnalysis.isSilent) { console.warn('πŸ”‡ SILENCE DETECTED (Threshold check failed) - Skipping processing'); console.log(`πŸ“Š Analysis: RMS=${audioAnalysis.rms}, Peak=${audioAnalysis.peak}`); statusText.innerText = 'Trop silencieux'; isProcessingAudio = false; // Reset UI // ⚑ WAR MODE: Restart INSTANTLY (100ms) insead of 1s setTimeout(() => { statusText.innerText = 'PrΓͺt'; // Force restart if in continuous mode! if (window.continuousMode) listenContinuously(); }, 100); return; } // Reject if audio is too short (likely just a click) if (audioAnalysis.duration < 0.5) { console.log(`⏱️ Audio too short (${audioAnalysis.duration.toFixed(2)}s) - Skipping`); statusText.innerText = 'Audio trop court'; isProcessingAudio = false; setTimeout(() => { statusText.innerText = 'PrΓͺt'; if (window.continuousMode) listenContinuously(); }, 800); return; } } else { console.log("⚑ SPEED: Bypassing secondary silence check (Speech already confirmed)"); } console.log('βœ… Audio validation passed - Processing...'); const startTime = Date.now(); const reader = new FileReader(); reader.readAsDataURL(blob); reader.onloadend = async () => { const base64 = reader.result.split(',')[1]; try { // πŸš€ STEAL THE MICROPHONE (Client-Side STT Injection) // Use the global variable captured by Web Speech API directly! // This is cleaner than reading DOM. 
let textInput = (window.currentTranscript || originalTextField.innerText || "").trim(); // Clean up placeholders textInput = textInput.replace('...', '').replace('🎀', '').trim(); // Filter out placeholder indicators if (textInput.includes('Γ‰coute') || textInput.length < 2) { textInput = ''; // Empty = backend will use Whisper/Gemini transcription console.log('🎯 Using backend STT only (no client text available)'); } else { console.log(`🎀 Client-Side STT Injected: "${textInput}" (Skipping Server STT)`); } // Get languages from quick selectors const targetLangQuick = document.getElementById('target-lang-quick'); const sourceLangQuick = document.getElementById('source-lang-quick'); const selectedTarget = targetLangQuick?.value || quickLangSelector?.value || 'French'; const selectedSource = sourceLangQuick?.value || 'auto'; const settings = { audio: base64, text_input: textInput, // Only send real transcribed text, not placeholders target_language: selectedTarget, source_language: selectedSource === 'auto' ? 'auto' : selectedSource, // Pass manual selection to backend stt_engine: localStorage.getItem('sttEngine') || 'openai-whisper', // ⚑ WHISPER (Requested by User) model: localStorage.getItem('aiModel') || 'gpt-4o-mini', // βœ… CHATGPT (Requested by User) tts_engine: localStorage.getItem('ttsEngine') || 'seamless', // πŸ”Š SEAMLESS TTS (Kaggle GPU - FREE!) openai_api_key: localStorage.getItem('openaiKey'), google_api_key: localStorage.getItem('googleKey'), // βœ… For Gemini STT openai_voice: localStorage.getItem('openaiVoice') || 'nova', elevenlabs_key: localStorage.getItem('elevenlabsKey'), // Fixed: elevenlabs_key not elevenlabs_api_key use_grammar_correction: localStorage.getItem('grammarCorrectionEnabled') !== 'false', // Default: enabled voice_gender_preference: localStorage.getItem('voiceGenderPreference') || 'auto' // πŸŽ™οΈ Voice gender: auto/male/female }; console.log(`πŸ“ Grammar Correction: ${settings.use_grammar_correction ? 
'ENABLED (GPT)' : 'DISABLED (Direct Translation)'}`); console.log(`πŸŽ™οΈ Voice Gender Preference: ${settings.voice_gender_preference.toUpperCase()}`); // VOICE CLONING LOGIC - Check if enabled via toggle // πŸš€ SPEED FIRST: Default is DISABLED for instant translations const voiceCloneEnabled = localStorage.getItem('voiceCloneEnabled') === 'true'; // Default: DISABLED const ttsEngine = settings.tts_engine; console.log(`🎭 Voice Cloning Status: ${voiceCloneEnabled ? 'ENABLED' : 'DISABLED'}`); // Send voice cloning data if enabled if (voiceCloneEnabled) { console.log('🎀 Voice Cloning ENABLED β†’ Sending audio sample to server'); settings.voice_audio = `data:audio/wav;base64,${base64}`; settings.voice_cloning = true; } else { console.log('πŸ”‡ Voice Cloning DISABLED β†’ Using gender-matched fallback voices'); settings.voice_cloning = false; } const res = await axios.post('/process_audio', settings); if (res.data.translated_text) { const translation = res.data.translated_text; const userText = settings.text_input; console.log('βœ… Response received:', { original: userText?.substring(0, 50), translation: translation?.substring(0, 50), hasAudio: !!res.data.tts_audio }); // πŸ”Š FORCE DISPLAY RESULT (Fallback) const resultDisplay = document.getElementById('result-display'); const originalDisplay = document.getElementById('original-display'); const translationDisplay = document.getElementById('translation-display'); const pronunciationDisplay = document.getElementById('pronunciation-display'); const greeting = document.getElementById('greeting'); if (resultDisplay && translationDisplay) { if (greeting) greeting.style.display = 'none'; resultDisplay.style.display = 'block'; if (originalDisplay) originalDisplay.innerText = userText || 'Audio input'; // Pronunciation if (pronunciationDisplay) { const pronunciation = res.data.pronunciation; if (pronunciation && pronunciation !== translation) { pronunciationDisplay.innerText = pronunciation; pronunciationDisplay.style.display 
= 'block'; } else { pronunciationDisplay.style.display = 'none'; } } translationDisplay.innerText = translation; console.log('πŸ“Ί Result displayed on screen'); } // πŸ”Š FORCE PLAY AUDIO if (res.data.tts_audio) { const audioSrc = `data:audio/mp3;base64,${res.data.tts_audio}`; const audio = new Audio(audioSrc); audio.play().then(() => { console.log('πŸ”Š Audio playing!'); }).catch(err => { console.log('❌ Auto-play blocked:', err); // Show play button if (translationDisplay) { translationDisplay.innerHTML += ' '; } }); window.lastAudio = audio; } // πŸ›‘οΈ HALLUCINATION CHECK - Block fake Whisper outputs // userText already defined above if (isHallucination(userText) || isHallucination(translation)) { console.log(`🚫 HALLUCINATION DETECTED - Skipping message creation`); console.log(` User: "${userText}" | Translation: "${translation}"`); statusText.innerText = 'PrΓͺt'; isProcessingAudio = false; recordBtn.classList.remove('processing'); return; } // AUTOMATIC LANGUAGE DETECTION - Update UI if (res.data.source_language_full && sourceLangSelector) { const detectedLang = res.data.source_language_full; // Store detected language for potential future auto-selection detectedLanguage = detectedLang; console.log(`🌍 Language auto-detected: ${detectedLang}`); // Update real-time transcription language for next recording if (recognition) { const langMap = { 'English': 'en-US', 'French': 'fr-FR', 'Spanish': 'es-ES', 'German': 'de-DE', 'Italian': 'it-IT', 'Portuguese': 'pt-PT', 'Russian': 'ru-RU', 'Japanese': 'ja-JP', 'Korean': 'ko-KR', 'Chinese': 'zh-CN', 'Arabic': 'ar-SA', 'Hindi': 'hi-IN', 'Dutch': 'nl-NL', 'Polish': 'pl-PL', 'Turkish': 'tr-TR', 'Indonesian': 'id-ID', 'Malay': 'ms-MY', 'Thai': 'th-TH', 'Vietnamese': 'vi-VN', 'Bengali': 'bn-IN', 'Urdu': 'ur-PK', 'Swahili': 'sw-KE', 'Hebrew': 'he-IL', 'Persian': 'fa-IR', 'Ukrainian': 'uk-UA', 'Swedish': 'sv-SE', 'Greek': 'el-GR', 'Czech': 'cs-CZ', 'Romanian': 'ro-RO', 'Hungarian': 'hu-HU', 'Danish': 'da-DK', 'Finnish': 'fi-FI', 
'Norwegian': 'no-NO', 'Slovak': 'sk-SK', 'Filipino': 'fil-PH', 'Amharic': 'am-ET' }; const speechLang = langMap[detectedLang] || navigator.language || 'en-US'; console.log(`🎀 Speech recognition updated to: ${speechLang}`); } } // Hide greeting const greetingEl = document.getElementById('greeting'); if (greetingEl) greetingEl.style.display = 'none'; // 2. Add User Message to Chat (with source language) // userText already defined above for hallucination check const sourceLang = res.data.source_language_full || 'Auto'; const targetLang = res.data.target_language || 'Translation'; createChatMessage('user', userText, null, null, sourceLang); // 3. Create NEW audio for this message let messageAudioSrc = null; if (res.data.tts_audio) { messageAudioSrc = `data:audio/mp3;base64,${res.data.tts_audio}`; // Play on global player audioPlayer.src = messageAudioSrc; audioPlayer.play().catch(err => { console.log('Auto-play blocked:', err); }); } // 4. Add Bot Message with its OWN Audio Player (with target language) const info = { latency: ((Date.now() - startTime) / 1000).toFixed(2), stt: res.data.stt_engine, translation: res.data.translation_engine, tts: res.data.tts_engine }; createChatMessage('bot', translation, messageAudioSrc, info, targetLang); // πŸŽ™οΈ IMPORTANT: Update status based on mode if (window.continuousMode) { // Keep button active in continuous mode statusText.innerText = 'Γ‰coute en continu...'; console.log('βœ… TTS gΓ©nΓ©rΓ© - En attente de la prochaine phrase'); } else { // Normal mode: reset button isRecording = false; recordBtn.classList.remove('active'); recordBtn.disabled = false; statusText.innerText = 'PrΓͺt'; console.log('βœ… TTS gΓ©nΓ©rΓ© - Bouton prΓͺt'); } } } catch (e) { console.error("Erreur de traitement:", e); statusText.innerText = "Erreur de connexion"; // Re-enable button even on error isRecording = false; recordBtn.classList.remove('active'); recordBtn.disabled = false; } finally { // Ensure button is always ready recordBtn.disabled = 
false; recordBtn.classList.remove('processing'); // πŸ”΅ Stop Blue Spinner isProcessingAudio = false; // πŸš€ Reset processing flag // If NOT continuous mode, ensure text says Ready if (!window.continuousMode) { statusText.innerText = 'PrΓͺt'; } } }; } // Logic de sauvegarde/chargement des paramΓ¨tres window.loadModalSettings = () => { document.getElementById('stt-engine').value = localStorage.getItem('sttEngine') || 'openai-whisper'; document.getElementById('openai-key').value = localStorage.getItem('openaiKey') || ''; if (localStorage.getItem('sourceLang')) document.getElementById('source-lang-selector').value = localStorage.getItem('sourceLang'); // 🎯 Set default target language for bidirectional translation const savedTargetLang = localStorage.getItem('targetLang'); if (savedTargetLang && quickLangSelector) { quickLangSelector.value = savedTargetLang; } else if (quickLangSelector) { // Default to French for Arabic ↔ French bidirectional translation quickLangSelector.value = 'French'; console.log('🌍 Default target language set to French for bidirectional translation'); } }; window.saveModalSettings = () => { localStorage.setItem('sttEngine', document.getElementById('stt-engine').value); localStorage.setItem('openaiKey', document.getElementById('openai-key').value); localStorage.setItem('targetLang', quickLangSelector.value); localStorage.setItem('sourceLang', document.getElementById('source-lang-selector').value); }; // Removed: replay-trigger button (deleted from HTML) // Functionality removed as button no longer exists // =================================== // πŸ› οΈ BUTTON TOGGLE LOGIC (FIXED) // =================================== function setupToggle(id, storageKey, defaultValue, onToggle) { const btn = document.getElementById(id); if (!btn) return; // Load initial state const saved = localStorage.getItem(storageKey); const isActive = saved === null ? 
defaultValue : saved === 'true'; if (isActive) btn.classList.add('active'); else btn.classList.remove('active'); btn.addEventListener('click', (e) => { e.stopPropagation(); // Prevent bubbling const currentlyActive = btn.classList.contains('active'); const newState = !currentlyActive; // Toggle visual if (newState) btn.classList.add('active'); else btn.classList.remove('active'); // Save state localStorage.setItem(storageKey, newState); // Optional callback if (onToggle) onToggle(newState); console.log(`πŸ”˜ Toggle ${id}: ${newState ? 'ON' : 'OFF'}`); }); } function setupCycle(id, storageKey, values, onCycle) { const btn = document.getElementById(id); if (!btn) return; // Load initial state let currentVal = localStorage.getItem(storageKey) || values[0]; if (!values.includes(currentVal)) currentVal = values[0]; // Fallback const updateVisual = (val) => { // Remove all active classes first if needed, or just set generic active // For gender, we might want different icons? // For now, simple active state if not default if (val !== values[0]) btn.classList.add('active'); else btn.classList.remove('active'); // Tooltip feedback btn.title = `Mode: ${val.toUpperCase()}`; }; updateVisual(currentVal); btn.addEventListener('click', (e) => { e.stopPropagation(); const currentIndex = values.indexOf(currentVal); const nextIndex = (currentIndex + 1) % values.length; currentVal = values[nextIndex]; localStorage.setItem(storageKey, currentVal); updateVisual(currentVal); if (onCycle) onCycle(currentVal); console.log(`πŸ”„ Cycle ${id}: ${currentVal}`); // Visual text feedback (Toast) statusText.innerText = `Mode: ${currentVal.toUpperCase()}`; setTimeout(() => statusText.innerText = 'PrΓͺt', 1500); }); } // 🎯 Initialize Toggles when DOM is ready document.addEventListener('DOMContentLoaded', () => { // 1. Magic/Grammar Toggle setupToggle('grammar-toggle', 'grammarCorrectionEnabled', true, (state) => { statusText.innerText = state ? 
'✨ Correction: ON' : 'πŸ“ Correction: OFF'; setTimeout(() => statusText.innerText = 'PrΓͺt', 1500); }); // 2. Voice Gender Toggle (Auto -> Male -> Female) setupCycle('voice-gender-toggle', 'voiceGenderPreference', ['auto', 'male', 'female']); // 3. Smart Mode Toggle (Brain) setupToggle('smart-mode-toggle', 'smartModeEnabled', true, (state) => { statusText.innerText = state ? '🧠 Mode Smart: ON' : '🧠 Mode Smart: OFF'; setTimeout(() => statusText.innerText = 'PrΓͺt', 1500); }); }); // βš™οΈ SETTINGS MODAL LOGIC (Fixing the "Broken Button") document.addEventListener('DOMContentLoaded', () => { const settingsBtn = document.getElementById('settings-trigger'); // Found ID const closeSettingsBtn = document.getElementById('close-settings'); const settingsModal = document.getElementById('settings-modal'); // Open if (settingsBtn && settingsModal) { settingsBtn.addEventListener('click', () => { settingsModal.style.display = 'flex'; // Or remove hidden class // settingsModal.classList.remove('hidden'); // If using classes console.log('βš™οΈ Settings Opened'); }); } else { console.error('❌ Settings Trigger or Modal NOT FOUND'); } // Close Button if (closeSettingsBtn && settingsModal) { closeSettingsBtn.addEventListener('click', () => { settingsModal.style.display = 'none'; }); } // Close on Outside Click window.addEventListener('click', (e) => { if (e.target === settingsModal) { settingsModal.style.display = 'none'; } }); // πŸ’Ύ SAVE & LOAD LOGIC const saveBtn = document.getElementById('save-settings'); const aiSelector = document.getElementById('ai-model-selector'); const ttsSelector = document.getElementById('tts-selector'); // Load Initial Values if (aiSelector) aiSelector.value = localStorage.getItem('aiModel') || 'gpt-4o-mini'; if (ttsSelector) ttsSelector.value = localStorage.getItem('ttsEngine') || 'openai'; // Save Handler if (saveBtn) { saveBtn.addEventListener('click', () => { if (aiSelector) { localStorage.setItem('aiModel', aiSelector.value); console.log(`🧠 AI 
Model set to: ${aiSelector.value}`); } if (ttsSelector) { localStorage.setItem('ttsEngine', ttsSelector.value); console.log(`πŸ—£οΈ TTS Engine set to: ${ttsSelector.value}`); } // Close Modal if (settingsModal) settingsModal.style.display = 'none'; // Feedback if (statusText) { statusText.innerText = 'βœ… SauvegardΓ©!'; setTimeout(() => statusText.innerText = 'PrΓͺt', 2000); } }); } }); // 🎯 Initialize language settings on page load document.addEventListener('DOMContentLoaded', () => { // Set default target language for bidirectional translation if (quickLangSelector) { const savedTargetLang = localStorage.getItem('targetLang'); if (savedTargetLang) { quickLangSelector.value = savedTargetLang; console.log(`🌍 Loaded saved target language: ${savedTargetLang}`); } else { quickLangSelector.value = 'French'; console.log('🌍 Default target language set to French for Arabic ↔ French bidirectional translation'); } } // Set source to auto for automatic detection const sourceLangSelector = document.getElementById('source-lang-selector'); if (sourceLangSelector) { sourceLangSelector.value = 'auto'; console.log('🎯 Source language set to AUTO for automatic detection'); // πŸ”„ HARD SYNC: If user changes Source, clear "Smart History" to prevent confusion sourceLangSelector.addEventListener('change', function () { console.log('πŸ”„ Source Language Changed -> Clearing Smart History...'); localStorage.setItem('sourceLang', this.value); fetch('/clear_cache', { method: 'POST' }); }); } // πŸ”„ HARD SYNC: If user changes Target, clear "Smart History" to prevent confusion if (quickLangSelector) { quickLangSelector.addEventListener('change', function () { console.log('πŸ”„ Target Language Changed -> Clearing Smart History...'); localStorage.setItem('targetLang', this.value); fetch('/clear_cache', { method: 'POST' }); }); } // =================================== // 🎭 VOICE CLONING TOGGLE // =================================== let voiceCloneEnabled = 
localStorage.getItem('voiceCloneEnabled') !== 'false'; // Default: ON const voiceCloneToggle = document.getElementById('voice-clone-toggle'); if (voiceCloneToggle) { // Set initial state if (voiceCloneEnabled) { voiceCloneToggle.classList.add('active'); } else { voiceCloneToggle.classList.remove('active'); } // Toggle on click voiceCloneToggle.addEventListener('click', function () { voiceCloneEnabled = !voiceCloneEnabled; localStorage.setItem('voiceCloneEnabled', voiceCloneEnabled); if (voiceCloneEnabled) { this.classList.add('active'); console.log('🎭 Voice Cloning: ON'); } else { this.classList.remove('active'); console.log('🎭 Voice Cloning: OFF'); } }); } });