Spaces:
Running
Running
// Babel - Main Script (Cleaned & Optimized)
//
// Module-level recording / playback state, shared across the record-button
// handlers, the continuous-listening loop and TTS playback.
let mediaRecorder;              // Active MediaRecorder for the current cycle.
let audioChunks = [];           // Raw audio chunks collected by mediaRecorder.
let isRecording = false;        // True while a listening cycle is in progress.
let audioContext;               // Shared AudioContext (kept warm between cycles).
let analyser;                   // AnalyserNode used for volume/silence detection.
let micSource;                  // MediaStreamSource feeding the analyser.
let animationId;                // requestAnimationFrame id of the monitor loop.
let recognition;                // Browser SpeechRecognition (visual feedback only).
let streamTimeout;              // NOTE(review): never assigned in this chunk - confirm use.
let globalStream = null;        // Persistent microphone stream, reused across cycles.
let isRestarting = false;       // Prevents double restart logic.
let isProcessingAudio = false;  // Prevents duplicate audio processing.
let detectedLanguage = null;    // Cached detected language.
let isTTSPlaying = false;       // True during TTS playback (mic-feedback guard).
let textProcessingTriggered = false; // True once text was sent (no double-processing).
let silenceDetectionActive = true;   // Enables/disables the silence-detection loop.
let currentRecognitionLang = 'fr-FR'; // Current recognition language (duplex mode).
// Global flags exposed on window for other scripts / the replay button.
window.continuousMode = false;
window.lastBotAudio = null; // Last bot Audio element, for the Replay button.
// Cycle counter: invalidates handlers left behind by abandoned recorders.
let currentCycleId = 0;
// Silence-detection thresholds (tuned sensitive to limit STT hallucination).
const VOLUME_THRESHOLD = 8; // Noise gate: very sensitive (was 10).
const SILENCE_LIMIT_MS = 5000; // 5.0s of allowed silence - generous pause time.
const SILENCE_THRESHOLD = 8; // Legacy support.
const MIN_RECORDING_TIME = 500; // Minimum recording length, ms.
const MIN_SPEECH_VOLUME = 5; // Minimum average volume considered speech.
const TYPING_SPEED_MS = 25; // NOTE(review): unused in this chunk - confirm.
// Chat UI helper: hashes of recently rendered messages (deduplication window).
let recentMessages = new Set();
// Whisper hallucination filter - phrases the STT model commonly invents when
// fed silence or background noise (English and French variants).
const HALLUCINATION_PHRASES = [
    'thanks for watching',
    'thank you for watching',
    'subscribe',
    'like and subscribe',
    'see you next time',
    'bye bye',
    'goodbye',
    'merci d\'avoir regardรฉ',
    'merci de votre attention',
    'ร bientรดt',
    'sous-titres',
    'sous-titrage',
    'subtitles by',
    'transcribed by',
    'music',
    'applause',
    '[music]',
    '[applause]',
    '...',
    'you',
    'the',
    'i',
    'a'
];
/**
 * Heuristic filter for STT hallucinations (phrases Whisper-style models emit
 * on silence or background noise).
 *
 * Fix: the previous repeated-word regex /^(\w+\s*)\1+$/ required the
 * whitespace separator INSIDE the backreference unit, so trimmed output like
 * "blah blah blah" was never matched. An additional word-repeat pattern is
 * tested as well; the old pattern is kept so previously blocked strings
 * (e.g. "hahaha") are still blocked.
 *
 * @param {string|null|undefined} text - Raw transcript candidate.
 * @returns {boolean} true when the text should be discarded as noise.
 */
function isHallucination(text) {
    if (!text) return true;
    const cleaned = text.toLowerCase().trim();
    // Very short output is almost always noise, not speech.
    if (cleaned.length < 3) return true;
    // Exact or sentence-initial match against known hallucination phrases.
    for (const phrase of HALLUCINATION_PHRASES) {
        if (cleaned === phrase || cleaned.startsWith(phrase + '.') || cleaned.startsWith(phrase + '!')) {
            console.log(`๐ซ HALLUCINATION BLOCKED: "${text}"`);
            return true;
        }
    }
    // Single repeated character ("aaaa"), repeated unit ("hahaha"), or a
    // word repeated with whitespace separators ("blah blah blah").
    if (/^(.)\1*$/.test(cleaned) || /^(\w+\s*)\1+$/.test(cleaned) || /^(\w+)(\s+\1)+$/.test(cleaned)) {
        console.log(`๐ซ REPEATED PATTERN BLOCKED: "${text}"`);
        return true;
    }
    return false;
}
/**
 * Renders a chat message bubble into #chat-history.
 *
 * For bot messages it also handles audio: plays server-provided TTS audio
 * with a custom play/pause + progress widget (auto-playing when allowed),
 * or falls back to the browser's SpeechSynthesis when no audioSrc is given.
 * Messages are filtered through isHallucination() and deduplicated over a
 * 5-second window.
 *
 * Fixes vs. previous revision:
 *  - opacity is set AFTER style.cssText (cssText replaced the whole inline
 *    style, wiping the earlier opacity, so the fade-in never faded);
 *  - statusText is null-guarded inside the audio handlers, consistent with
 *    the guards used elsewhere in this file;
 *  - the progress computation guards against NaN duration before the
 *    audio metadata has loaded.
 *
 * @param {string} role - 'user' or 'bot'.
 * @param {string} text - Message text.
 * @param {string|null} [audioSrc] - URL of server TTS audio, if any.
 * @param {object|null} [info] - Extra metadata (currently unused here).
 * @param {string|null} [lang] - Label for the language badge.
 */
function createChatMessage(role, text, audioSrc = null, info = null, lang = null) {
    const chatHistory = document.getElementById('chat-history');
    if (!chatHistory) return;
    // Block hallucinated transcripts at the rendering layer as well.
    if (isHallucination(text)) {
        console.log(`๐ซ createChatMessage: Hallucination blocked: "${text}"`);
        return;
    }
    // Deduplication shield: drop identical role+text messages repeated
    // within a 5-second window.
    const normalizedText = text.trim().toLowerCase().substring(0, 100);
    const messageHash = `${role}-${normalizedText}`;
    if (recentMessages.has(messageHash)) {
        console.log(`๐ก๏ธ VISUAL SHIELD: Blocked duplicate message: "${text.substring(0, 30)}..."`);
        return;
    }
    recentMessages.add(messageHash);
    setTimeout(() => recentMessages.delete(messageHash), 5000); // 5s blocking period.
    const msgDiv = document.createElement('div');
    msgDiv.className = `message ${role}-message`;
    msgDiv.style.cssText = `
        background: ${role === 'user' ? 'rgba(30, 30, 35, 0.8)' : 'rgba(45, 45, 52, 0.8)'};
        border-radius: 16px;
        padding: 20px;
        margin-bottom: 16px;
        border: 1px solid ${role === 'user' ? 'rgba(60, 60, 70, 0.5)' : 'rgba(80, 80, 90, 0.5)'};
    `;
    // FIX: must come after cssText, which replaces the whole inline style.
    msgDiv.style.opacity = '0'; // Start invisible for the fade-in animation.
    // Language badge (always shown).
    const langBadge = document.createElement('div');
    langBadge.className = 'lang-badge';
    langBadge.style.cssText = `
        display: inline-block;
        background: ${role === 'user' ? 'rgba(60, 60, 70, 0.6)' : 'rgba(80, 80, 90, 0.6)'};
        color: ${role === 'user' ? '#a0a0a8' : '#c0c0c8'};
        padding: 6px 12px;
        border-radius: 8px;
        font-size: 0.75rem;
        font-weight: 600;
        text-transform: uppercase;
        letter-spacing: 0.05em;
        margin-bottom: 12px;
    `;
    // Badge label: explicit language when known, otherwise a role default.
    const langDisplay = lang || (role === 'user' ? 'Input' : 'Translation');
    langBadge.innerText = `Language: ${langDisplay}`;
    msgDiv.appendChild(langBadge);
    // Message text - large and clear. innerText keeps untrusted text inert.
    const textDiv = document.createElement('div');
    textDiv.className = 'message-content';
    textDiv.style.cssText = `
        font-size: 1.25rem;
        line-height: 1.7;
        color: #ffffff;
        font-weight: 400;
        margin-top: 8px;
    `;
    textDiv.innerText = text;
    msgDiv.appendChild(textDiv);
    // Audio player (bot messages only). When the server sent no audio,
    // fall back to browser TTS.
    if (role === 'bot') {
        if (!audioSrc) {
            console.warn("โ ๏ธ No Audio from Server (API Limit/Error). Using Browser TTS Fallback.");
            const utterance = new SpeechSynthesisUtterance(text);
            // utterance.lang could be derived from `info` when available.
            window.speechSynthesis.speak(utterance);
        } else {
            // Server audio: build the play button + progress bar widget.
            const audioContainer = document.createElement('div');
            audioContainer.className = 'audio-container';
            audioContainer.style.marginTop = '12px';
            audioContainer.style.background = 'rgba(0,0,0,0.1)';
            audioContainer.style.borderRadius = '8px';
            audioContainer.style.padding = '8px';
            audioContainer.style.display = 'flex';
            audioContainer.style.alignItems = 'center';
            audioContainer.style.gap = '10px';
            const playBtn = document.createElement('button');
            playBtn.innerHTML = '<i class="fa-solid fa-play"></i>';
            playBtn.className = 'icon-btn'; // Re-use existing class.
            playBtn.style.width = '32px';
            playBtn.style.height = '32px';
            playBtn.style.background = '#fff';
            playBtn.style.color = '#333';
            // Static "waveform" bar (aesthetic only) with a progress overlay.
            const waveDiv = document.createElement('div');
            waveDiv.style.flex = '1';
            waveDiv.style.height = '4px';
            waveDiv.style.background = 'rgba(255,255,255,0.3)';
            waveDiv.style.borderRadius = '2px';
            waveDiv.style.position = 'relative';
            const progressDiv = document.createElement('div');
            progressDiv.style.width = '0%';
            progressDiv.style.height = '100%';
            progressDiv.style.background = '#fff';
            progressDiv.style.borderRadius = '2px';
            progressDiv.style.transition = 'width 0.1s linear';
            waveDiv.appendChild(progressDiv);
            // Audio element wiring.
            const audio = new Audio(audioSrc);
            audio.preload = 'auto'; // Force immediate buffering.
            // Expose for the global Replay button.
            window.lastBotAudio = audio;
            playBtn.onclick = () => {
                if (audio.paused) {
                    audio.play();
                    playBtn.innerHTML = '<i class="fa-solid fa-pause"></i>';
                } else {
                    audio.pause();
                    playBtn.innerHTML = '<i class="fa-solid fa-play"></i>';
                }
            };
            // Pause speech recognition while TTS plays (feedback-loop guard).
            audio.onplay = () => {
                isTTSPlaying = true;
                console.log('๐ TTS Started - Pausing speech recognition to prevent feedback');
                if (recognition) {
                    try {
                        recognition.stop();
                        console.log('โธ๏ธ Paused speech recognition during TTS');
                    } catch (e) { }
                }
                // Duplex mode: MediaRecorder keeps running; only recognition
                // is paused to avoid feedback.
                console.log('๐๏ธ MediaRecorder continues running during TTS');
            };
            audio.onended = () => {
                playBtn.innerHTML = '<i class="fa-solid fa-play"></i>';
                progressDiv.style.width = '0%';
                // Resume listening after TTS completes.
                isTTSPlaying = false;
                console.log('โ TTS ended - Ready for next conversation');
                if (window.continuousMode) {
                    // FIX: guard statusText like the rest of the file does.
                    if (statusText) {
                        statusText.innerText = '๐ค Prรชt pour la suite...';
                        statusText.style.color = '#4a9b87';
                    }
                    console.log('๐ Continuous mode active - system will listen automatically');
                }
            };
            // Error handling so a bad audio URL cannot wedge isTTSPlaying.
            audio.onerror = (e) => {
                console.error('โ TTS playback error:', e);
                isTTSPlaying = false;
                playBtn.innerHTML = '<i class="fa-solid fa-play"></i>';
                if (window.continuousMode && statusText) {
                    statusText.innerText = 'โ ๏ธ Erreur TTS - Prรชt';
                    statusText.style.color = '#ff6b6b';
                }
            };
            audio.ontimeupdate = () => {
                // FIX: duration is NaN until metadata loads; avoid NaN width.
                const percent = audio.duration ? (audio.currentTime / audio.duration) * 100 : 0;
                progressDiv.style.width = `${percent}%`;
            };
            // Readiness hooks (currently no-ops, kept as extension points).
            audio.oncanplay = () => {
                // Ready to start.
            };
            audio.oncanplaythrough = () => {
                // Fully ready.
            };
            audioContainer.appendChild(playBtn);
            audioContainer.appendChild(waveDiv);
            msgDiv.appendChild(audioContainer);
            // Auto-play immediately; the onplay handler pauses the mic.
            // Browsers may reject this without a prior user gesture.
            const playPromise = audio.play();
            if (playPromise !== undefined) {
                playPromise.then(_ => {
                    playBtn.innerHTML = '<i class="fa-solid fa-pause"></i>';
                }).catch(error => {
                    console.log("Auto-play blocked by browser policy:", error);
                    if (isTTSPlaying) {
                        // Don't stay stuck in the "TTS playing" state.
                        console.warn("โ ๏ธ Autoplay blocked. Resetting state.");
                        isTTSPlaying = false;
                        playBtn.innerHTML = '<i class="fa-solid fa-play"></i>';
                    }
                });
            }
        } // End of else (server audio).
    } // End of bot role.
    chatHistory.appendChild(msgDiv);
    // Auto-scroll both the chat container and the window to the new message.
    const scrollToBottom = () => {
        chatHistory.scrollTo({
            top: chatHistory.scrollHeight,
            behavior: 'smooth'
        });
        // Also scroll the window (the main stage is not scrollable).
        window.scrollTo({
            top: document.body.scrollHeight,
            behavior: 'smooth'
        });
    };
    // Scroll now, then again after layout/animation settles.
    scrollToBottom();
    setTimeout(scrollToBottom, 300);
    setTimeout(scrollToBottom, 600);
    // Fade-in animation: slide up from 10px while fading to opacity 1.
    setTimeout(() => {
        msgDiv.style.transition = 'opacity 0.3s ease, transform 0.3s ease';
        msgDiv.style.transform = 'translateY(10px)';
        requestAnimationFrame(() => {
            msgDiv.style.opacity = '1';
            msgDiv.style.transform = 'translateY(0)';
        });
    }, 50);
}
// DOM element handles - declared here, assigned in initializeApp() once the
// DOM is ready. Referenced throughout the listening/TTS code.
let recordBtn, statusText, settingsBtn, settingsModal, audioPlayer;
let originalTextField, translatedTextField, quickLangSelector, sourceLangSelector, aiModelSelector;
// Audio unlocker: briefly renders a near-silent oscillator during a user
// gesture so the browser lifts its media autoplay restriction.
function unlockAudioContext() {
    try {
        const Ctor = window.AudioContext || window.webkitAudioContext;
        const context = new Ctor();
        const oscillator = context.createOscillator();
        const gainNode = context.createGain();
        // Inaudible, but non-zero so the graph actually renders audio.
        gainNode.gain.value = 0.001;
        oscillator.connect(gainNode);
        gainNode.connect(context.destination);
        oscillator.start(0);
        // Tear the throwaway graph down shortly after.
        setTimeout(() => {
            oscillator.stop();
            context.close();
        }, 100);
        console.log("๐ Audio Autoplay Unlocked");
    } catch (e) {
        // No AudioContext available (or already unlocked) - nothing to do.
        console.log("Audio unlock not needed");
    }
}
// ============================================================
// ๐ฏ INITIALIZE EVERYTHING WHEN DOM IS READY
// ============================================================
/**
 * Wires up the whole UI: resolves DOM handles, attaches the record-button
 * toggle (start/stop continuous mode with full teardown on stop), adds
 * mobile touch support, and initializes the quick language selectors
 * (source/target/swap) with localStorage persistence.
 */
function initializeApp() {
    console.log('๐ฏ initializeApp() called');
    // FIXME(security): hard-coded API key shipped in client-side source and
    // written to localStorage. Rotate this key and serve it from the backend.
    if (!localStorage.getItem('googleKey')) {
        console.log('๐ FULL AUTO: Injecting Google API Key...');
        localStorage.setItem('googleKey', 'AIzaSyDB9wiqXsy1dG9OLU9r4Tar8oDdeVy4NOQ');
    }
    // Resolve DOM handles (declared at module level).
    recordBtn = document.getElementById('record-btn');
    statusText = document.getElementById('status-placeholder');
    settingsBtn = document.getElementById('settings-trigger');
    settingsModal = document.getElementById('settings-modal');
    audioPlayer = document.getElementById('audio-player');
    originalTextField = document.getElementById('original-text');
    translatedTextField = document.getElementById('translated-text');
    quickLangSelector = document.getElementById('target-lang-quick'); // Uses the quick-selector id.
    sourceLangSelector = document.getElementById('source-lang-selector');
    aiModelSelector = document.getElementById('ai-model');
    console.log('๐ฆ DOM Elements loaded:');
    console.log(' - recordBtn:', recordBtn ? 'โ FOUND' : 'โ NOT FOUND');
    console.log(' - statusText:', statusText ? 'โ FOUND' : 'โ NOT FOUND');
    // The record button is essential; bail out loudly when it is missing.
    if (!recordBtn) {
        console.error('โโโ CRITICAL: record-btn NOT FOUND IN DOM! โโโ');
        return;
    }
    // Record button: toggles continuous conversation mode.
    console.log('๐ง Attaching click handler...');
    recordBtn.onclick = async function (e) {
        console.log('๐๐๐ BUTTON CLICKED! ๐๐๐');
        e.preventDefault();
        e.stopPropagation();
        // A user gesture is required to lift the autoplay restriction.
        unlockAudioContext();
        if (!window.continuousMode) {
            // START continuous listening.
            console.log('โถ๏ธ Starting continuous mode...');
            window.continuousMode = true;
            this.classList.add('active');
            if (statusText) {
                statusText.innerText = 'รcoute en continu...';
                statusText.style.color = '#4a9b87';
            }
            try {
                await listenContinuously();
            } catch (error) {
                // Surface the failure and roll the UI state back.
                console.error('โ Error:', error);
                window.continuousMode = false;
                this.classList.remove('active');
                if (statusText) {
                    statusText.innerText = 'Erreur: ' + error.message;
                    statusText.style.color = '#ff6b6b';
                }
            }
        } else {
            // STOP continuous mode and tear everything down.
            console.log('โน๏ธ Stopping continuous mode...');
            window.continuousMode = false;
            this.classList.remove('active');
            this.classList.remove('active-speech');
            this.classList.remove('processing');
            // Best-effort shutdown of recorder / recognition / audio graph.
            try {
                if (mediaRecorder && mediaRecorder.state !== 'inactive') {
                    mediaRecorder.stop();
                }
                if (recognition) {
                    recognition.stop();
                    recognition = null;
                }
                if (audioContext && audioContext.state !== 'closed') {
                    audioContext.close();
                }
            } catch (e) {
                console.warn('Cleanup warning:', e);
            }
            // Full memory cleanup so the next session starts fresh.
            console.log('๐งน Cleaning up memory and cache...');
            audioContext = null;
            analyser = null;
            micSource = null;
            mediaRecorder = null;
            audioChunks = [];
            isRecording = false;
            isProcessingAudio = false;
            // NOTE(review): speechDetected is not declared in this scope -
            // this creates an implicit global (ReferenceError in strict mode)
            // and does NOT reset the per-cycle flag local to
            // listenContinuously(). Confirm intent.
            speechDetected = false;
            textProcessingTriggered = false;
            // Cancel the volume-monitor animation loop.
            if (animationId) {
                cancelAnimationFrame(animationId);
                animationId = null;
            }
            // Clear the backend conversation cache for a fresh start.
            fetch('/clear_cache', { method: 'POST' })
                .then(res => res.json())
                .then(data => console.log(`โ Backend cache cleared: ${data.cleared} entries`))
                .catch(e => console.warn('Cache clear failed:', e));
            if (statusText) {
                statusText.innerText = 'Arrรชtรฉ';
                statusText.style.color = '#888';
            }
            console.log('โ Stopped');
            // Release the persistent mic stream on full stop.
            if (globalStream) {
                try {
                    globalStream.getTracks().forEach(track => track.stop());
                } catch (e) { }
                globalStream = null;
            }
        }
    };
    // Mobile touch support: touchstart drives the same logic as click;
    // touchHandled suppresses the synthesized click that follows a touch.
    let touchHandled = false;
    recordBtn.addEventListener('touchstart', (e) => {
        e.preventDefault(); // Prevent mouse event simulation.
        touchHandled = true;
        recordBtn.onclick(e); // Trigger the same logic.
    }, { passive: false });
    recordBtn.addEventListener('touchend', (e) => {
        e.preventDefault();
    }, { passive: false });
    // Desktop fallback: ignore the click when touch already handled it.
    recordBtn.addEventListener('click', (e) => {
        if (touchHandled) {
            touchHandled = false;
            return; // Already handled by touch.
        }
        // Desktop logic continues normally (via the onclick handler above).
    });
    // Disable the long-press context menu on the button.
    recordBtn.oncontextmenu = (e) => e.preventDefault();
    // ============================================================
    // ๐ LANGUAGE QUICK SELECTORS - Event Handlers
    // ============================================================
    const sourceLangQuick = document.getElementById('source-lang-quick');
    const targetLangQuick = document.getElementById('target-lang-quick');
    const swapLangsBtn = document.getElementById('swap-langs');
    // Source language selector: persist the choice, show transient feedback,
    // and restart recognition when a session is live.
    if (sourceLangQuick) {
        sourceLangQuick.addEventListener('change', function () {
            const newLang = this.value;
            console.log(`๐ Source language changed to: ${newLang}`);
            localStorage.setItem('sourceLangQuick', newLang);
            // Transient status feedback, reverting after 2s.
            if (statusText) {
                statusText.innerText = `๐ Source: ${this.options[this.selectedIndex].text}`;
                statusText.style.color = '#4a9b87';
                setTimeout(() => {
                    statusText.innerText = 'Prรชt';
                    statusText.style.color = '#888';
                }, 2000);
            }
            // Stopping recognition lets its onend handler restart it with
            // the new language.
            if (window.continuousMode && recognition) {
                console.log('๐ Restarting recognition with new source language...');
                try { recognition.stop(); } catch (e) { }
            }
        });
        // Restore the persisted selection.
        const savedSource = localStorage.getItem('sourceLangQuick');
        if (savedSource) {
            sourceLangQuick.value = savedSource;
        }
    }
    // Target language selector: persist the choice and mirror it onto the
    // main selector when present.
    if (targetLangQuick) {
        targetLangQuick.addEventListener('change', function () {
            const newLang = this.value;
            console.log(`๐ฏ Target language changed to: ${newLang}`);
            localStorage.setItem('targetLangQuick', newLang);
            if (quickLangSelector) {
                quickLangSelector.value = newLang;
            }
            // Transient status feedback, reverting after 2s.
            if (statusText) {
                statusText.innerText = `๐ฏ Cible: ${this.options[this.selectedIndex].text}`;
                statusText.style.color = '#4a9b87';
                setTimeout(() => {
                    statusText.innerText = 'Prรชt';
                    statusText.style.color = '#888';
                }, 2000);
            }
        });
        // Restore the persisted selection.
        const savedTarget = localStorage.getItem('targetLangQuick');
        if (savedTarget) {
            targetLangQuick.value = savedTarget;
        }
    }
    // Swap button: exchanges source and target using the code<->name maps
    // (selectors use language codes for source, display names for target).
    if (swapLangsBtn) {
        swapLangsBtn.addEventListener('click', function () {
            console.log('๐ Swapping languages...');
            const sourceSelect = document.getElementById('source-lang-quick');
            const targetSelect = document.getElementById('target-lang-quick');
            if (!sourceSelect || !targetSelect) {
                console.warn('Language selectors not found');
                return;
            }
            const currentSource = sourceSelect.value;
            const currentTarget = targetSelect.value;
            // Target display names -> source language codes.
            const targetToSourceMap = {
                'French': 'fr-FR',
                'English': 'en-US',
                'Arabic': 'ar-SA',
                'Moroccan Darija': 'ar-SA',
                'Spanish': 'es-ES',
                'German': 'de-DE'
            };
            // Source language codes -> target display names.
            const sourceToTargetMap = {
                'fr-FR': 'French',
                'en-US': 'English',
                'ar-SA': 'Arabic',
                'es-ES': 'Spanish',
                'de-DE': 'German',
                'auto': 'French' // Default when swapping away from auto.
            };
            const newSourceCode = targetToSourceMap[currentTarget] || 'auto';
            const newTargetName = sourceToTargetMap[currentSource] || 'French';
            // Apply the swap and persist it.
            sourceSelect.value = newSourceCode;
            targetSelect.value = newTargetName;
            localStorage.setItem('sourceLangQuick', newSourceCode);
            localStorage.setItem('targetLangQuick', newTargetName);
            // Spin animation as visual feedback.
            this.style.transform = 'rotate(180deg)';
            setTimeout(() => {
                this.style.transform = 'rotate(0deg)';
            }, 300);
            // Transient status feedback, reverting after 2.5s.
            if (statusText) {
                statusText.innerText = `๐ ${sourceSelect.options[sourceSelect.selectedIndex].text} โ ${targetSelect.options[targetSelect.selectedIndex].text}`;
                statusText.style.color = '#60a5fa';
                setTimeout(() => {
                    statusText.innerText = 'Prรชt';
                    statusText.style.color = '#888';
                }, 2500);
            }
            console.log(`โ Swapped: ${currentSource} โ ${newTargetName}, ${currentTarget} โ ${newSourceCode}`);
            // Force a recognition restart so the new language takes effect.
            if (window.continuousMode && recognition) {
                try { recognition.stop(); } catch (e) { }
            }
        });
    }
    console.log('๐ Language quick selectors initialized');
    console.log('โ โ โ BUTTON HANDLER ATTACHED! โ โ โ ');
}
// Bootstrap: run initializeApp once the DOM is available - immediately when
// the document has already been parsed, otherwise on DOMContentLoaded.
if (document.readyState !== 'loading') {
    console.log('๐ DOM already ready, initializing now...');
    initializeApp();
} else {
    console.log('๐ DOM not ready, waiting for DOMContentLoaded...');
    document.addEventListener('DOMContentLoaded', initializeApp);
}
// --- CONTINUOUS CONVERSATION MODE ---
/**
 * Runs one listening cycle: (re)acquires the microphone, records until
 * speech followed by sustained silence is detected, ships the finished
 * chunk to processAudio(), then schedules itself again while continuous
 * mode stays on. Re-entry is blocked by isRecording; handlers left over
 * from abandoned recorders are invalidated via currentCycleId.
 */
async function listenContinuously() {
    if (!window.continuousMode) {
        console.log("โ listenContinuously called but window.continuousMode is false");
        return;
    }
    // Recursion guard: never run two capture loops at once.
    if (isRecording) {
        console.log("โ ๏ธ Already recording, skipping duplicate start request");
        return;
    }
    console.log("๐๏ธ Starting NEW listening cycle...");
    try {
        // Bump the cycle id so callbacks of any previous recorder become no-ops.
        currentCycleId++;
        const thisCycleId = currentCycleId;
        console.log(`๐ Cycle ID: ${thisCycleId}`);
        // Detach the old MediaRecorder WITHOUT calling stop() - stop() would
        // fire the stale onstop handler. Just drop all references.
        if (mediaRecorder && mediaRecorder.state !== 'inactive') {
            try {
                console.log("๐งน Cleaning up old mediaRecorder");
                mediaRecorder.ondataavailable = null;
                mediaRecorder.onstop = null;
                mediaRecorder = null;
            } catch (e) { console.warn("Cleanup warning:", e); }
        }
        isRecording = true;
        audioChunks = [];
        let speechDetected = false; // Per-cycle flag, read by onstop below.
        textProcessingTriggered = false; // Reset double-processing guard.
        silenceDetectionActive = true; // Enable silence detection.
        let stream;
        // Reuse the persistent mic stream when still active, else request one.
        if (globalStream && globalStream.active) {
            console.log("โป๏ธ Reusing existing microphone stream");
            stream = globalStream;
        } else {
            console.log("๐ค Requesting NEW microphone access...");
            stream = await navigator.mediaDevices.getUserMedia({ audio: true });
            globalStream = stream;
            console.log("โ Microphone access granted");
        }
        // Start browser-side live transcription for instant visual feedback.
        startRealTimeTranscription();
        // Reuse the AudioContext when open to reduce click/pop artifacts.
        if (!audioContext || audioContext.state === 'closed') {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();
        }
        analyser = audioContext.createAnalyser();
        micSource = audioContext.createMediaStreamSource(stream);
        micSource.connect(analyser);
        analyser.fftSize = 256;
        const bufferLength = analyser.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        let silenceStart = Date.now();
        mediaRecorder = new MediaRecorder(stream);
        mediaRecorder.ondataavailable = e => {
            // Collect data only for the current cycle; drop stale events.
            if (thisCycleId === currentCycleId) {
                audioChunks.push(e.data);
            } else {
                console.warn(`โ ๏ธ Ignoring data from old cycle ${thisCycleId} (current: ${currentCycleId})`);
            }
        };
        mediaRecorder.onstop = async () => {
            // Ignore stop events raised by recorders from older cycles.
            if (thisCycleId !== currentCycleId) {
                console.warn(`โ ๏ธ Ignoring onstop from old cycle ${thisCycleId} (current: ${currentCycleId})`);
                return;
            }
            console.log(`๐ Chunk finalized (Cycle ${thisCycleId}).`);
            // Ship the chunk only when real speech was confirmed and the blob
            // is big enough to plausibly contain it.
            if (audioChunks.length > 0 && speechDetected) {
                const blob = new Blob(audioChunks, { type: 'audio/wav' });
                if (blob.size > 2000) {
                    statusText.innerText = 'Traitement...';
                    statusText.style.color = '#4a9b87';
                    // Fire-and-forget: processing runs in the background so the
                    // next listening cycle can start immediately. 'true' skips
                    // the redundant silence check (already done client-side).
                    processAudio(blob, true).catch(e => console.error("Processing error:", e));
                }
            }
            // Instant restart unless the user fully stopped continuous mode.
            if (window.continuousMode) {
                // Safe to reset only now that onstop has consumed the flag.
                speechDetected = false;
                // 0ms timeout unblocks the event loop but restarts ASAP.
                setTimeout(() => {
                    if (window.continuousMode) {
                        console.log("๐ Instant Restart Triggered (Parallel)");
                        listenContinuously();
                    }
                }, 0);
            }
            // Keep audioContext open ('warm'); only detach per-cycle nodes.
            try {
                if (micSource) micSource.disconnect();
                if (analyser) analyser.disconnect();
                if (animationId) cancelAnimationFrame(animationId);
            } catch (e) { }
        };
        mediaRecorder.start();
        if (animationId) cancelAnimationFrame(animationId);
        // Frame-based speech/silence hysteresis (assumes ~60fps rAF).
        let consecutiveSpeechFrames = 0;
        let consecutiveSilenceFrames = 0;
        const SPEECH_FRAMES_THRESHOLD = 3; // ~50ms of sound confirms speech.
        const SILENCE_FRAMES_THRESHOLD = 1200; // ~20s of silence before cutting.
        // Per-frame volume monitor driving the speech/silence transitions.
        function monitorAudio() {
            if (!window.continuousMode || !isRecording) {
                console.log("๐ Audio monitoring stopped");
                return;
            }
            // Skip analysis while TTS plays so we don't hear ourselves.
            if (isTTSPlaying) {
                requestAnimationFrame(monitorAudio);
                return;
            }
            analyser.getByteFrequencyData(dataArray);
            let sum = 0;
            for (let i = 0; i < bufferLength; i++) sum += dataArray[i];
            const average = sum / bufferLength;
            // NOTE(review): threshold hardcoded to 4 here instead of the
            // VOLUME_THRESHOLD (8) declared at the top - confirm intent.
            if (average > 4) {
                consecutiveSpeechFrames++;
                consecutiveSilenceFrames = 0;
                // Require several consecutive loud frames to avoid false starts.
                if (consecutiveSpeechFrames >= SPEECH_FRAMES_THRESHOLD && !speechDetected) {
                    speechDetected = true;
                    silenceStart = Date.now();
                    console.log("๐ฃ๏ธ Speech confirmed (filtered noise)");
                    statusText.innerText = '๐ค Enregistrement...';
                    statusText.style.color = '#ff4444';
                    recordBtn.classList.add('active-speech');
                }
            } else {
                consecutiveSpeechFrames = 0;
                consecutiveSilenceFrames++;
                if (!speechDetected) {
                    // Still waiting for the user to start talking.
                    if (!statusText.innerText.includes('Traitement')) {
                        statusText.innerText = '๐ค En attente de parole...';
                        statusText.style.color = '#888';
                    }
                } else {
                    // Speech happened earlier; a long silence ends the utterance.
                    if (consecutiveSilenceFrames >= SILENCE_FRAMES_THRESHOLD) {
                        console.log('๐คซ Silence confirmed - ending speech');
                        consecutiveSpeechFrames = 0;
                        consecutiveSilenceFrames = 0;
                        // speechDetected intentionally NOT reset - onstop reads it.
                        isRecording = false;
                        recordBtn.classList.remove('active-speech');
                        // Stopping the recorder triggers onstop -> processing.
                        if (mediaRecorder && mediaRecorder.state === 'recording') {
                            mediaRecorder.stop();
                        }
                        return;
                    }
                }
            }
            animationId = requestAnimationFrame(monitorAudio);
        }
        monitorAudio();
    } catch (err) {
        // Any failure aborts continuous mode and resets the button state.
        console.error('Erreur listenContinuously:', err);
        window.continuousMode = false;
        recordBtn.classList.remove('active');
    }
}
// --- REAL-TIME TRANSCRIPTION (Consolidated & Cleaned) ---
// Browser-based speech recognition used only for instant visual feedback.
// NOTE: browser SpeechRecognition does NOT support Darija well; the
// server-side STT result remains authoritative.
let arabicModeActive = false; // True while the source language is Arabic/Darija (ar-SA).
| function startRealTimeTranscription() { | |
| const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; | |
| // 1. Check Browser Support | |
| if (!SpeechRecognition) { | |
| console.warn("โ ๏ธ Browser Speech Recognition not supported."); | |
| return; | |
| } | |
| // 2. Prevent Multiple Instances | |
| if (recognition) { | |
| try { recognition.stop(); } catch (e) { } | |
| recognition = null; | |
| } | |
| // ๐งน Reset global capture | |
| window.currentTranscript = ""; | |
| try { | |
| // 3. Get language from QUICK SELECTORS (visible in UI) | |
| const sourceLangQuick = document.getElementById('source-lang-quick'); | |
| const targetLangQuick = document.getElementById('target-lang-quick'); | |
| const targetLang = targetLangQuick?.value || quickLangSelector?.value || 'French'; | |
| let sourceLang = sourceLangQuick?.value || localStorage.getItem('sourceLangQuick') || 'auto'; | |
| console.log(`๐ฏ Quick Selectors: Source=${sourceLang}, Target=${targetLang}`); | |
| // ๐ฒ๐ฆ SMART MODE: Only activate if source is 'auto' | |
| if (sourceLang === 'auto') { | |
| // Smart detection based on target language | |
| if (targetLang === 'French') { | |
| sourceLang = 'ar-SA'; // Likely speaking Arabic/Darija | |
| arabicModeActive = true; | |
| console.log('๐ฒ๐ฆ AUTO MODE: Target=French โ Assuming Arabic/Darija'); | |
| } else if (targetLang === 'Moroccan Darija' || targetLang === 'Arabic') { | |
| sourceLang = 'fr-FR'; // Likely speaking French | |
| arabicModeActive = false; | |
| console.log('๐ซ๐ท AUTO MODE: Target=Arabic โ Assuming French'); | |
| } else if (targetLang === 'English') { | |
| // Default to Arabic for Moroccan users, but check cache | |
| sourceLang = detectedLanguage === 'French' ? 'fr-FR' : 'ar-SA'; | |
| arabicModeActive = sourceLang === 'ar-SA'; | |
| console.log(`๐ AUTO MODE: Target=English โ Assuming ${sourceLang}`); | |
| } else { | |
| sourceLang = 'fr-FR'; // Default fallback | |
| arabicModeActive = false; | |
| } | |
| } else { | |
| // MANUAL MODE: User selected specific source language | |
| arabicModeActive = sourceLang === 'ar-SA'; | |
| console.log(`๐ MANUAL MODE: Source=${sourceLang} (Arabic mode: ${arabicModeActive})`); | |
| } | |
| // 4. Configure Recognition with selected language | |
| recognition = new SpeechRecognition(); | |
| recognition.continuous = true; | |
| recognition.interimResults = true; | |
| recognition.lang = sourceLang; | |
| currentRecognitionLang = sourceLang; | |
| console.log(`๐ค Browser Recognition: ${sourceLang} (Arabic mode: ${arabicModeActive})`); | |
| // 5. Event Handlers | |
| recognition.onstart = () => { | |
| console.log("โ Real-time transcription active"); | |
| if (navigator.vibrate) navigator.vibrate(50); // Haptic feedback | |
| }; | |
| recognition.onerror = (event) => { | |
| console.warn("โ Recognition error:", event.error); | |
| if (event.error === 'not-allowed') { | |
| statusText.innerText = "โ ๏ธ Accรจs micro refusรฉ"; | |
| statusText.style.color = "yellow"; | |
| } else if (event.error !== 'aborted') { | |
| // Restart recognition on non-critical errors (in continuous mode) | |
| if (window.continuousMode && isRecording) { | |
| console.log("๐ Restarting recognition after error..."); | |
| setTimeout(() => { | |
| if (window.continuousMode && isRecording) { | |
| try { recognition.start(); } catch (e) { } | |
| } | |
| }, 500); | |
| } | |
| } | |
| }; | |
| recognition.onend = () => { | |
| console.log("๐ Recognition ended"); | |
| // Auto-restart in continuous mode (unless TTS is playing) | |
| if (window.continuousMode && isRecording) { | |
| if (isTTSPlaying) { | |
| console.log("โธ๏ธ TTS is playing - recognition will restart when TTS ends"); | |
| // Don't restart now - the audio.onended handler will do it | |
| } else { | |
| console.log("๐ Auto-restarting recognition for continuous mode..."); | |
| setTimeout(() => { | |
| if (window.continuousMode && isRecording && !isTTSPlaying) { | |
| try { | |
| recognition.start(); | |
| console.log("โ Recognition restarted successfully"); | |
| } catch (e) { | |
| console.warn("Could not restart recognition:", e); | |
| } | |
| } | |
| }, 300); | |
| } | |
| } | |
| }; | |
| recognition.onresult = (event) => { | |
| let interimTranscript = ''; | |
| let finalTranscript = ''; | |
| for (let i = event.resultIndex; i < event.results.length; ++i) { | |
| const transcript = event.results[i][0].transcript; | |
| if (event.results[i].isFinal) { | |
| finalTranscript += transcript; | |
| } else { | |
| interimTranscript += transcript; | |
| } | |
| } | |
| // โจ AFFICHAGE INSTANTANร - LYRICS STYLE | |
| const fullText = finalTranscript || interimTranscript; | |
| // ๐ STEAL THE MICROPHONE: Store global transcript for processAudio to pick up | |
| if (finalTranscript.trim().length > 0) { | |
| window.currentTranscript = finalTranscript; | |
| } else if (interimTranscript.trim().length > 0) { | |
| window.currentTranscript = interimTranscript; | |
| } | |
| if (fullText.trim().length > 0) { | |
| // ๐ฒ๐ฆ ARABIC MODE: Browser recognition is unreliable for Arabic/Darija | |
| // Show the text but with a note that final transcription will be better | |
| if (arabicModeActive) { | |
| // For Arabic, only show if it looks like actual Arabic text | |
| const hasArabicChars = /[\u0600-\u06FF]/.test(fullText); | |
| if (hasArabicChars && originalTextField) { | |
| originalTextField.innerText = fullText; | |
| originalTextField.hidden = false; | |
| originalTextField.style.opacity = '1'; | |
| originalTextField.style.direction = 'rtl'; | |
| originalTextField.style.textAlign = 'right'; | |
| originalTextField.style.fontSize = '1.3rem'; | |
| originalTextField.style.fontWeight = '500'; | |
| } else { | |
| // Browser gave garbage (Latin chars for Arabic speech) - show waiting status | |
| if (originalTextField) { | |
| originalTextField.innerText = '๐ค ุฌุงุฑู ุงูุงุณุชู ุงุน...'; // "Listening..." in Arabic | |
| originalTextField.style.direction = 'rtl'; | |
| originalTextField.style.textAlign = 'right'; | |
| originalTextField.style.opacity = '0.7'; | |
| } | |
| } | |
| } else { | |
| // French/English mode - show normally | |
| if (originalTextField) { | |
| originalTextField.innerText = fullText; | |
| originalTextField.hidden = false; | |
| originalTextField.style.opacity = '1'; | |
| originalTextField.style.direction = 'ltr'; | |
| originalTextField.style.textAlign = 'left'; | |
| originalTextField.style.fontSize = '1.2rem'; | |
| originalTextField.style.fontWeight = '500'; | |
| originalTextField.style.lineHeight = '1.6'; | |
| originalTextField.style.fontStyle = 'normal'; | |
| originalTextField.style.animation = 'fadeIn 0.3s ease'; | |
| } | |
| } | |
| // Scroll to bottom | |
| const chatHistory = document.getElementById('chat-history'); | |
| if (chatHistory) chatHistory.scrollTop = chatHistory.scrollHeight; | |
| // โจ Recognition is ONLY for visual display | |
| if (finalTranscript.trim().length > 2) { | |
| console.log("โ Sentence transcribed (visual feedback only)"); | |
| } | |
| } | |
| }; | |
| // 6. Start | |
| recognition.start(); | |
| } catch (e) { | |
| console.error("โ Fatal Error starting recognition:", e); | |
| } | |
| } | |
| // ๐ INTELLIGENT MODE: Send Text directly (Primary trigger via isFinal) | |
/**
 * Sends already-transcribed TEXT directly to the backend for translation +
 * TTS, bypassing server-side speech-to-text entirely (primary trigger when
 * browser recognition yields a final result).
 *
 * @param {string} text - The transcribed user utterance to translate.
 * @returns {Promise<void>} Resolves when the request cycle finishes; all
 *   errors are handled internally (status text updated, never thrown).
 *
 * Side effects: toggles `isProcessingAudio`, updates `statusText`,
 * `translatedTextField`, `currentRecognitionLang` and `detectedLanguage`,
 * and may stop `recognition` so it restarts with the detected language.
 */
async function sendTextForProcessing(text) {
  // Re-entrancy guard: a previous request is still in flight
  if (isProcessingAudio) {
    console.log("โ ๏ธ Already processing, skipping duplicate...");
    return;
  }
  isProcessingAudio = true;
  // CONSISTENCY FIX: prefer the visible quick target selector, with the same
  // priority order processAudio uses; previously only the legacy selector
  // was consulted here, so the two paths could translate to different languages.
  const targetLang = document.getElementById('target-lang-quick')?.value
    || quickLangSelector?.value
    || 'French';
  // Language detection itself is handled by the backend (Gemini)
  console.log(`๐ค Sending text for processing: "${text}"`);
  statusText.innerText = 'Traduction en cours...';
  statusText.style.color = '#4a9b87';
  const payload = {
    text_input: text,                 // Sending TEXT, not AUDIO
    source_language: 'auto',          // Let backend (Gemini) detect language
    target_language: targetLang,
    model: localStorage.getItem('selectedModel') || 'Gemini',
    tts_engine: localStorage.getItem('ttsEngine') || 'openai',
    stt_engine: localStorage.getItem('sttEngine') || 'seamless-m4t',
    ai_correction: localStorage.getItem('aiCorrectionEnabled') !== 'false', // enabled by default
    voice_cloning: false,
    use_grammar_correction: localStorage.getItem('grammarCorrectionEnabled') !== 'false',
    voice_gender_preference: localStorage.getItem('voiceGenderPreference') || 'auto'
  };
  try {
    const response = await fetch('/process_audio', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload)
    });
    // ROBUSTNESS FIX: a non-2xx response (e.g. a 500 with an HTML body) used
    // to surface as a confusing JSON parse error; fail explicitly instead.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    if (data.error) {
      console.error("โ Processing error:", data.error);
      statusText.innerText = 'Erreur';
    } else {
      // Success: update the translated-text display only.
      // Chat messages are created by the main axios handler to avoid duplicates.
      if (translatedTextField) {
        translatedTextField.innerText = data.translated_text;
        translatedTextField.style.opacity = '1';
      }
      // SMART MODE LATCHING: remember the detected language for the NEXT turn
      if (data.source_language_full) {
        const newLang = data.source_language_full;
        console.log(`๐ง SMART MODE: Latching onto detected language: ${newLang}`);
        // Map standard language names to BCP-47 codes for SpeechRecognition
        const langToCode = {
          'French': 'fr-FR',
          'English': 'en-US',
          'Arabic': 'ar-SA',          // generic Arabic; ar-MA used for Darija
          'Moroccan Darija': 'ar-MA', // Chrome may treat this as ar-SA
          'Spanish': 'es-ES',
          'German': 'de-DE',
          'Italian': 'it-IT',
          'Portuguese': 'pt-PT',
          'Russian': 'ru-RU',
          'Japanese': 'ja-JP',
          'Korean': 'ko-KR',
          'Chinese': 'zh-CN',
          'Hindi': 'hi-IN'
        };
        const code = langToCode[newLang];
        if (code) {
          currentRecognitionLang = code;
          detectedLanguage = newLang; // update global detection cache
          // ROBUSTNESS FIX: optional chaining - the selector may be absent,
          // which previously threw and skipped the rest of the success path.
          if (document.getElementById('source-lang-selector')?.value === 'auto') {
            console.log(`๐ UPDATING RECOGNITION to ${code} for next turn`);
            // Stop the recognizer; it auto-restarts (via its 'end' handler or
            // the continuous loop) and will pick up the new language.
            if (recognition) {
              try { recognition.stop(); } catch (e) { }
            }
          }
        }
      }
      console.log("โ Text processing complete - TTS will play automatically");
      // Status depends on the conversation mode
      if (window.continuousMode) {
        statusText.innerText = '๐ Lecture TTS...';
        statusText.style.color = '#4a9b87';
        console.log('๐๏ธ Continuous mode active - will resume listening after TTS');
      } else {
        statusText.innerText = 'Prรชt';
      }
    }
  } catch (e) {
    console.error("โ Text processing error:", e);
    statusText.innerText = 'Erreur rรฉseau';
  } finally {
    isProcessingAudio = false; // always release the re-entrancy guard
  }
}
| // --- RECORDER LOGIC --- | |
| // Silence Detection Config - Moved to top of file | |
| // CONSTANTS ARE GLOBAL NOW | |
/**
 * Starts one "smart" (auto-stopping) recording session:
 *  1. Opens the microphone with echo cancellation / noise suppression / AGC.
 *  2. Monitors live volume via an AnalyserNode and auto-stops after
 *     SILENCE_LIMIT_MS of quiet once speech has been heard.
 *  3. Runs browser speech recognition in parallel for instant visual feedback.
 *  4. On recorder stop, hands the blob to processAudio() and - in continuous
 *     mode - schedules the next listening cycle (waiting out TTS playback).
 * Errors (e.g. mic permission denied) reset the UI to its idle state.
 */
async function startSmartRecording() {
  try {
    console.log('๐ค STARTING RECORDING...');
    isRecording = true;
    recordBtn.classList.add('active');
    statusText.innerText = 'รcoute...';
    statusText.style.color = 'white';
    document.dispatchEvent(new Event('reset-ui'));
    originalTextField.innerText = '...';
    translatedTextField.innerText = '...';
    // Microphone configuration: echo cancellation avoids re-capturing TTS
    // from the speakers; noise suppression + AGC clean up the signal.
    // NOTE(review): these tracks are never explicitly stopped in this
    // function - presumably relying on the persistent-mic design
    // (see globalStream at top of file) - TODO confirm.
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: {
        echoCancellation: true,  // prevent speaker feedback
        noiseSuppression: true,  // remove background noise
        autoGainControl: true,   // normalize volume
        channelCount: 1,
        sampleRate: 48000
      }
    });
    // 1. Setup audio analysis (silence detection)
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    // Force-resume: contexts created outside a user gesture start 'suspended'
    if (audioContext.state === 'suspended') {
      await audioContext.resume();
      console.log('โก AudioContext Force-Resumed');
    }
    analyser = audioContext.createAnalyser();
    micSource = audioContext.createMediaStreamSource(stream);
    micSource.connect(analyser);
    analyser.fftSize = 256;
    const bufferLength = analyser.frequencyBinCount;
    const dataArray = new Uint8Array(bufferLength);
    let silenceStart = Date.now();
    // Tracks whether human speech was actually heard during this session
    let smartSpeechDetected = false;
    // Per-frame volume check: stops the recording once the user has been
    // silent for SILENCE_LIMIT_MS (timer resets on every loud frame).
    function detectSilence() {
      if (!isRecording) return;
      analyser.getByteFrequencyData(dataArray);
      // Average volume across all frequency bins
      let sum = 0;
      for (let i = 0; i < bufferLength; i++) sum += dataArray[i];
      const average = sum / bufferLength;
      // Visual feedback: button scales with loudness (capped at 1.2x)
      const scale = 1 + (average / 100);
      recordBtn.style.transform = `scale(${Math.min(scale, 1.2)})`;
      // UI feedback while still waiting for the first speech
      if (average < VOLUME_THRESHOLD && !smartSpeechDetected) {
        statusText.innerText = '๐ค En attente de parole...';
        statusText.style.color = 'rgba(255,255,255,0.7)';
      }
      if (average < VOLUME_THRESHOLD) {
        // It is silent
        if (Date.now() - silenceStart > SILENCE_LIMIT_MS) {
          // Silence limit reached - stop and process the recording
          console.log("๐คซ Silence limit reached.");
          stopSmartRecording();
          return;
        }
      } else {
        // Sound detected - reset the silence timer
        silenceStart = Date.now();
        if (!smartSpeechDetected) {
          smartSpeechDetected = true; // valid speech detected
          console.log("๐ฃ๏ธ Speech detected!");
          statusText.innerText = '๐ค Je vous รฉcoute...';
          statusText.style.color = '#fff';
          recordBtn.classList.add('active-speech');
        }
      }
      animationId = requestAnimationFrame(detectSilence);
    }
    detectSilence(); // start monitoring
    // 2. Start speech recognition (instant visual feedback; best-effort)
    try { startRealTimeTranscription(); } catch (e) { }
    // 3. Start MediaRecorder
    mediaRecorder = new MediaRecorder(stream);
    audioChunks = [];
    mediaRecorder.ondataavailable = e => audioChunks.push(e.data);
    mediaRecorder.onstop = async () => {
      console.log("๐ Recorder stopped. Processing audio...");
      // Tear down the analysis pipeline (recognition, RAF loop, audio graph)
      if (recognition) { try { recognition.stop(); } catch (e) { } }
      if (animationId) cancelAnimationFrame(animationId);
      if (micSource) micSource.disconnect();
      if (audioContext) audioContext.close();
      if (audioChunks.length > 0) {
        const blob = new Blob(audioChunks, { type: 'audio/wav' });
        console.log(`๐ฆ Audio Data: ${blob.size} bytes`);
        // UI feedback while the backend processes the clip
        statusText.innerText = 'Traitement...';
        statusText.style.color = '#4a9b87';
        try {
          await processAudio(blob);
        } catch (e) {
          console.error("Error in processAudio", e);
          statusText.innerText = 'Erreur';
        }
      } else {
        console.error("โ Audio was empty!");
        statusText.innerText = 'Audio Vide';
      }
      // AUTO-RESTART LOOP (crucial for continuous conversation)
      if (window.continuousMode) {
        // Do NOT restart while TTS is playing - we would capture our own audio
        if (isTTSPlaying && window.lastBotAudio) {
          console.log("โธ๏ธ TTS Playing - Waiting for audio to finish before restarting...");
          // Chain the restart onto the existing onended handler
          const originalEnded = window.lastBotAudio.onended;
          window.lastBotAudio.onended = () => {
            if (originalEnded) originalEnded();
            console.log("โ TTS Finished - Restarting conversation loop");
            // Small delay to ensure clean state
            setTimeout(() => {
              if (window.continuousMode) listenContinuously();
            }, 100);
          };
          return;
        } else {
          // No audio playing, restart immediately
          console.log("๐ Auto-restarting conversation loop (No TTS active)...");
          setTimeout(() => {
            if (window.continuousMode) listenContinuously();
          }, 100);
        }
      } else {
        statusText.innerText = 'Prรชt';
      }
    };
    mediaRecorder.start();
    console.log("๐ค Recording started (with Auto-Stop)...");
  } catch (err) {
    console.error(err);
    statusText.innerText = "Erreur Micro";
    isRecording = false;
    recordBtn.classList.remove('active');
  }
}
/**
 * Manually ends the current smart-recording session: clears the recording
 * flag, stops the recorder (which triggers its onstop processing), halts
 * live recognition, and switches the UI into its "thinking" state.
 */
function stopSmartRecording() {
  isRecording = false;
  if (mediaRecorder && mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop();
  }
  if (recognition) {
    try { recognition.stop(); } catch (e) { }
  }
  recordBtn.classList.remove('active');
  statusText.innerText = 'Rรฉflexion...';
}
| // [Function setupRealTimeTranscription removed - Consolidated into startRealTimeTranscription] | |
/**
 * Debounced entry point for live translation: collapses rapid transcript
 * updates so performStreamTranslation fires at most once per 200ms burst.
 */
function debouncedStreamTranslation(text) {
  if (streamTimeout) {
    clearTimeout(streamTimeout);
  }
  streamTimeout = setTimeout(() => {
    performStreamTranslation(text);
  }, 200);
}
/**
 * Posts interim transcript text to /stream_text and shows the returned
 * translation in the translated-text card (revealed once non-empty).
 * Network failures are logged and swallowed - this is a best-effort preview.
 */
async function performStreamTranslation(text) {
  try {
    const payload = {
      text: text,
      target_lang: quickLangSelector?.value || 'English'
    };
    const res = await axios.post('/stream_text', payload);
    const translation = res.data.translation;
    if (!translation) return;
    translatedTextField.innerText = translation;
    // Reveal the translation card once there is real content
    if (translation.trim().length > 0) {
      translatedTextField.style.opacity = '1';
      console.log('๐ Real-time translation:', translation);
    }
  } catch (e) { console.error("Stream Error", e); }
}
| // Helper function to analyze audio energy and detect silence | |
/**
 * Decodes a recorded audio blob and measures its energy to decide whether it
 * is silence/background noise (used to block Whisper hallucination).
 *
 * @param {Blob} blob - Recorded audio in any format decodeAudioData accepts.
 * @returns {Promise<{rms: number, peak: number, duration: number, isSilent: boolean}>}
 *   Never rejects: read/decode failures resolve as silent ({..., isSilent: true}).
 */
function analyzeAudioEnergy(blob) {
  return new Promise((resolve) => {
    const reader = new FileReader();
    // FIX: a FileReader error used to leave this promise pending forever,
    // deadlocking processAudio; treat read failures as silence instead.
    reader.onerror = () => {
      console.error('โ ๏ธ Audio analysis failed:', reader.error);
      resolve({ rms: 0, peak: 0, duration: 0, isSilent: true });
    };
    reader.onloadend = async () => {
      if (!reader.result) return; // read failed - onerror already resolved
      let audioContext = null;
      try {
        audioContext = new (window.AudioContext || window.webkitAudioContext)();
        const audioBuffer = await audioContext.decodeAudioData(reader.result);
        const channelData = audioBuffer.getChannelData(0);
        // Guard: an empty buffer would make RMS NaN - treat it as silence
        if (channelData.length === 0) {
          resolve({ rms: 0, peak: 0, duration: audioBuffer.duration, isSilent: true });
          return;
        }
        // Single pass over the samples: RMS energy + peak amplitude
        let sumOfSquares = 0;
        let peak = 0;
        for (let i = 0; i < channelData.length; i++) {
          const sample = channelData[i];
          sumOfSquares += sample * sample;
          const abs = Math.abs(sample);
          if (abs > peak) peak = abs;
        }
        const rms = Math.sqrt(sumOfSquares / channelData.length);
        const duration = audioBuffer.duration;
        console.log(`๐ Audio Analysis: RMS=${rms.toFixed(4)}, Peak=${peak.toFixed(4)}, Duration=${duration.toFixed(2)}s`);
        // WAR MODE sensitivity: pick up even whispers (was rms < 0.01)
        resolve({ rms, peak, duration, isSilent: rms < 0.002 && peak < 0.01 });
      } catch (e) {
        console.error('โ ๏ธ Audio analysis failed:', e);
        resolve({ rms: 0, peak: 0, duration: 0, isSilent: true });
      } finally {
        // FIX (resource leak): browsers cap the number of concurrent
        // AudioContexts (~6); the context created here was never closed, so
        // repeated recordings eventually failed to analyze.
        if (audioContext) audioContext.close().catch(() => { });
      }
    };
    reader.readAsArrayBuffer(blob);
  });
}
/**
 * Main processing pipeline for a finished recording:
 *  1. Validates the blob (silence / minimum-duration checks) unless the live
 *     volume monitor already confirmed speech.
 *  2. Sends the audio (plus any client-side transcript) to /process_audio.
 *  3. Blocks hallucinated outputs, renders the translation, plays the TTS
 *     reply ONCE on the shared player, and appends user/bot chat messages.
 *  4. Restarts the listening loop in continuous mode via the early-return
 *     paths; the onstop handler drives restarts for the normal path.
 *
 * @param {Blob} blob - Recorded audio from MediaRecorder.
 * @param {boolean} [bypassSilenceCheck=false] - Skip the decode-based silence
 *   analysis when speech was already confirmed upstream (saves a full decode).
 * @returns {Promise<void>} Errors are handled internally (status text set).
 */
async function processAudio(blob, bypassSilenceCheck = false) {
  // Re-entrancy guard: prevent duplicate/overlapping processing
  if (isProcessingAudio) {
    console.log('โ ๏ธ Audio already being processed, skipping...');
    return;
  }
  isProcessingAudio = true;
  recordBtn.classList.add('processing');       // blue spinner
  recordBtn.classList.remove('active');        // stop red pulse
  recordBtn.classList.remove('active-speech'); // stop green pulse
  // SILENCE DETECTION - prevents Whisper hallucinating text from empty audio
  if (!bypassSilenceCheck) {
    const audioAnalysis = await analyzeAudioEnergy(blob);
    // Reject if audio is too quiet (silence / background noise)
    if (audioAnalysis.isSilent) {
      console.warn('๐ SILENCE DETECTED (Threshold check failed) - Skipping processing');
      console.log(`๐ Analysis: RMS=${audioAnalysis.rms}, Peak=${audioAnalysis.peak}`);
      statusText.innerText = 'Trop silencieux';
      isProcessingAudio = false;
      // Restart almost instantly (100ms) so continuous mode stays responsive
      setTimeout(() => {
        statusText.innerText = 'Prรชt';
        if (window.continuousMode) listenContinuously();
      }, 100);
      return;
    }
    // Reject if audio is too short (likely just a click)
    if (audioAnalysis.duration < 0.5) {
      console.log(`โฑ๏ธ Audio too short (${audioAnalysis.duration.toFixed(2)}s) - Skipping`);
      statusText.innerText = 'Audio trop court';
      isProcessingAudio = false;
      setTimeout(() => {
        statusText.innerText = 'Prรชt';
        if (window.continuousMode) listenContinuously();
      }, 800);
      return;
    }
  } else {
    console.log("โก SPEED: Bypassing secondary silence check (Speech already confirmed)");
  }
  console.log('โ Audio validation passed - Processing...');
  const startTime = Date.now();
  const reader = new FileReader();
  reader.readAsDataURL(blob);
  reader.onloadend = async () => {
    const base64 = reader.result.split(',')[1];
    try {
      // Client-side STT injection: prefer the transcript captured by the Web
      // Speech API over the on-screen text; empty string => backend STT only.
      let textInput = (window.currentTranscript || originalTextField.innerText || "").trim();
      // Strip placeholder artifacts from the display text
      textInput = textInput.replace('...', '').replace('๐ค', '').trim();
      if (textInput.includes('รcoute') || textInput.length < 2) {
        textInput = ''; // Empty = backend will use Whisper/Gemini transcription
        console.log('๐ฏ Using backend STT only (no client text available)');
      } else {
        console.log(`๐ค Client-Side STT Injected: "${textInput}" (Skipping Server STT)`);
      }
      // Languages from the visible quick selectors
      const targetLangQuick = document.getElementById('target-lang-quick');
      const sourceLangQuick = document.getElementById('source-lang-quick');
      const selectedTarget = targetLangQuick?.value || quickLangSelector?.value || 'French';
      const selectedSource = sourceLangQuick?.value || 'auto';
      const settings = {
        audio: base64,
        text_input: textInput, // only real transcribed text, never placeholders
        target_language: selectedTarget,
        source_language: selectedSource === 'auto' ? 'auto' : selectedSource,
        stt_engine: localStorage.getItem('sttEngine') || 'openai-whisper',
        model: localStorage.getItem('aiModel') || 'gpt-4o-mini',
        tts_engine: localStorage.getItem('ttsEngine') || 'seamless',
        openai_api_key: localStorage.getItem('openaiKey'),
        google_api_key: localStorage.getItem('googleKey'), // for Gemini STT
        openai_voice: localStorage.getItem('openaiVoice') || 'nova',
        elevenlabs_key: localStorage.getItem('elevenlabsKey'), // key name expected by backend
        use_grammar_correction: localStorage.getItem('grammarCorrectionEnabled') !== 'false', // default: enabled
        voice_gender_preference: localStorage.getItem('voiceGenderPreference') || 'auto'
      };
      console.log(`๐ Grammar Correction: ${settings.use_grammar_correction ? 'ENABLED (GPT)' : 'DISABLED (Direct Translation)'}`);
      console.log(`๐๏ธ Voice Gender Preference: ${settings.voice_gender_preference.toUpperCase()}`);
      // Voice cloning: disabled by default for speed (instant translations)
      const voiceCloneEnabled = localStorage.getItem('voiceCloneEnabled') === 'true';
      console.log(`๐ญ Voice Cloning Status: ${voiceCloneEnabled ? 'ENABLED' : 'DISABLED'}`);
      if (voiceCloneEnabled) {
        console.log('๐ค Voice Cloning ENABLED โ Sending audio sample to server');
        settings.voice_audio = `data:audio/wav;base64,${base64}`;
        settings.voice_cloning = true;
      } else {
        console.log('๐ Voice Cloning DISABLED โ Using gender-matched fallback voices');
        settings.voice_cloning = false;
      }
      const res = await axios.post('/process_audio', settings);
      if (res.data.translated_text) {
        const translation = res.data.translated_text;
        const userText = settings.text_input;
        console.log('โ Response received:', {
          original: userText?.substring(0, 50),
          translation: translation?.substring(0, 50),
          hasAudio: !!res.data.tts_audio
        });
        // HALLUCINATION CHECK - FIX: moved BEFORE any rendering/playback.
        // Previously the fake output was displayed on screen and its TTS
        // played before this block finally rejected the chat messages.
        if (isHallucination(userText) || isHallucination(translation)) {
          console.log(`๐ซ HALLUCINATION DETECTED - Skipping message creation`);
          console.log(` User: "${userText}" | Translation: "${translation}"`);
          statusText.innerText = 'Prรชt';
          return; // finally-block clears the flags and spinner
        }
        // Render the result panel
        const resultDisplay = document.getElementById('result-display');
        const originalDisplay = document.getElementById('original-display');
        const translationDisplay = document.getElementById('translation-display');
        const pronunciationDisplay = document.getElementById('pronunciation-display');
        const greeting = document.getElementById('greeting');
        if (resultDisplay && translationDisplay) {
          if (greeting) greeting.style.display = 'none';
          resultDisplay.style.display = 'block';
          if (originalDisplay) originalDisplay.innerText = userText || 'Audio input';
          // Pronunciation line only when it differs from the translation
          if (pronunciationDisplay) {
            const pronunciation = res.data.pronunciation;
            if (pronunciation && pronunciation !== translation) {
              pronunciationDisplay.innerText = pronunciation;
              pronunciationDisplay.style.display = 'block';
            } else {
              pronunciationDisplay.style.display = 'none';
            }
          }
          translationDisplay.innerText = translation;
          console.log('๐บ Result displayed on screen');
        }
        // Automatic language detection - remember for the next recording turn
        if (res.data.source_language_full && sourceLangSelector) {
          const detectedLang = res.data.source_language_full;
          detectedLanguage = detectedLang; // cache for future auto-selection
          console.log(`๐ Language auto-detected: ${detectedLang}`);
          if (recognition) {
            const langMap = {
              'English': 'en-US', 'French': 'fr-FR', 'Spanish': 'es-ES',
              'German': 'de-DE', 'Italian': 'it-IT', 'Portuguese': 'pt-PT',
              'Russian': 'ru-RU', 'Japanese': 'ja-JP', 'Korean': 'ko-KR',
              'Chinese': 'zh-CN', 'Arabic': 'ar-SA', 'Hindi': 'hi-IN',
              'Dutch': 'nl-NL', 'Polish': 'pl-PL', 'Turkish': 'tr-TR',
              'Indonesian': 'id-ID', 'Malay': 'ms-MY', 'Thai': 'th-TH',
              'Vietnamese': 'vi-VN', 'Bengali': 'bn-IN', 'Urdu': 'ur-PK',
              'Swahili': 'sw-KE', 'Hebrew': 'he-IL', 'Persian': 'fa-IR',
              'Ukrainian': 'uk-UA', 'Swedish': 'sv-SE', 'Greek': 'el-GR',
              'Czech': 'cs-CZ', 'Romanian': 'ro-RO', 'Hungarian': 'hu-HU',
              'Danish': 'da-DK', 'Finnish': 'fi-FI', 'Norwegian': 'no-NO',
              'Slovak': 'sk-SK', 'Filipino': 'fil-PH', 'Amharic': 'am-ET'
            };
            const speechLang = langMap[detectedLang] || navigator.language || 'en-US';
            console.log(`๐ค Speech recognition updated to: ${speechLang}`);
          }
        }
        // Hide greeting (covers the case where the result panel was missing)
        const greetingEl = document.getElementById('greeting');
        if (greetingEl) greetingEl.style.display = 'none';
        // Add the user message to the chat (tagged with source language)
        const sourceLang = res.data.source_language_full || 'Auto';
        const targetLang = res.data.target_language || 'Translation';
        createChatMessage('user', userText, null, null, sourceLang);
        // TTS playback - FIX: play exactly ONCE on the shared global player.
        // Previously a second `new Audio(...).play()` ran alongside this one,
        // so every reply was heard twice.
        let messageAudioSrc = null;
        if (res.data.tts_audio) {
          messageAudioSrc = `data:audio/mp3;base64,${res.data.tts_audio}`;
          audioPlayer.src = messageAudioSrc;
          audioPlayer.play().then(() => {
            console.log('๐ Audio playing!');
          }).catch(err => {
            console.log('Auto-play blocked:', err);
          });
          window.lastAudio = audioPlayer; // keep replay support
        }
        // Add the bot message with its own audio player + engine/latency info
        const info = {
          latency: ((Date.now() - startTime) / 1000).toFixed(2),
          stt: res.data.stt_engine,
          translation: res.data.translation_engine,
          tts: res.data.tts_engine
        };
        createChatMessage('bot', translation, messageAudioSrc, info, targetLang);
        // Status depends on the conversation mode
        if (window.continuousMode) {
          // Keep the button active in continuous mode
          statusText.innerText = 'รcoute en continu...';
          console.log('โ TTS gรฉnรฉrรฉ - En attente de la prochaine phrase');
        } else {
          // Normal mode: reset the button
          isRecording = false;
          recordBtn.classList.remove('active');
          recordBtn.disabled = false;
          statusText.innerText = 'Prรชt';
          console.log('โ TTS gรฉnรฉrรฉ - Bouton prรชt');
        }
      }
    } catch (e) {
      console.error("Erreur de traitement:", e);
      statusText.innerText = "Erreur de connexion";
      // Re-enable the button even on error
      isRecording = false;
      recordBtn.classList.remove('active');
      recordBtn.disabled = false;
    } finally {
      // Always leave the button usable and clear the processing state
      recordBtn.disabled = false;
      recordBtn.classList.remove('processing'); // stop blue spinner
      isProcessingAudio = false;                // release re-entrancy guard
      if (!window.continuousMode) {
        statusText.innerText = 'Prรชt';
      }
    }
  };
}
| // Logic de sauvegarde/chargement des paramรจtres | |
/**
 * Populates the settings modal and quick selectors from localStorage.
 * FIX (robustness): every getElementById access is now null-guarded - a
 * missing element used to throw and abort loading of all remaining settings.
 */
window.loadModalSettings = () => {
  const sttEngineEl = document.getElementById('stt-engine');
  if (sttEngineEl) sttEngineEl.value = localStorage.getItem('sttEngine') || 'openai-whisper';
  const openaiKeyEl = document.getElementById('openai-key');
  if (openaiKeyEl) openaiKeyEl.value = localStorage.getItem('openaiKey') || '';
  const sourceLangEl = document.getElementById('source-lang-selector');
  if (sourceLangEl && localStorage.getItem('sourceLang')) {
    sourceLangEl.value = localStorage.getItem('sourceLang');
  }
  // Default target language for bidirectional translation
  const savedTargetLang = localStorage.getItem('targetLang');
  if (savedTargetLang && quickLangSelector) {
    quickLangSelector.value = savedTargetLang;
  } else if (quickLangSelector) {
    // Default to French for Arabic โ French bidirectional translation
    quickLangSelector.value = 'French';
    console.log('๐ Default target language set to French for bidirectional translation');
  }
};
/**
 * Persists the settings modal values to localStorage.
 * FIX (robustness): each element access is null-guarded so one missing
 * element no longer throws and silently skips saving the remaining settings.
 */
window.saveModalSettings = () => {
  const sttEngineEl = document.getElementById('stt-engine');
  if (sttEngineEl) localStorage.setItem('sttEngine', sttEngineEl.value);
  const openaiKeyEl = document.getElementById('openai-key');
  if (openaiKeyEl) localStorage.setItem('openaiKey', openaiKeyEl.value);
  if (quickLangSelector) localStorage.setItem('targetLang', quickLangSelector.value);
  const sourceLangEl = document.getElementById('source-lang-selector');
  if (sourceLangEl) localStorage.setItem('sourceLang', sourceLangEl.value);
};
| // Removed: replay-trigger button (deleted from HTML) | |
| // Functionality removed as button no longer exists | |
// ===================================
// Button toggle logic
// ===================================
/**
 * Wires a two-state toggle button whose state is persisted in localStorage.
 *
 * @param {string} id - DOM id of the button.
 * @param {string} storageKey - localStorage key (stores 'true'/'false').
 * @param {boolean} defaultValue - State used when nothing is stored yet.
 * @param {Function} [onToggle] - Optional callback invoked with the new boolean state.
 */
function setupToggle(id, storageKey, defaultValue, onToggle) {
    const btn = document.getElementById(id);
    if (!btn) return;

    // Restore persisted state; absent key means "use the default".
    const saved = localStorage.getItem(storageKey);
    const isActive = saved === null ? defaultValue : saved === 'true';
    btn.classList.toggle('active', isActive);

    btn.addEventListener('click', (e) => {
        e.stopPropagation(); // Keep the click from reaching parent handlers
        const newState = !btn.classList.contains('active');
        btn.classList.toggle('active', newState);
        localStorage.setItem(storageKey, newState);
        if (onToggle) onToggle(newState);
        console.log(`๐ Toggle ${id}: ${newState ? 'ON' : 'OFF'}`);
    });
}
/**
 * Wires a button that cycles through a fixed list of values on each click.
 * The current value is persisted in localStorage; the first entry of
 * `values` is the default/neutral state (button shown inactive).
 *
 * @param {string} id - DOM id of the button.
 * @param {string} storageKey - localStorage key holding the current value.
 * @param {string[]} values - Ordered list of values to cycle through.
 * @param {Function} [onCycle] - Optional callback invoked with the new value.
 */
function setupCycle(id, storageKey, values, onCycle) {
    const btn = document.getElementById(id);
    if (!btn) return;

    // Restore the persisted value; fall back to the first option when the
    // stored value is missing or no longer part of the list.
    let currentVal = localStorage.getItem(storageKey) || values[0];
    if (!values.includes(currentVal)) currentVal = values[0];

    const updateVisual = (val) => {
        // Highlight only when a non-default value is selected.
        btn.classList.toggle('active', val !== values[0]);
        btn.title = `Mode: ${val.toUpperCase()}`; // Tooltip feedback
    };
    updateVisual(currentVal);

    btn.addEventListener('click', (e) => {
        e.stopPropagation();
        // Advance to the next value, wrapping around at the end.
        currentVal = values[(values.indexOf(currentVal) + 1) % values.length];
        localStorage.setItem(storageKey, currentVal);
        updateVisual(currentVal);
        if (onCycle) onCycle(currentVal);
        console.log(`๐ Cycle ${id}: ${currentVal}`);
        // Transient status-bar feedback (toast-style).
        statusText.innerText = `Mode: ${currentVal.toUpperCase()}`;
        setTimeout(() => statusText.innerText = 'Prรชt', 1500);
    });
}
// Initialize the toggle buttons once the DOM is ready.
document.addEventListener('DOMContentLoaded', () => {
    // Flash a status message, then restore the ready prompt after 1.5s.
    const flashStatus = (msg) => {
        statusText.innerText = msg;
        setTimeout(() => statusText.innerText = 'Prรชt', 1500);
    };

    // 1. Magic/Grammar correction toggle
    setupToggle('grammar-toggle', 'grammarCorrectionEnabled', true, (state) => {
        flashStatus(state ? 'โจ Correction: ON' : '๐ Correction: OFF');
    });

    // 2. Voice gender cycle (auto -> male -> female)
    setupCycle('voice-gender-toggle', 'voiceGenderPreference', ['auto', 'male', 'female']);

    // 3. Smart mode toggle (brain)
    setupToggle('smart-mode-toggle', 'smartModeEnabled', true, (state) => {
        flashStatus(state ? '๐ง Mode Smart: ON' : '๐ง Mode Smart: OFF');
    });
});
// Settings modal wiring: open/close behavior plus save & load of choices.
document.addEventListener('DOMContentLoaded', () => {
    const settingsBtn = document.getElementById('settings-trigger');
    const closeSettingsBtn = document.getElementById('close-settings');
    const settingsModal = document.getElementById('settings-modal');

    // --- Open ---
    if (settingsBtn && settingsModal) {
        settingsBtn.addEventListener('click', () => {
            settingsModal.style.display = 'flex';
            console.log('โ๏ธ Settings Opened');
        });
    } else {
        console.error('โ Settings Trigger or Modal NOT FOUND');
    }

    // --- Close via the dedicated button ---
    if (closeSettingsBtn && settingsModal) {
        closeSettingsBtn.addEventListener('click', () => {
            settingsModal.style.display = 'none';
        });
    }

    // --- Close by clicking outside the modal (backdrop) ---
    window.addEventListener('click', (e) => {
        if (e.target === settingsModal) settingsModal.style.display = 'none';
    });

    // --- Save & load of model choices ---
    const saveBtn = document.getElementById('save-settings');
    const aiSelector = document.getElementById('ai-model-selector');
    const ttsSelector = document.getElementById('tts-selector');

    // Restore persisted selections, with defaults on first run.
    if (aiSelector) aiSelector.value = localStorage.getItem('aiModel') || 'gpt-4o-mini';
    if (ttsSelector) ttsSelector.value = localStorage.getItem('ttsEngine') || 'openai';

    if (saveBtn) {
        saveBtn.addEventListener('click', () => {
            if (aiSelector) {
                localStorage.setItem('aiModel', aiSelector.value);
                console.log(`๐ง AI Model set to: ${aiSelector.value}`);
            }
            if (ttsSelector) {
                localStorage.setItem('ttsEngine', ttsSelector.value);
                console.log(`๐ฃ๏ธ TTS Engine set to: ${ttsSelector.value}`);
            }
            if (settingsModal) settingsModal.style.display = 'none';
            // Transient confirmation in the status bar.
            if (statusText) {
                statusText.innerText = 'โ Sauvegardรฉ!';
                setTimeout(() => statusText.innerText = 'Prรชt', 2000);
            }
        });
    }
});
// Initialize language settings (and the voice-cloning toggle) on page load.
document.addEventListener('DOMContentLoaded', () => {
    // Best-effort "Smart History" reset on the backend.
    // FIX: the original fetch calls were floating promises, so a failed
    // /clear_cache request surfaced as an unhandled promise rejection.
    const clearSmartHistory = () => {
        fetch('/clear_cache', { method: 'POST' })
            .catch((err) => console.warn('clear_cache request failed:', err));
    };

    // Restore the saved target language for bidirectional translation,
    // defaulting to French when nothing is stored yet.
    if (quickLangSelector) {
        const savedTargetLang = localStorage.getItem('targetLang');
        if (savedTargetLang) {
            quickLangSelector.value = savedTargetLang;
            console.log(`๐ Loaded saved target language: ${savedTargetLang}`);
        } else {
            quickLangSelector.value = 'French';
            console.log('๐ Default target language set to French for Arabic โ French bidirectional translation');
        }
        // HARD SYNC: changing Target clears "Smart History" to prevent confusion.
        quickLangSelector.addEventListener('change', function () {
            console.log('๐ Target Language Changed -> Clearing Smart History...');
            localStorage.setItem('targetLang', this.value);
            clearSmartHistory();
        });
    }

    // Force source to AUTO so the backend auto-detects the spoken language.
    const sourceLangSelector = document.getElementById('source-lang-selector');
    if (sourceLangSelector) {
        sourceLangSelector.value = 'auto';
        console.log('๐ฏ Source language set to AUTO for automatic detection');
        // HARD SYNC: changing Source clears "Smart History" to prevent confusion.
        sourceLangSelector.addEventListener('change', function () {
            console.log('๐ Source Language Changed -> Clearing Smart History...');
            localStorage.setItem('sourceLang', this.value);
            clearSmartHistory();
        });
    }

    // ===================================
    // Voice cloning toggle
    // ===================================
    let voiceCloneEnabled = localStorage.getItem('voiceCloneEnabled') !== 'false'; // Default: ON
    const voiceCloneToggle = document.getElementById('voice-clone-toggle');
    if (voiceCloneToggle) {
        // Reflect the persisted state, then flip it on every click.
        voiceCloneToggle.classList.toggle('active', voiceCloneEnabled);
        voiceCloneToggle.addEventListener('click', function () {
            voiceCloneEnabled = !voiceCloneEnabled;
            localStorage.setItem('voiceCloneEnabled', voiceCloneEnabled);
            this.classList.toggle('active', voiceCloneEnabled);
            console.log(voiceCloneEnabled ? '๐ญ Voice Cloning: ON' : '๐ญ Voice Cloning: OFF');
        });
    }
});