<!-- advanced-audio / index.html -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Advanced Audio Visualizer with Transcription</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/css/all.min.css" crossorigin="anonymous">
<style>
.visualizer-container {
position: relative;
width: 100%;
height: 300px;
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
border-radius: 12px;
overflow: hidden;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3);
}
.bar {
position: absolute;
bottom: 0;
width: 8px;
background: linear-gradient(to top, #00b4db, #0083b0);
border-radius: 4px 4px 0 0;
transition: height 0.05s ease-out;
box-shadow: 0 0 10px rgba(0, 180, 219, 0.7);
filter: drop-shadow(0 0 5px currentColor);
}
.audio-wave {
position: absolute;
bottom: 0;
left: 0;
width: 100%;
height: 100px;
background: linear-gradient(to top, rgba(0, 180, 219, 0.1), transparent);
clip-path: polygon(0% 100%, 100% 100%, 100% 0%, 0% 0%);
}
#audioPlayer {
display: none;
}
.progress-container {
height: 4px;
background-color: rgba(255, 255, 255, 0.1);
border-radius: 2px;
overflow: hidden;
}
.progress-bar {
height: 100%;
background: linear-gradient(to right, #00b4db, #0083b0);
width: 0%;
transition: width 0.1s linear;
}
.loading-spinner {
border: 3px solid rgba(255, 255, 255, 0.1);
border-radius: 50%;
border-top: 3px solid #00b4db;
width: 30px;
height: 30px;
animation: spin 1s linear infinite;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
.theme-neon .bar {
background: linear-gradient(to top, #00ff9d, #00f7ff);
box-shadow: 0 0 10px rgba(0, 255, 157, 0.7);
}
.theme-sunset .bar {
background: linear-gradient(to top, #ff7b00, #ff00aa);
box-shadow: 0 0 10px rgba(255, 0, 170, 0.7);
}
.theme-ocean .bar {
background: linear-gradient(to top, #0061ff, #00ffea);
box-shadow: 0 0 10px rgba(0, 97, 255, 0.7);
}
.light-mode {
background-color: #f8f9fa;
color: #212529;
}
.light-mode .bg-gray-800 {
background-color: #e9ecef !important;
}
.light-mode .text-gray-400 {
color: #6c757d !important;
}
.light-mode .visualizer-container {
background: linear-gradient(135deg, #e9ecef 0%, #dee2e6 100%);
}
.light-mode .progress-container {
background-color: rgba(0, 0, 0, 0.1);
}
.tooltip {
position: relative;
display: inline-block;
}
.tooltip .tooltip-text {
visibility: hidden;
width: 120px;
background-color: #333;
color: #fff;
text-align: center;
border-radius: 6px;
padding: 5px;
position: absolute;
z-index: 1;
bottom: 125%;
left: 50%;
margin-left: -60px;
opacity: 0;
transition: opacity 0.3s;
}
.tooltip:hover .tooltip-text {
visibility: visible;
opacity: 1;
}
.light-mode .tooltip .tooltip-text {
background-color: #555;
}
/* Modern visualizer styles */
.modern-visualizer {
position: relative;
width: 100%;
height: 300px;
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
border-radius: 12px;
overflow: hidden;
box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3);
}
.modern-bar {
position: absolute;
bottom: 0;
width: 10px;
background: linear-gradient(to top, #00b4db, #0083b0);
border-radius: 5px 5px 0 0;
transition: height 0.05s ease-out, transform 0.1s ease-out;
box-shadow: 0 0 10px rgba(0, 180, 219, 0.7);
transform-origin: bottom center;
}
.modern-visualizer .audio-wave {
position: absolute;
bottom: 0;
left: 0;
width: 100%;
height: 100px;
background: linear-gradient(to top, rgba(0, 180, 219, 0.1), transparent);
clip-path: polygon(0% 100%, 100% 100%, 100% 0%, 0% 0%);
}
/* Recording indicator */
.recording-indicator {
display: inline-block;
width: 12px;
height: 12px;
background-color: #ff0000;
border-radius: 50%;
animation: pulse 1.5s infinite;
margin-right: 8px;
}
@keyframes pulse {
0% { opacity: 1; }
50% { opacity: 0.3; }
100% { opacity: 1; }
}
/* Effect buttons */
.effect-btn {
transition: all 0.2s ease;
}
.effect-btn.active {
transform: scale(1.1);
box-shadow: 0 0 15px currentColor;
}
/* Audio isolation indicator */
.isolation-indicator {
display: inline-block;
width: 12px;
height: 12px;
background-color: #00ff00;
border-radius: 50%;
margin-right: 8px;
}
/* Visualizer styles */
.visualizer-style-1 {
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
}
.visualizer-style-2 {
background: linear-gradient(135deg, #0f2027 0%, #203a43 50%, #2c5364 100%);
}
.visualizer-style-3 {
background: linear-gradient(135deg, #000000 0%, #434343 100%);
}
.visualizer-style-4 {
background: linear-gradient(135deg, #0f0c29 0%, #302b63 50%, #24243e 100%);
}
/* Screen styles */
.screen-style-1 {
background-color: #1a1a2e;
}
.screen-style-2 {
background-color: #0f0f1e;
}
.screen-style-3 {
background-color: #121212;
}
.screen-style-4 {
background-color: #000000;
}
/* Recording visualization */
.recording-visualization {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: rgba(255, 0, 0, 0.1);
display: none;
z-index: 10;
}
.recording-visualization.active {
display: block;
animation: recordingPulse 2s infinite;
}
@keyframes recordingPulse {
0% { background: rgba(255, 0, 0, 0.1); }
50% { background: rgba(255, 0, 0, 0.3); }
100% { background: rgba(255, 0, 0, 0.1); }
}
/* Reverse effect */
.reverse-effect {
transform: scaleX(-1);
}
/* Transcription styles */
.transcript-container {
max-height: 300px;
overflow-y: auto;
background-color: rgba(0, 0, 0, 0.2);
border-radius: 8px;
padding: 15px;
margin-bottom: 20px;
border: 1px solid rgba(255, 255, 255, 0.1);
}
.transcript-text {
white-space: pre-wrap;
line-height: 1.6;
font-size: 16px;
}
.transcript-active {
background-color: rgba(0, 180, 219, 0.2);
border-left: 3px solid #00b4db;
padding-left: 10px;
margin-left: -13px;
}
.transcript-controls {
display: flex;
gap: 10px;
margin-bottom: 15px;
}
.transcript-language {
flex-grow: 1;
}
.transcript-status {
font-size: 14px;
color: #00b4db;
margin-top: 5px;
font-style: italic;
}
.light-mode .transcript-container {
background-color: rgba(0, 0, 0, 0.05);
border: 1px solid rgba(0, 0, 0, 0.1);
}
.light-mode .transcript-active {
background-color: rgba(0, 180, 219, 0.1);
}
.light-mode .transcript-status {
color: #007bff;
}
</style>
</head>
<body class="bg-gray-900 text-white min-h-screen flex flex-col items-center justify-center p-4 transition-colors duration-300 screen-style-1">
<div class="max-w-4xl w-full">
<div class="flex justify-between items-center mb-4">
<h1 class="text-4xl font-bold bg-gradient-to-r from-cyan-400 to-pink-500 bg-clip-text text-transparent">
Advanced Audio Visualizer
</h1>
<div class="flex items-center space-x-4">
<button id="fullscreenBtn" class="bg-gray-800 hover:bg-gray-700 p-2 rounded-lg transition-all duration-300">
<i class="fas fa-expand"></i>
</button>
<button id="themeToggle" class="bg-gray-800 hover:bg-gray-700 p-2 rounded-lg transition-all duration-300">
<i class="fas fa-moon"></i>
</button>
</div>
</div>
<p class="text-gray-400 text-center mb-8">Visualization reacts to frequency with recording and transcription features</p>
<!-- Modern visualizer container -->
<div class="modern-visualizer mb-4 visualizer-style-1" id="visualizer">
<div class="recording-visualization" id="recordingVisualization"></div>
<div class="audio-wave" id="audioWave"></div>
</div>
<div class="progress-container mb-4">
<div class="progress-bar" id="progressBar"></div>
</div>
<div class="flex items-center justify-between mb-4">
<span id="currentTime" class="text-sm text-gray-400">0:00</span>
<span id="duration" class="text-sm text-gray-400">0:00</span>
</div>
<div class="grid grid-cols-1 md:grid-cols-3 gap-4 mb-8">
<div class="flex flex-col md:flex-row gap-4 items-center justify-center md:col-span-2">
<div class="flex-1 w-full">
<input type="file" id="audioUpload" accept="audio/*" class="hidden" />
<label for="audioUpload" class="cursor-pointer bg-gradient-to-r from-cyan-500 to-blue-500 hover:from-cyan-600 hover:to-blue-600 text-white font-bold py-3 px-6 rounded-lg flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl w-full">
<i class="fas fa-music mr-2"></i>
<span id="uploadText">Choose Audio File</span>
<div id="loadingSpinner" class="loading-spinner ml-2 hidden"></div>
</label>
</div>
<div class="flex-1 w-full">
<button id="playButton" class="bg-gradient-to-r from-pink-500 to-purple-500 hover:from-pink-600 hover:to-purple-600 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl">
<i class="fas fa-play mr-2"></i> Play
</button>
</div>
</div>
<div class="flex items-center">
<i class="fas fa-volume-up mr-2 text-gray-400"></i>
<input type="range" id="volumeControl" min="0" max="1" step="0.01" value="0.7" class="w-full accent-cyan-500">
</div>
</div>
<audio id="audioPlayer" controls></audio>
<!-- Transcription section -->
<div class="bg-gray-800 p-4 rounded-lg mb-4">
<h3 class="text-purple-400 font-semibold mb-4"><i class="fas fa-comment-dots mr-2"></i>Live Transcription</h3>
<div class="transcript-controls">
<select id="transcriptLanguage" class="transcript-language bg-gray-700 text-white rounded p-2">
<option value="en-US">English (US)</option>
<option value="es-ES">Spanish (Spain)</option>
<option value="fr-FR">French (France)</option>
<option value="de-DE">German (Germany)</option>
<option value="it-IT">Italian (Italy)</option>
<option value="pt-BR">Portuguese (Brazil)</option>
<option value="ru-RU">Russian (Russia)</option>
<option value="ja-JP">Japanese (Japan)</option>
<option value="zh-CN">Chinese (China)</option>
<option value="ar-SA">Arabic (Saudi Arabia)</option>
</select>
<button id="startTranscriptBtn" class="bg-gradient-to-r from-green-500 to-green-600 hover:from-green-600 hover:to-green-700 text-white font-bold py-2 px-4 rounded transition-all duration-300 shadow hover:shadow-lg">
<i class="fas fa-microphone mr-2"></i> Start
</button>
<button id="stopTranscriptBtn" class="bg-gradient-to-r from-red-500 to-red-600 hover:from-red-600 hover:to-red-700 text-white font-bold py-2 px-4 rounded transition-all duration-300 shadow hover:shadow-lg" disabled>
<i class="fas fa-stop mr-2"></i> Stop
</button>
<button id="downloadTranscriptBtn" class="bg-gradient-to-r from-blue-500 to-blue-600 hover:from-blue-600 hover:to-blue-700 text-white font-bold py-2 px-4 rounded transition-all duration-300 shadow hover:shadow-lg" disabled>
<i class="fas fa-download mr-2"></i> Save
</button>
</div>
<p id="transcriptStatus" class="transcript-status">Transcription is ready</p>
<div class="transcript-container">
<div id="transcriptText" class="transcript-text"></div>
</div>
</div>
<!-- Recording section -->
<div class="bg-gray-800 p-4 rounded-lg mb-4">
<h3 class="text-pink-400 font-semibold mb-4"><i class="fas fa-microphone mr-2"></i>Recording</h3>
<div class="grid grid-cols-1 md:grid-cols-3 gap-4">
<div>
<button id="recordButton" class="bg-gradient-to-r from-red-500 to-red-600 hover:from-red-600 hover:to-red-700 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl">
<span id="recordingIndicator" class="recording-indicator hidden"></span>
<span id="recordText">Start Recording</span>
</button>
</div>
<div>
<button id="stopRecordButton" class="bg-gray-700 hover:bg-gray-600 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl" disabled>
<i class="fas fa-stop mr-2"></i> Stop Recording
</button>
</div>
<div>
<button id="downloadButton" class="bg-gradient-to-r from-green-500 to-green-600 hover:from-green-600 hover:to-green-700 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl" disabled>
<i class="fas fa-download mr-2"></i> Download
</button>
</div>
</div>
<div class="mt-4">
<p id="recordingStatus" class="text-sm text-gray-400">Ready to record</p>
</div>
</div>
<!-- Audio processing section -->
<div class="bg-gray-800 p-4 rounded-lg mb-4">
<h3 class="text-cyan-400 font-semibold mb-4"><i class="fas fa-sliders-h mr-2"></i>Audio Processing</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<button id="isolateMusicBtn" class="effect-btn bg-gradient-to-r from-purple-500 to-blue-500 hover:from-purple-600 hover:to-blue-600 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl">
<span id="isolationIndicator" class="isolation-indicator hidden"></span>
<i class="fas fa-music mr-2"></i> Isolate Music
</button>
</div>
<div>
<button id="noiseReductionBtn" class="effect-btn bg-gradient-to-r from-blue-500 to-cyan-500 hover:from-blue-600 hover:to-cyan-600 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl">
<i class="fas fa-volume-off mr-2"></i> Reduce Noise
</button>
</div>
<div>
<button id="reverseBtn" class="effect-btn bg-gradient-to-r from-yellow-500 to-orange-500 hover:from-yellow-600 hover:to-orange-600 text-white font-bold py-3 px-6 rounded-lg w-full flex items-center justify-center transition-all duration-300 shadow-lg hover:shadow-xl">
<i class="fas fa-exchange-alt mr-2"></i> Reverse Audio
</button>
</div>
</div>
<div class="mt-4">
<p id="processingStatus" class="text-sm text-gray-400">No effects applied</p>
</div>
</div>
<!-- Visualizer and Screen Styles -->
<div class="bg-gray-800 p-4 rounded-lg mb-4">
<h3 class="text-purple-400 font-semibold mb-4"><i class="fas fa-palette mr-2"></i>Visual Styles</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<h4 class="text-cyan-400 text-sm mb-2">Visualizer Style</h4>
<div class="grid grid-cols-2 gap-2">
<button data-style="1" class="visualizer-style-btn bg-gradient-to-r from-cyan-500 to-blue-500 text-white py-2 px-4 rounded">Style 1</button>
<button data-style="2" class="visualizer-style-btn bg-gradient-to-r from-blue-500 to-indigo-500 text-white py-2 px-4 rounded">Style 2</button>
<button data-style="3" class="visualizer-style-btn bg-gradient-to-r from-gray-500 to-gray-700 text-white py-2 px-4 rounded">Style 3</button>
<button data-style="4" class="visualizer-style-btn bg-gradient-to-r from-indigo-500 to-purple-500 text-white py-2 px-4 rounded">Style 4</button>
</div>
</div>
<div>
<h4 class="text-cyan-400 text-sm mb-2">Screen Style</h4>
<div class="grid grid-cols-2 gap-2">
<button data-screen="1" class="screen-style-btn bg-gradient-to-r from-gray-800 to-gray-900 text-white py-2 px-4 rounded">Dark 1</button>
<button data-screen="2" class="screen-style-btn bg-gradient-to-r from-gray-900 to-gray-800 text-white py-2 px-4 rounded">Dark 2</button>
<button data-screen="3" class="screen-style-btn bg-gradient-to-r from-gray-700 to-gray-900 text-white py-2 px-4 rounded">Dark 3</button>
<button data-screen="4" class="screen-style-btn bg-gradient-to-r from-black to-gray-800 text-white py-2 px-4 rounded">Dark 4</button>
</div>
</div>
</div>
</div>
<div class="grid grid-cols-1 md:grid-cols-3 gap-4 mb-8">
<div class="bg-gray-800 p-4 rounded-lg">
<h3 class="text-cyan-400 font-semibold mb-2"><i class="fas fa-sliders-h mr-2"></i>Controls</h3>
<div class="space-y-4">
<div>
<label class="block text-gray-400 mb-1 flex justify-between">
<span>Sensitivity <span class="tooltip"><i class="fas fa-info-circle"></i><span class="tooltip-text">Adjust beat detection sensitivity</span></span></span>
<span id="sensitivityValue">0.5</span>
</label>
<input type="range" id="sensitivity" min="0.1" max="1" step="0.05" value="0.5" class="w-full accent-cyan-500">
</div>
<div>
<label class="block text-gray-400 mb-1 flex justify-between">
<span>Bar Count <span class="tooltip"><i class="fas fa-info-circle"></i><span class="tooltip-text">Number of frequency bars</span></span></span>
<span id="barCountValue">80</span>
</label>
<input type="range" id="barCount" min="20" max="200" step="10" value="80" class="w-full accent-cyan-500">
</div>
<div>
<label class="block text-gray-400 mb-1 flex justify-between">
<span>Bar Spacing <span class="tooltip"><i class="fas fa-info-circle"></i><span class="tooltip-text">Space between bars</span></span></span>
<span id="barSpacingValue">1</span>
</label>
<input type="range" id="barSpacing" min="0" max="2" step="0.1" value="1" class="w-full accent-cyan-500">
</div>
<div>
<label class="block text-gray-400 mb-1 flex justify-between">
<span>Reverb <span class="tooltip"><i class="fas fa-info-circle"></i><span class="tooltip-text">Add space effect to audio</span></span></span>
<span id="reverbValue">0</span>
</label>
<input type="range" id="reverb" min="0" max="1" step="0.1" value="0" class="w-full accent-cyan-500">
</div>
</div>
</div>
<div class="bg-gray-800 p-4 rounded-lg">
<h3 class="text-purple-400 font-semibold mb-2"><i class="fas fa-chart-bar mr-2"></i>Audio Info</h3>
<div class="space-y-2">
<p>Status: <span id="status" class="text-pink-400">Waiting for audio</span></p>
<p>Volume: <span id="volumeLevel">70</span>%</p>
<p>Duration: <span id="durationInfo">0:00</span></p>
<p>File: <span id="fileName" class="truncate">None</span></p>
<p>Recording: <span id="recordingTime">00:00</span></p>
</div>
<h3 class="text-purple-400 font-semibold mt-4 mb-2"><i class="fas fa-palette mr-2"></i>Theme</h3>
<select id="themeSelect" class="w-full bg-gray-700 text-white rounded p-2">
<option value="default">Default</option>
<option value="neon">Neon</option>
<option value="sunset">Sunset</option>
<option value="ocean">Ocean</option>
</select>
<h3 class="text-purple-400 font-semibold mt-4 mb-2"><i class="fas fa-sliders-h mr-2"></i>EQ Preset</h3>
<select id="eqPreset" class="w-full bg-gray-700 text-white rounded p-2">
<option value="flat">Flat</option>
<option value="bass">Bass Boost</option>
<option value="treble">Treble Boost</option>
<option value="vocal">Vocal Boost</option>
</select>
</div>
<div class="bg-gray-800 p-4 rounded-lg">
<h3 class="text-pink-400 font-semibold mb-2"><i class="fas fa-info-circle mr-2"></i>About</h3>
<p class="text-gray-400 text-sm mb-4">This advanced visualization analyzes audio frequencies with recording and transcription features.</p>
<div id="errorMessage" class="hidden bg-red-900 text-white p-2 rounded text-sm mb-4"></div>
<div class="text-xs text-gray-500 mt-4">
<p>Keyboard shortcuts:</p>
<p>Space: Play/Pause</p>
<p>↑/↓: Adjust sensitivity</p>
<p>←/→: Seek audio</p>
<p>R: Start/Stop recording</p>
<p>T: Start/Stop transcription</p>
</div>
</div>
</div>
</div>
<script>
document.addEventListener('DOMContentLoaded', () => {
// Audio elements
let audioContext = null;
let audioSource = null;
let analyser = null;
let dataArray = null;
let isPlaying = false;
let animationId = null;
let reverbNode = null;
let eqNodes = [];
let mediaRecorder = null;
let recordedChunks = [];
let recordingStartTime = 0;
let recordingInterval = null;
let audioIsolationActive = false;
let noiseReductionActive = false;
let reverseActive = false;
// Transcription elements
let speechRecognizer = null;
let isTranscribing = false;
let transcriptText = '';
let transcriptStartTime = 0;
let transcriptInterval = null;
let finalTranscript = '';
// DOM elements
const visualizer = document.getElementById('visualizer');
const audioWave = document.getElementById('audioWave');
const playButton = document.getElementById('playButton');
const audioUpload = document.getElementById('audioUpload');
const audioPlayer = document.getElementById('audioPlayer');
const statusElement = document.getElementById('status');
const volumeLevelElement = document.getElementById('volumeLevel');
const sensitivityInput = document.getElementById('sensitivity');
const barCountInput = document.getElementById('barCount');
const barSpacingInput = document.getElementById('barSpacing');
const reverbInput = document.getElementById('reverb');
const volumeControl = document.getElementById('volumeControl');
const progressBar = document.getElementById('progressBar');
const currentTimeElement = document.getElementById('currentTime');
const durationElement = document.getElementById('duration');
const durationInfoElement = document.getElementById('durationInfo');
const fileNameElement = document.getElementById('fileName');
const loadingSpinner = document.getElementById('loadingSpinner');
const uploadText = document.getElementById('uploadText');
const errorMessage = document.getElementById('errorMessage');
const themeSelect = document.getElementById('themeSelect');
const eqPreset = document.getElementById('eqPreset');
const fullscreenBtn = document.getElementById('fullscreenBtn');
const themeToggle = document.getElementById('themeToggle');
const recordButton = document.getElementById('recordButton');
const stopRecordButton = document.getElementById('stopRecordButton');
const downloadButton = document.getElementById('downloadButton');
const recordingIndicator = document.getElementById('recordingIndicator');
const recordText = document.getElementById('recordText');
const recordingStatus = document.getElementById('recordingStatus');
const recordingTime = document.getElementById('recordingTime');
const isolateMusicBtn = document.getElementById('isolateMusicBtn');
const noiseReductionBtn = document.getElementById('noiseReductionBtn');
const reverseBtn = document.getElementById('reverseBtn');
const processingStatus = document.getElementById('processingStatus');
const isolationIndicator = document.getElementById('isolationIndicator');
const sensitivityValue = document.getElementById('sensitivityValue');
const barCountValue = document.getElementById('barCountValue');
const barSpacingValue = document.getElementById('barSpacingValue');
const reverbValue = document.getElementById('reverbValue');
const recordingVisualization = document.getElementById('recordingVisualization');
const visualizerStyleBtns = document.querySelectorAll('.visualizer-style-btn');
const screenStyleBtns = document.querySelectorAll('.screen-style-btn');
// Transcription elements
const startTranscriptBtn = document.getElementById('startTranscriptBtn');
const stopTranscriptBtn = document.getElementById('stopTranscriptBtn');
const downloadTranscriptBtn = document.getElementById('downloadTranscriptBtn');
const transcriptLanguage = document.getElementById('transcriptLanguage');
const transcriptStatus = document.getElementById('transcriptStatus');
const transcriptTextElement = document.getElementById('transcriptText');
let barCount = parseInt(barCountInput.value);
let bars = [];
let sensitivity = parseFloat(sensitivityInput.value);
let barSpacing = parseFloat(barSpacingInput.value);
let reverbAmount = parseFloat(reverbInput.value);
let currentTheme = 'default';
let isDarkMode = true;
// Create initial bars
// Rebuild the visualizer's frequency bars to match the current
// barCount / barSpacing settings. Any previously created bars are
// removed first; the new bars start at height 0 and are collected
// into the module-level `bars` array for the animation loop.
function createBars() {
  // Drop the previous generation of bars.
  for (const stale of visualizer.querySelectorAll('.modern-bar')) {
    stale.remove();
  }
  bars = [];
  const containerWidth = visualizer.clientWidth;
  // Keep bars at least 4px wide even for very high bar counts.
  const barWidth = Math.max(4, (containerWidth / barCount) - barSpacing);
  let i = 0;
  while (i < barCount) {
    const el = document.createElement('div');
    el.className = 'modern-bar';
    el.style.left = `${i * (barWidth + barSpacing)}px`;
    el.style.width = `${barWidth}px`;
    el.style.height = '0px';
    visualizer.appendChild(el);
    bars.push(el);
    i += 1;
  }
}
createBars();
// Initialize audio context on first user interaction
// Lazily create the shared AudioContext (browsers require this to happen
// after a user gesture) and wire up the analyser graph on first use.
// Returns the context, creating it only once.
function initAudioContext() {
  if (audioContext) return audioContext;
  const Ctor = window.AudioContext || window.webkitAudioContext;
  audioContext = new Ctor();
  setupAudio();
  return audioContext;
}
// Setup audio context and analyzer
// Build (or rebuild) the analyser chain for the <audio> element:
// source -> analyser -> destination, with no effects by default.
// A MediaElementAudioSourceNode may be created only ONCE per media
// element per AudioContext, so the source node is cached and reused on
// later calls. (The original re-created it every time and would throw
// InvalidStateError when setupAudio ran a second time, e.g. after a
// finished recording was loaded into the player.)
function setupAudio() {
  if (!audioContext) return;
  if (audioSource) {
    // Reuse the existing source; just detach it from the old graph.
    audioSource.disconnect();
  } else {
    audioSource = audioContext.createMediaElementSource(audioPlayer);
  }
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 2048;
  // Connect directly to analyser without any effects by default
  audioSource.connect(analyser);
  analyser.connect(audioContext.destination);
  dataArray = new Uint8Array(analyser.frequencyBinCount);
}
// Setup reverb effect
// (Re)build the convolver-based reverb stage from the current
// reverbAmount (0..1) and splice it between the source and the analyser.
// Fixes: reverbAmount defaults to 0, and createBuffer() throws for a
// zero-frame buffer — an amount of 0 now means "reverb off" and restores
// the dry chain instead. Also removes an unused loop local from the
// impulse generator.
function setupReverb() {
  if (!audioContext) return;
  // Tear down any previous reverb stage.
  if (reverbNode) {
    reverbNode.disconnect();
    reverbNode = null;
  }
  if (reverbAmount <= 0) {
    // Reverb disabled: rewire the dry path and bail out.
    if (audioSource && analyser) {
      audioSource.disconnect();
      audioSource.connect(analyser);
      analyser.connect(audioContext.destination);
    }
    return;
  }
  reverbNode = audioContext.createConvolver();
  // Synthesize a simple exponentially-decaying white-noise impulse
  // response; duration scales with the reverb amount (up to 2 s).
  const length = Math.max(1, Math.floor(audioContext.sampleRate * reverbAmount * 2));
  const impulse = audioContext.createBuffer(2, length, audioContext.sampleRate);
  const leftChannel = impulse.getChannelData(0);
  const rightChannel = impulse.getChannelData(1);
  for (let i = 0; i < length; i++) {
    const decay = Math.pow(1 - i / length, 10);
    leftChannel[i] = (Math.random() * 2 - 1) * decay;
    rightChannel[i] = (Math.random() * 2 - 1) * decay;
  }
  reverbNode.buffer = impulse;
  // Rewire: source -> reverb -> analyser -> speakers.
  // NOTE(review): this path bypasses any active EQ nodes, matching the
  // original wiring — confirm that is intended.
  if (audioSource && analyser) {
    audioSource.disconnect();
    audioSource.connect(reverbNode);
    reverbNode.connect(analyser);
    analyser.connect(audioContext.destination);
  }
}
// Setup EQ filters
// Rebuild the EQ stage from the selected preset and rewire the graph:
// source -> [eq filters...] -> (reverb) -> analyser -> destination.
// Fixes: the original returned early for the 'flat' preset WITHOUT
// reconnecting the source, leaving it wired into the now-orphaned old
// EQ node (audible silence after switching back to Flat). The graph is
// now always rewired, with an empty filter list for 'flat'.
function setupEQ() {
  if (!audioContext) return;
  // Tear down the previous EQ stage.
  eqNodes.forEach(node => node.disconnect());
  eqNodes = [];
  const preset = eqPreset.value;
  if (preset === 'bass') {
    // Bass boost: low-shelf below 250 Hz, +8 dB.
    const lowPass = audioContext.createBiquadFilter();
    lowPass.type = "lowshelf";
    lowPass.frequency.value = 250;
    lowPass.gain.value = 8;
    eqNodes.push(lowPass);
  } else if (preset === 'treble') {
    // Treble boost: high-shelf above 4 kHz, +8 dB.
    const highPass = audioContext.createBiquadFilter();
    highPass.type = "highshelf";
    highPass.frequency.value = 4000;
    highPass.gain.value = 8;
    eqNodes.push(highPass);
  } else if (preset === 'vocal') {
    // Vocal boost: peaking filter around 1.5 kHz, +10 dB.
    const bandPass = audioContext.createBiquadFilter();
    bandPass.type = "peaking";
    bandPass.frequency.value = 1500;
    bandPass.Q.value = 1;
    bandPass.gain.value = 10;
    eqNodes.push(bandPass);
  }
  // 'flat' (or any unknown preset) leaves eqNodes empty, so the rewiring
  // below degenerates to a direct source -> analyser connection.
  if (audioSource && analyser) {
    audioSource.disconnect();
    let lastNode = audioSource;
    eqNodes.forEach(node => {
      lastNode.connect(node);
      lastNode = node;
    });
    lastNode.connect(reverbNode || analyser);
    if (reverbNode) {
      reverbNode.connect(analyser);
    }
    analyser.connect(audioContext.destination);
  }
}
// Setup audio isolation (simplified - in a real app you'd use a proper algorithm)
// Toggle a crude "music isolation" stage based on the module-level
// audioIsolationActive flag: when on, the source is routed through a
// band-pass filter centred at 1 kHz before the EQ/reverb chain; when
// off, the unfiltered chain is restored. Also updates the status line,
// indicator dot, and button highlight.
// NOTE(review): each activation creates a fresh BiquadFilterNode and the
// previous one is never explicitly disconnected — presumably left for
// GC; verify nodes don't accumulate across repeated toggles.
function setupAudioIsolation() {
if (!audioContext || !audioSource) return;
// In a real implementation, you would use a proper algorithm or library
// like WebAudio's ChannelMerger/Splitter or a third-party solution
// This is just a simplified demonstration
if (audioIsolationActive) {
// Apply a bandpass filter to isolate music frequencies
const bandPass = audioContext.createBiquadFilter();
bandPass.type = "bandpass";
bandPass.frequency.value = 1000;
bandPass.Q.value = 1;
// Reconnect audio with isolation
audioSource.disconnect();
audioSource.connect(bandPass);
// Chain any active EQ nodes after the band-pass filter.
let lastNode = bandPass;
eqNodes.forEach(node => {
lastNode.connect(node);
lastNode = node;
});
// Route through reverb when present, otherwise straight to the analyser.
lastNode.connect(reverbNode || analyser);
if (reverbNode) {
reverbNode.connect(analyser);
}
processingStatus.textContent = "Music isolation active";
isolationIndicator.classList.remove('hidden');
isolateMusicBtn.classList.add('active');
} else {
// Remove isolation: rebuild the chain without the band-pass stage.
audioSource.disconnect();
let lastNode = audioSource;
eqNodes.forEach(node => {
lastNode.connect(node);
lastNode = node;
});
lastNode.connect(reverbNode || analyser);
if (reverbNode) {
reverbNode.connect(analyser);
}
processingStatus.textContent = "No effects applied";
isolationIndicator.classList.add('hidden');
isolateMusicBtn.classList.remove('active');
}
}
// Setup noise reduction (simplified - in a real app you'd use a proper algorithm)
// Toggle a simplified "noise reduction" stage based on the module-level
// noiseReductionActive flag: when on, the source passes through a
// high-pass filter at 200 Hz (cuts low-frequency rumble) ahead of the
// EQ/reverb chain; when off, the unfiltered chain is restored. Also
// updates the status line and button highlight.
// NOTE(review): like the isolation stage, each activation creates a new
// BiquadFilterNode without disconnecting the old one — confirm GC
// reclaims them on repeated toggles.
function setupNoiseReduction() {
if (!audioContext || !audioSource) return;
// In a real implementation, you would use a proper noise reduction algorithm
// This is just a simplified demonstration
if (noiseReductionActive) {
// Apply a highpass filter to reduce low-frequency noise
const highPass = audioContext.createBiquadFilter();
highPass.type = "highpass";
highPass.frequency.value = 200;
highPass.Q.value = 1;
// Reconnect audio with noise reduction
audioSource.disconnect();
audioSource.connect(highPass);
// Chain any active EQ nodes after the high-pass filter.
let lastNode = highPass;
eqNodes.forEach(node => {
lastNode.connect(node);
lastNode = node;
});
// Route through reverb when present, otherwise straight to the analyser.
lastNode.connect(reverbNode || analyser);
if (reverbNode) {
reverbNode.connect(analyser);
}
processingStatus.textContent = "Noise reduction active";
noiseReductionBtn.classList.add('active');
} else {
// Remove noise reduction: rebuild the chain without the filter stage.
audioSource.disconnect();
let lastNode = audioSource;
eqNodes.forEach(node => {
lastNode.connect(node);
lastNode = node;
});
lastNode.connect(reverbNode || analyser);
if (reverbNode) {
reverbNode.connect(analyser);
}
processingStatus.textContent = "No effects applied";
noiseReductionBtn.classList.remove('active');
}
}
// Setup reverse effect
// Toggle the "reverse" effect from the module-level reverseActive flag.
// Purely visual: mirrors the visualizer horizontally via CSS; the audio
// itself is not reversed. Also updates the status line and button state.
function setupReverseEffect() {
  if (!audioContext || !audioSource) return;
  // classList.toggle(name, force) adds when active, removes otherwise.
  visualizer.classList.toggle('reverse-effect', reverseActive);
  reverseBtn.classList.toggle('active', reverseActive);
  processingStatus.textContent = reverseActive
    ? "Reverse effect active"
    : "No effects applied";
}
// Handle file upload
// Handle a newly chosen audio file: validate type and size, point the
// hidden <audio> element at a blob URL for it, and finish setup once the
// file is playable. Fixes: the previous blob object URL is now revoked
// before a new one is created, so repeated uploads no longer leak the
// old file's memory for the lifetime of the page.
audioUpload.addEventListener('change', (e) => {
  const file = e.target.files[0];
  if (!file) return;
  // Show loading state
  uploadText.textContent = 'Processing...';
  loadingSpinner.classList.remove('hidden');
  errorMessage.classList.add('hidden');
  // Check file type
  if (!file.type.match('audio.*')) {
    showError('Please select an audio file');
    return;
  }
  // Check file size (limit to 10MB)
  if (file.size > 10 * 1024 * 1024) {
    showError('File too large (max 10MB)');
    return;
  }
  // Release the previous blob URL (if any) before replacing it.
  if (audioPlayer.src && audioPlayer.src.startsWith('blob:')) {
    URL.revokeObjectURL(audioPlayer.src);
  }
  const fileURL = URL.createObjectURL(file);
  audioPlayer.src = fileURL;
  fileNameElement.textContent = file.name;
  // Reset visualization if something was already playing.
  if (isPlaying) {
    stopAudio();
  }
  // Setup audio when ready
  audioPlayer.oncanplaythrough = () => {
    uploadText.textContent = 'Choose Audio File';
    loadingSpinner.classList.add('hidden');
    statusElement.textContent = 'Ready to play';
    statusElement.className = 'text-green-400';
    // Update duration info
    durationElement.textContent = formatTime(audioPlayer.duration);
    durationInfoElement.textContent = formatTime(audioPlayer.duration);
    // Initialize audio context on first interaction
    initAudioContext();
  };
  audioPlayer.onerror = () => {
    showError('Error loading audio file');
  };
});
// Show error message
// Surface an error banner with the given message and reset the upload
// controls and status line to their idle/error state.
function showError(message) {
  errorMessage.textContent = message;
  errorMessage.classList.remove('hidden');
  loadingSpinner.classList.add('hidden');
  uploadText.textContent = 'Choose Audio File';
  statusElement.textContent = 'Error';
  statusElement.className = 'text-red-400';
}
// Format time (seconds to MM:SS)
// Render a second count as "M:SS" for display (seconds zero-padded).
// Non-numeric input (e.g. an unloaded duration of NaN) renders as 0:00.
function formatTime(seconds) {
  if (isNaN(seconds)) return '0:00';
  const whole = Math.floor(seconds);
  const mins = Math.floor(whole / 60);
  const secs = whole % 60;
  return mins + ':' + String(secs).padStart(2, '0');
}
// Format time for recording (milliseconds to MM:SS)
// Format an elapsed recording time in milliseconds as "MM:SS".
// Fixes: minutes are now zero-padded to two digits, matching the
// "00:00" placeholder the recording timer is reset to elsewhere in the
// UI (previously it showed e.g. "0:05" against a "00:00" baseline).
function formatRecordingTime(ms) {
  const totalSeconds = Math.floor(ms / 1000);
  const minutes = Math.floor(totalSeconds / 60);
  const seconds = totalSeconds % 60;
  const pad = (n) => String(n).padStart(2, '0');
  return `${pad(minutes)}:${pad(seconds)}`;
}
// Play button click
// Toggle playback from the Play/Pause button. Refuses when no file is
// loaded; otherwise lazily creates the AudioContext (first gesture) and
// flips between playing and paused, updating the button label to match.
playButton.addEventListener('click', () => {
  if (!audioPlayer.src) {
    statusElement.textContent = 'No audio selected';
    statusElement.className = 'text-red-400';
    return;
  }
  // Initialize audio context on first play
  initAudioContext();
  if (!isPlaying) {
    startAudio();
    playButton.innerHTML = '<i class="fas fa-pause mr-2"></i> Pause';
  } else {
    stopAudio();
    playButton.innerHTML = '<i class="fas fa-play mr-2"></i> Play';
  }
});
// Start audio playback and visualization
// Begin playback and kick off the visualization loop. Resumes a
// suspended AudioContext first (autoplay policy); playback errors are
// surfaced through showError().
function startAudio() {
  if (!audioContext) return;
  if (audioContext.state === 'suspended') {
    audioContext.resume();
  }
  audioPlayer.play()
    .then(() => {
      isPlaying = true;
      statusElement.textContent = 'Playing';
      statusElement.className = 'text-green-400';
      // Drive the frequency bars.
      visualize();
    })
    .catch((err) => {
      showError('Error playing audio: ' + err.message);
    });
}
// Pause playback, halt the animation loop, and flatten the bars.
function stopAudio() {
    audioPlayer.pause();
    isPlaying = false;
    statusElement.textContent = 'Paused';
    statusElement.className = 'text-yellow-400';
    cancelAnimationFrame(animationId);
    // Collapse every visualizer bar back to zero height.
    for (const bar of bars) {
        bar.style.height = '0px';
    }
}
// Start recording
// Requests microphone access and begins capturing audio with MediaRecorder.
// When the recording stops, the captured chunks are packaged into a blob,
// loaded into the main <audio> element for playback/visualization, and the
// recording UI is reset. No-ops if a recording is already in progress.
// NOTE(review): the blob is labelled 'audio/wav', but MediaRecorder
// typically emits a webm/ogg container — the MIME label may not match the
// actual data; confirm before relying on the .wav type.
function startRecording() {
recordedChunks = [];
// Check if we're already recording
if (mediaRecorder && mediaRecorder.state === 'recording') {
return;
}
// Get user media (microphone); rejects if the user denies permission
navigator.mediaDevices.getUserMedia({ audio: true })
.then(stream => {
mediaRecorder = new MediaRecorder(stream);
// Accumulate non-empty data chunks as they arrive
mediaRecorder.ondataavailable = (e) => {
if (e.data.size > 0) {
recordedChunks.push(e.data);
}
};
mediaRecorder.onstop = () => {
const audioBlob = new Blob(recordedChunks, { type: 'audio/wav' });
const audioUrl = URL.createObjectURL(audioBlob);
// Set the recorded audio as the current audio
audioPlayer.src = audioUrl;
fileNameElement.textContent = 'Recording ' + new Date().toLocaleString();
// Setup audio for visualization
initAudioContext();
setupAudio();
// Update UI back to the idle "recording saved" state
recordingStatus.textContent = 'Recording saved';
recordingIndicator.classList.add('hidden');
recordText.textContent = 'Start Recording';
stopRecordButton.disabled = true;
downloadButton.disabled = false;
recordingVisualization.classList.remove('active');
// Stop the elapsed-time timer and reset the display
clearInterval(recordingInterval);
recordingTime.textContent = '00:00';
};
mediaRecorder.start(100); // Collect data every 100ms
recordingStartTime = Date.now();
// Start the elapsed-time display, refreshed once per second
recordingInterval = setInterval(() => {
const elapsedTime = Date.now() - recordingStartTime;
recordingTime.textContent = formatRecordingTime(elapsedTime);
}, 1000);
// Update UI to the active "recording" state
recordingStatus.textContent = 'Recording...';
recordingIndicator.classList.remove('hidden');
recordText.textContent = 'Recording';
stopRecordButton.disabled = false;
downloadButton.disabled = true;
recordingVisualization.classList.add('active');
})
.catch(err => {
console.error('Error accessing microphone:', err);
showError('Microphone access denied');
});
}
// Stop an in-progress recording and release the microphone stream.
function stopRecording() {
    if (!mediaRecorder || mediaRecorder.state !== 'recording') return;
    mediaRecorder.stop();
    // Stopping the tracks turns off the browser's mic-in-use indicator.
    for (const track of mediaRecorder.stream.getTracks()) {
        track.stop();
    }
}
// Package the recorded chunks into a blob and trigger a file download
// via a temporary, hidden anchor element.
function downloadRecording() {
    if (recordedChunks.length === 0) {
        showError('No recording to download');
        return;
    }
    const audioBlob = new Blob(recordedChunks, { type: 'audio/wav' });
    const audioUrl = URL.createObjectURL(audioBlob);
    const link = document.createElement('a');
    link.style.display = 'none';
    link.href = audioUrl;
    link.download = 'recording-' + new Date().toISOString().slice(0, 19) + '.wav';
    document.body.appendChild(link);
    link.click();
    // Defer cleanup so the browser can begin the download first.
    setTimeout(() => {
        document.body.removeChild(link);
        window.URL.revokeObjectURL(audioUrl);
    }, 100);
}
// Initialize speech recognition
// Creates and configures a Web Speech API recognizer (standard
// SpeechRecognition, or Chrome's webkitSpeechRecognition prefix) and wires
// its lifecycle handlers. Returns true on success, false when the browser
// lacks support.
function initializeSpeechRecognition() {
// Check if browser supports speech recognition
if (!('webkitSpeechRecognition' in window) && !('SpeechRecognition' in window)) {
showError('Speech recognition not supported in this browser');
return false;
}
// Create speech recognition object (prefer the unprefixed constructor)
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
speechRecognizer = new SpeechRecognition();
// Configure recognition: keep listening, report partial (interim) results,
// and use the language currently selected in the dropdown
speechRecognizer.continuous = true;
speechRecognizer.interimResults = true;
speechRecognizer.lang = transcriptLanguage.value;
// Event handlers
// onstart: flip the UI into "transcribing" mode and start an elapsed timer
speechRecognizer.onstart = () => {
isTranscribing = true;
transcriptStatus.textContent = 'Transcribing...';
startTranscriptBtn.disabled = true;
stopTranscriptBtn.disabled = false;
downloadTranscriptBtn.disabled = true;
// Start timer
transcriptStartTime = Date.now();
transcriptInterval = setInterval(() => {
const elapsedTime = Date.now() - transcriptStartTime;
transcriptStatus.textContent = `Transcribing... (${formatRecordingTime(elapsedTime)})`;
}, 1000);
};
// onerror: stop transcription and map known error codes to friendly text
speechRecognizer.onerror = (event) => {
console.error('Speech recognition error:', event.error);
stopTranscription();
if (event.error === 'no-speech') {
transcriptStatus.textContent = 'No speech detected';
} else if (event.error === 'audio-capture') {
transcriptStatus.textContent = 'No microphone found';
} else if (event.error === 'not-allowed') {
transcriptStatus.textContent = 'Microphone access denied';
} else {
transcriptStatus.textContent = 'Error occurred during transcription';
}
};
// onend: the engine may stop on its own (e.g. after silence), so restart
// it while the user still wants transcription
speechRecognizer.onend = () => {
if (isTranscribing) {
// Restart recognition if still transcribing
speechRecognizer.start();
}
};
// onresult: append finalized text to the running transcript and render the
// current interim (not-yet-final) text highlighted at the end
speechRecognizer.onresult = (event) => {
let interimTranscript = '';
let finalTranscriptPart = '';
for (let i = event.resultIndex; i < event.results.length; i++) {
const transcript = event.results[i][0].transcript;
if (event.results[i].isFinal) {
finalTranscriptPart += transcript + ' ';
finalTranscript += transcript + ' ';
} else {
interimTranscript += transcript;
}
}
// Update transcript display
transcriptText = finalTranscript + interimTranscript;
// Create HTML with active (interim) text highlighted.
// NOTE(review): transcript text is injected via innerHTML without
// escaping; recognizer output is plain speech text, but confirm this is
// acceptable if the source of the text ever changes.
let transcriptHTML = finalTranscript.replace(/\n/g, '<br>');
if (interimTranscript) {
transcriptHTML += `<span class="transcript-active">${interimTranscript}</span>`;
}
transcriptTextElement.innerHTML = transcriptHTML;
// Auto-scroll to bottom
transcriptTextElement.parentElement.scrollTop = transcriptTextElement.parentElement.scrollHeight;
};
return true;
}
// Begin live speech-to-text, lazily creating the recognizer on first use.
function startTranscription() {
    // Bail out if the recognizer is missing and cannot be created.
    if (!speechRecognizer && !initializeSpeechRecognition()) {
        return;
    }
    // Clear any transcript from a previous session.
    transcriptText = '';
    finalTranscript = '';
    transcriptTextElement.textContent = '';
    try {
        speechRecognizer.start();
    } catch (error) {
        console.error('Error starting speech recognition:', error);
        transcriptStatus.textContent = 'Error starting transcription';
    }
}
// Stop live transcription and restore the control buttons / status text.
function stopTranscription() {
    // Clearing the flag first prevents onend from auto-restarting.
    isTranscribing = false;
    if (speechRecognizer) {
        speechRecognizer.stop();
    }
    transcriptStatus.textContent = 'Transcription ready';
    startTranscriptBtn.disabled = false;
    stopTranscriptBtn.disabled = true;
    downloadTranscriptBtn.disabled = false;
    clearInterval(transcriptInterval);
}
// Save the accumulated final transcript as a .txt file download.
function downloadTranscript() {
    if (!finalTranscript) {
        showError('No transcript to download');
        return;
    }
    const blob = new Blob([finalTranscript], { type: 'text/plain' });
    const url = URL.createObjectURL(blob);
    const link = document.createElement('a');
    link.style.display = 'none';
    link.href = url;
    link.download = `transcript-${new Date().toISOString().slice(0, 10)}.txt`;
    document.body.appendChild(link);
    link.click();
    // Defer cleanup so the click has time to start the download.
    setTimeout(() => {
        document.body.removeChild(link);
        window.URL.revokeObjectURL(url);
    }, 100);
}
// Main per-frame render loop: updates the volume readout, playback
// progress, frequency bars, and the waveform clip-path, then schedules
// itself again via requestAnimationFrame.
function visualize() {
    if (!analyser || !dataArray) return;
    animationId = requestAnimationFrame(visualize);
    analyser.getByteFrequencyData(dataArray);
    // Average amplitude across all frequency bins -> 0-100 volume display.
    let sum = 0;
    for (let i = 0; i < dataArray.length; i++) {
        sum += dataArray[i];
    }
    const average = sum / dataArray.length;
    const volumePercent = Math.min(Math.round((average / 255) * 100), 100);
    volumeLevelElement.textContent = volumePercent;
    // Playback progress (duration is NaN until metadata has loaded).
    if (audioPlayer.duration) {
        const progress = (audioPlayer.currentTime / audioPlayer.duration) * 100;
        progressBar.style.width = `${progress}%`;
        currentTimeElement.textContent = formatTime(audioPlayer.currentTime);
    }
    // Group frequency bins into barCount buckets. Clamp the group size to
    // at least 1: if barCount exceeded the bin count the original code
    // divided by zero (0/0 -> NaN heights). Also clamp the read range so
    // the last group never indexes past the end of dataArray (undefined
    // reads previously poisoned the sum with NaN).
    const barGroupSize = Math.max(1, Math.floor(dataArray.length / barCount));
    for (let i = 0; i < barCount; i++) {
        const start = i * barGroupSize;
        const end = Math.min(start + barGroupSize, dataArray.length);
        let groupSum = 0;
        for (let j = start; j < end; j++) {
            groupSum += dataArray[j];
        }
        const groupAverage = end > start ? groupSum / (end - start) : 0;
        const height = (groupAverage / 255) * visualizer.clientHeight * 1.2;
        bars[i].style.height = `${height}px`;
        bars[i].style.opacity = `${0.2 + (height / visualizer.clientHeight) * 0.8}`;
        // Slight vertical scale gives the bars a bouncier feel.
        bars[i].style.transform = `scaleY(${1 + (height / visualizer.clientHeight) * 0.5})`;
    }
    // Rebuild the waveform outline from the time-domain samples.
    analyser.getByteTimeDomainData(dataArray);
    let wavePath = 'path(\'M0 ' + (visualizer.clientHeight / 2) + ' ';
    for (let i = 0; i < dataArray.length; i++) {
        const x = (i / dataArray.length) * visualizer.clientWidth;
        const y = (dataArray[i] / 255) * visualizer.clientHeight;
        wavePath += 'L' + x + ' ' + y + ' ';
    }
    wavePath += 'L' + visualizer.clientWidth + ' ' + (visualizer.clientHeight / 2) + ' Z\')';
    audioWave.style.clipPath = wavePath;
}
// --- Control bindings: sliders, selects, and window resize ---

// Visualizer sensitivity.
sensitivityInput.addEventListener('input', function () {
    sensitivity = parseFloat(sensitivityInput.value);
    sensitivityValue.textContent = sensitivity.toFixed(2);
});

// Number of frequency bars (rebuilds the bar elements).
barCountInput.addEventListener('input', function () {
    barCount = parseInt(barCountInput.value);
    barCountValue.textContent = barCount;
    createBars();
});

// Gap between bars (rebuilds the bar elements).
barSpacingInput.addEventListener('input', function () {
    barSpacing = parseFloat(barSpacingInput.value);
    barSpacingValue.textContent = barSpacing.toFixed(1);
    createBars();
});

// Reverb amount (re-applies the reverb node).
reverbInput.addEventListener('input', function () {
    reverbAmount = parseFloat(reverbInput.value);
    reverbValue.textContent = reverbAmount.toFixed(1);
    setupReverb();
});

// Playback volume, mirrored as a 0-100 readout.
volumeControl.addEventListener('input', function () {
    audioPlayer.volume = parseFloat(volumeControl.value);
    volumeLevelElement.textContent = Math.round(volumeControl.value * 100);
});

// Color theme for the visualizer bars.
themeSelect.addEventListener('change', function () {
    currentTheme = themeSelect.value;
    visualizer.className = 'modern-visualizer mb-4';
    if (currentTheme !== 'default') {
        visualizer.classList.add(`theme-${currentTheme}`);
    }
});

// Equalizer preset.
eqPreset.addEventListener('change', function () {
    setupEQ();
});

// Re-layout the bars whenever the window size changes.
window.addEventListener('resize', function () {
    createBars();
});
// Keyboard shortcuts: Space = play/pause, Up/Down = sensitivity,
// Left/Right = seek 5s, R = toggle recording, T = toggle transcription.
document.addEventListener('keydown', (e) => {
    switch (e.code) {
        case 'Space':
            e.preventDefault();
            playButton.click();
            break;
        case 'ArrowUp':
            e.preventDefault();
            sensitivityInput.value = Math.min(1, parseFloat(sensitivityInput.value) + 0.05);
            // Dispatch 'input' so the slider handler applies the new value.
            sensitivityInput.dispatchEvent(new Event('input'));
            break;
        case 'ArrowDown':
            e.preventDefault();
            sensitivityInput.value = Math.max(0.1, parseFloat(sensitivityInput.value) - 0.05);
            sensitivityInput.dispatchEvent(new Event('input'));
            break;
        case 'ArrowRight':
            // Seek only once audio metadata (duration) is available.
            if (audioPlayer.duration) {
                e.preventDefault();
                audioPlayer.currentTime = Math.min(audioPlayer.duration, audioPlayer.currentTime + 5);
            }
            break;
        case 'ArrowLeft':
            if (audioPlayer.duration) {
                e.preventDefault();
                audioPlayer.currentTime = Math.max(0, audioPlayer.currentTime - 5);
            }
            break;
        case 'KeyR':
            e.preventDefault();
            if (mediaRecorder && mediaRecorder.state === 'recording') {
                stopRecording();
            } else {
                startRecording();
            }
            break;
        case 'KeyT':
            e.preventDefault();
            if (isTranscribing) {
                stopTranscription();
            } else {
                startTranscription();
            }
            break;
    }
});
// Fullscreen toggle: swap the icon to reflect the resulting state.
fullscreenBtn.addEventListener('click', () => {
    if (document.fullscreenElement) {
        if (document.exitFullscreen) {
            document.exitFullscreen();
            fullscreenBtn.innerHTML = '<i class="fas fa-expand"></i>';
        }
    } else {
        document.documentElement.requestFullscreen();
        fullscreenBtn.innerHTML = '<i class="fas fa-compress"></i>';
    }
});

// Dark/light mode toggle.
themeToggle.addEventListener('click', () => {
    isDarkMode = !isDarkMode;
    // Force the class to match the flag, whichever way it flipped.
    document.body.classList.toggle('light-mode', !isDarkMode);
    themeToggle.innerHTML = isDarkMode
        ? '<i class="fas fa-moon"></i>'
        : '<i class="fas fa-sun"></i>';
});
// Recording controls (handlers take no arguments, so the event object
// passed by the listener is harmlessly ignored).
recordButton.addEventListener('click', startRecording);
stopRecordButton.addEventListener('click', stopRecording);
downloadButton.addEventListener('click', downloadRecording);

// Audio effect toggles: flip the flag, then (re)apply the effect chain.
isolateMusicBtn.addEventListener('click', () => {
    audioIsolationActive = !audioIsolationActive;
    setupAudioIsolation();
});
noiseReductionBtn.addEventListener('click', () => {
    noiseReductionActive = !noiseReductionActive;
    setupNoiseReduction();
});
reverseBtn.addEventListener('click', () => {
    reverseActive = !reverseActive;
    setupReverseEffect();
});
// Visualizer style picker: apply the chosen style class and mark the
// clicked button with a highlight ring.
visualizerStyleBtns.forEach((btn) => {
    btn.addEventListener('click', () => {
        const chosen = btn.getAttribute('data-style');
        // Reset to the base classes before applying the new style.
        visualizer.className = 'modern-visualizer mb-4';
        visualizer.classList.add(`visualizer-style-${chosen}`);
        visualizerStyleBtns.forEach((other) => other.classList.remove('ring-2', 'ring-white'));
        btn.classList.add('ring-2', 'ring-white');
    });
});

// Screen/background style picker — same pattern applied to <body>.
screenStyleBtns.forEach((btn) => {
    btn.addEventListener('click', () => {
        const chosen = btn.getAttribute('data-screen');
        document.body.className = 'bg-gray-900 text-white min-h-screen flex flex-col items-center justify-center p-4 transition-colors duration-300';
        document.body.classList.add(`screen-style-${chosen}`);
        screenStyleBtns.forEach((other) => other.classList.remove('ring-2', 'ring-white'));
        btn.classList.add('ring-2', 'ring-white');
    });
});
// Live-update the recognizer's language when the dropdown changes.
transcriptLanguage.addEventListener('change', () => {
    if (speechRecognizer) {
        speechRecognizer.lang = transcriptLanguage.value;
    }
});

// Transcription controls.
startTranscriptBtn.addEventListener('click', () => {
    startTranscription();
});
stopTranscriptBtn.addEventListener('click', () => {
    stopTranscription();
});
downloadTranscriptBtn.addEventListener('click', () => {
    downloadTranscript();
});

// Browsers require a user gesture before an AudioContext may start, so
// create it on the first click anywhere. `{ once: true }` auto-removes
// the listener after it fires — the original also called
// removeEventListener manually inside the handler, which was redundant.
document.addEventListener('click', () => {
    initAudioContext();
}, { once: true });
});
</script>
<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=omar1232/advanced-audio" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p></body>
</html>