<!--
audioform_dataset / audioform/webXOS_Audioform_v1.html
webxos's picture
Rename webXOS_Audioform_v1.html to audioform/webXOS_Audioform_v1.html
9cf5bb4 verified
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AUDIOFORM: by webXOS</title>
<!-- Third-party libraries: Three.js r128 (3D rendering), JSZip (zip assembly),
     FileSaver.js (client-side save of the generated dataset zip) -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/jszip@3.7.1/dist/jszip.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/2.0.5/FileSaver.min.js"></script>
<!-- All styling is inline; theme is a dark "terminal" look: near-black
     background with neon green (#00ff9d) and red (#ff0033) accents -->
<style>
* {
margin: 0;
padding: 0;
box-sizing: border-box;
font-family: 'Courier New', monospace;
}
body {
background: #0a0a12;
color: #00ff9d;
overflow: hidden;
height: 100vh;
position: relative;
}
/* Loading Screen */
#loading-screen {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background: #000000;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
z-index: 1000;
transition: opacity 1s;
}
.loading-title {
font-size: 3rem;
color: #00ff9d;
text-shadow: 0 0 10px #00ff9d;
margin-bottom: 2rem;
letter-spacing: 5px;
}
.loading-bar {
width: 300px;
height: 4px;
background: #111;
border-radius: 2px;
overflow: hidden;
margin-top: 20px;
}
.loading-progress {
height: 100%;
background: #00ff9d;
width: 0%;
transition: width 0.3s;
}
/* Main UI */
#ui-container {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
z-index: 10;
}
.ui-section {
position: absolute;
background: rgba(5, 5, 15, 0.85);
border: 1px solid #00ff9d;
border-radius: 5px;
padding: 15px;
box-shadow: 0 0 15px rgba(0, 255, 157, 0.3);
pointer-events: all;
}
/* Control Panel */
#control-panel {
top: 20px;
left: 20px;
width: 320px;
}
/* Visualization Panel */
#viz-panel {
bottom: 20px;
left: 20px;
width: 320px;
}
/* Export Panel */
#export-panel {
top: 20px;
right: 20px;
width: 320px;
}
/* Title */
#title {
position: absolute;
top: 20px;
left: 50%;
transform: translateX(-50%);
font-size: 2.2rem;
color: #00ff9d;
text-shadow: 0 0 10px #00ff9d;
letter-spacing: 3px;
text-align: center;
background: rgba(5, 5, 15, 0.7);
padding: 10px 30px;
border-radius: 5px;
border: 1px solid #00ff9d;
box-shadow: 0 0 15px rgba(0, 255, 157, 0.3);
z-index: 11;
}
/* Fullscreen Button */
#fullscreen-btn {
position: absolute;
bottom: 20px;
right: 20px;
width: 50px;
height: 50px;
background: rgba(5, 5, 15, 0.85);
border: 1px solid #00ff9d;
border-radius: 50%;
color: #00ff9d;
font-size: 24px;
cursor: pointer;
display: flex;
justify-content: center;
align-items: center;
z-index: 11;
pointer-events: all;
transition: all 0.3s;
}
#fullscreen-btn:hover {
background: rgba(0, 255, 157, 0.2);
box-shadow: 0 0 15px #00ff9d;
}
/* UI Elements */
h2 {
color: #00ff9d;
margin-bottom: 15px;
border-bottom: 1px solid #00ff9d;
padding-bottom: 5px;
font-size: 1.3rem;
}
.btn {
background: #001a0f;
color: #00ff9d;
border: 1px solid #00ff9d;
padding: 10px 15px;
border-radius: 3px;
cursor: pointer;
margin: 5px 0;
width: 100%;
font-weight: bold;
transition: all 0.3s;
}
.btn:hover {
background: #003320;
box-shadow: 0 0 10px #00ff9d;
}
.btn:active {
background: #00ff9d;
color: #000;
}
.btn-red {
background: #1a0000;
border-color: #ff0033;
color: #ff0033;
}
.btn-red:hover {
background: #330000;
box-shadow: 0 0 10px #ff0033;
}
.file-input {
width: 100%;
margin: 10px 0;
padding: 8px;
background: #000;
color: #00ff9d;
border: 1px solid #00ff9d;
border-radius: 3px;
}
.slider-container {
margin: 15px 0;
}
.slider-label {
display: flex;
justify-content: space-between;
margin-bottom: 5px;
}
.slider {
width: 100%;
-webkit-appearance: none;
height: 6px;
background: #001a0f;
border-radius: 3px;
outline: none;
}
.slider::-webkit-slider-thumb {
-webkit-appearance: none;
width: 18px;
height: 18px;
background: #00ff9d;
border-radius: 50%;
cursor: pointer;
}
/* Custom checkbox-backed toggle switch: the real input is hidden and the
   styled .toggle-slider label acts as the click target */
.toggle {
display: flex;
align-items: center;
margin: 10px 0;
}
.toggle input {
display: none;
}
.toggle-slider {
width: 50px;
height: 24px;
background: #001a0f;
border-radius: 12px;
margin-right: 10px;
position: relative;
cursor: pointer;
border: 1px solid #00ff9d;
}
.toggle-slider:after {
content: '';
position: absolute;
width: 20px;
height: 20px;
background: #00ff9d;
border-radius: 50%;
top: 1px;
left: 1px;
transition: 0.3s;
}
.toggle input:checked + .toggle-slider:after {
transform: translateX(26px);
}
.info-text {
font-size: 0.85rem;
color: #00cc7a;
margin-top: 10px;
line-height: 1.4;
}
/* Canvas */
#canvas-container {
width: 100%;
height: 100%;
}
canvas {
display: block;
}
/* Metrics Display */
.metrics {
background: rgba(0, 0, 0, 0.7);
padding: 10px;
border-radius: 3px;
margin-top: 15px;
border: 1px solid #ff0033;
}
.metric-row {
display: flex;
justify-content: space-between;
margin: 5px 0;
font-size: 0.9rem;
}
.metric-value {
color: #ff0033;
}
</style>
</head>
<body>
<!-- Loading Screen: covers the page until the boot-progress script fades it out -->
<div id="loading-screen">
<div class="loading-title">AUDIOFORM</div>
<div>Initializing 3D Audio Visualization System...</div>
<div class="loading-bar">
<div class="loading-progress" id="loading-progress"></div>
</div>
</div>
<!-- Main UI overlay. The container ignores pointer events; individual
     .ui-section panels re-enable them so the 3D canvas stays interactive -->
<div id="ui-container">
<div id="title">AUDIOFORM by webXOS</div>
<!-- Audio input + synthetic tone generator -->
<div class="ui-section" id="control-panel">
<h2>Audio Input</h2>
<input type="file" id="audio-upload" accept=".wav" class="file-input">
<button id="load-sample" class="btn">Load Sample Audio</button>
<h2>Synthetic Noise Generator</h2>
<div class="toggle">
<input type="checkbox" id="noise-toggle">
<label for="noise-toggle" class="toggle-slider"></label>
<label for="noise-toggle">Enable 8-bit Tone Generation</label>
</div>
<div class="slider-container">
<div class="slider-label">
<span>Tone Frequency</span>
<span id="freq-value">440 Hz</span>
</div>
<input type="range" min="50" max="2000" value="440" class="slider" id="freq-slider">
</div>
<div class="slider-container">
<div class="slider-label">
<span>Time Scaling</span>
<span id="time-value">1.0x</span>
</div>
<input type="range" min="0.1" max="5" step="0.1" value="1" class="slider" id="time-slider">
</div>
<button id="generate-noise" class="btn">Generate Synthetic Audio</button>
<div class="info-text">
Synthetic mode generates 8-bit tones with timelapse time-scaling for dataset creation.
</div>
</div>
<!-- Visualization tuning + capture metrics -->
<div class="ui-section" id="viz-panel">
<h2>Visualization Controls</h2>
<div class="slider-container">
<div class="slider-label">
<span>Waveform Scale</span>
<span id="scale-value">1.0</span>
</div>
<input type="range" min="0.1" max="3" step="0.1" value="1" class="slider" id="scale-slider">
</div>
<div class="slider-container">
<div class="slider-label">
<span>Rotation Speed</span>
<span id="rotation-value">0.5</span>
</div>
<input type="range" min="0" max="2" step="0.1" value="0.5" class="slider" id="rotation-slider">
</div>
<div class="toggle">
<input type="checkbox" id="auto-rotate" checked>
<label for="auto-rotate" class="toggle-slider"></label>
<label for="auto-rotate">Auto-Rotate Visualization</label>
</div>
<button id="capture-frame" class="btn">Capture Current Frame</button>
<div class="metrics">
<div class="metric-row">
<span>Frames Captured:</span>
<span class="metric-value" id="frame-count">0</span>
</div>
<div class="metric-row">
<span>Audio Duration:</span>
<span class="metric-value" id="audio-duration">0.0s</span>
</div>
<div class="metric-row">
<span>Current Time:</span>
<span class="metric-value" id="current-time">0.0s</span>
</div>
</div>
</div>
<!-- Dataset export controls -->
<div class="ui-section" id="export-panel">
<h2>Dataset Export</h2>
<input type="text" id="dataset-name" placeholder="Dataset Name" class="file-input" value="audioform_dataset">
<div class="slider-container">
<div class="slider-label">
<span>Frames to Capture</span>
<span id="capture-count-value">10</span>
</div>
<input type="range" min="5" max="100" value="10" class="slider" id="capture-count-slider">
</div>
<button id="start-capture" class="btn">Start Timelapse Capture</button>
<button id="export-dataset" class="btn">Export Dataset (.zip)</button>
<button id="reset-captures" class="btn btn-red">Reset All Captures</button>
<div class="info-text">
Exports Hugging Face formatted .zip with README.md, images, and classification CSV.
</div>
</div>
<!-- Fullscreen toggle. A real <button> (keyboard/screen-reader accessible)
     instead of a bare click-handling <div>; the glyph is present from the
     start rather than only after the first fullscreenchange event. The id is
     unchanged so existing CSS and the JS click handler still bind to it. -->
<button id="fullscreen-btn" type="button" aria-label="Toggle fullscreen">⛶</button>
</div>
<!-- Three.js Canvas Container (renderer canvas is appended here by initThreeJS) -->
<div id="canvas-container"></div>
<script>
// Main Application
document.addEventListener('DOMContentLoaded', function() {
// Simulate loading progress
let progress = 0;
const progressInterval = setInterval(() => {
progress += Math.random() * 15;
if (progress >= 100) {
progress = 100;
clearInterval(progressInterval);
setTimeout(() => {
document.getElementById('loading-screen').style.opacity = '0';
setTimeout(() => {
document.getElementById('loading-screen').style.display = 'none';
}, 1000);
}, 500);
}
document.getElementById('loading-progress').style.width = `${progress}%`;
}, 150);
// Global application state, shared by all functions in this closure.
let scene, camera, renderer, audioContext, audioSource, analyser; // Three.js + Web Audio handles (created in init functions)
let waveformGeometry, waveformMaterial, waveformMesh; // waveform line geometry/material/mesh (waveformMaterial is assigned nowhere below)
let capturedFrames = []; // captured frame objects (PNG data URLs + metadata) awaiting export
let isPlaying = false; // true while an audio source is active
let isNoiseMode = false; // synthetic-tone mode vs uploaded-file mode (set by #noise-toggle)
let currentAudioBuffer = null; // most recently played decoded AudioBuffer
let startTime = 0; // audioContext.currentTime at playback start, for elapsed-time display
let rotationSpeed = 0.5; // auto-rotation multiplier (slider-controlled)
let autoRotate = true; // whether the waveform mesh auto-rotates
let timeScale = 1.0; // playbackRate multiplier (slider-controlled)
let noiseFrequency = 440; // synthetic tone frequency in Hz (slider-controlled)
let frameCaptureCount = 0; // running count of captured frames this session
// Initialize the Three.js scene: camera, renderer, lights, waveform mesh and
// helpers. Appends the WebGL canvas to #canvas-container and registers the
// window-resize handler. Must run before the animate() loop starts.
function initThreeJS() {
  // Create scene with a pure black background
  scene = new THREE.Scene();
  scene.background = new THREE.Color(0x000000);
  // Perspective camera slightly above and back from the origin
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
  camera.position.z = 15;
  camera.position.y = 5;
  // Full-window antialiased renderer at native device pixel ratio
  renderer = new THREE.WebGLRenderer({ antialias: true });
  renderer.setSize(window.innerWidth, window.innerHeight);
  renderer.setPixelRatio(window.devicePixelRatio);
  document.getElementById('canvas-container').appendChild(renderer.domElement);
  // Lighting: dim ambient plus a green key light and a red fill light to
  // match the UI's neon theme
  const ambientLight = new THREE.AmbientLight(0x222222);
  scene.add(ambientLight);
  const directionalLight = new THREE.DirectionalLight(0x00ff9d, 1);
  directionalLight.position.set(10, 10, 5);
  scene.add(directionalLight);
  const redLight = new THREE.DirectionalLight(0xff0033, 0.5);
  redLight.position.set(-10, 5, -5);
  scene.add(redLight);
  // Create initial (flat) waveform visualization
  createWaveformVisualization();
  // Ground grid below the waveform for spatial reference
  const gridHelper = new THREE.GridHelper(30, 30, 0x00aa00, 0x003300);
  gridHelper.position.y = -5;
  scene.add(gridHelper);
  // XYZ axes helper at the origin
  const axesHelper = new THREE.AxesHelper(10);
  scene.add(axesHelper);
  // Keep camera aspect and renderer size in sync with the viewport
  window.addEventListener('resize', onWindowResize);
}
// Build the waveform visualization: a 512-point line (initially flat along X)
// plus a child particle system sharing the same layout. Stores the line's
// geometry in the global waveformGeometry so updateWaveform() can deform it.
function createWaveformVisualization() {
  // Line geometry: 512 vertices spread symmetrically around the origin
  const geometry = new THREE.BufferGeometry();
  const pointCount = 512;
  const positions = new Float32Array(pointCount * 3);
  // Initialize with a flat line on the X axis (y = z = 0)
  for (let i = 0; i < pointCount; i++) {
    positions[i * 3] = (i - pointCount / 2) * 0.05;
    positions[i * 3 + 1] = 0;
    positions[i * 3 + 2] = 0;
  }
  geometry.setAttribute('position', new THREE.BufferAttribute(positions, 3));
  // Neon-green line material (linewidth > 1 is ignored by most WebGL
  // implementations but is harmless)
  const material = new THREE.LineBasicMaterial({
    color: 0x00ff9d,
    linewidth: 2
  });
  // Create the line mesh and add it to the scene
  waveformMesh = new THREE.Line(geometry, material);
  scene.add(waveformMesh);
  // Secondary particle system with the same initial layout, parented to the
  // line so it inherits rotation/scale
  const particleGeometry = new THREE.BufferGeometry();
  const particlePositions = new Float32Array(pointCount * 3);
  const particleColors = new Float32Array(pointCount * 3);
  for (let i = 0; i < pointCount; i++) {
    particlePositions[i * 3] = (i - pointCount / 2) * 0.05;
    particlePositions[i * 3 + 1] = 0;
    particlePositions[i * 3 + 2] = 0;
    // Pure green per-vertex color
    particleColors[i * 3] = 0.0; // R
    particleColors[i * 3 + 1] = 1.0; // G
    particleColors[i * 3 + 2] = 0.0; // B
  }
  particleGeometry.setAttribute('position', new THREE.BufferAttribute(particlePositions, 3));
  particleGeometry.setAttribute('color', new THREE.BufferAttribute(particleColors, 3));
  const particleMaterial = new THREE.PointsMaterial({
    size: 0.05,
    vertexColors: true,
    transparent: true
  });
  const particles = new THREE.Points(particleGeometry, particleMaterial);
  waveformMesh.add(particles);
  // NOTE: only the line geometry is animated by updateWaveform(); the
  // particle geometry keeps its initial flat layout.
  waveformGeometry = geometry;
}
// Deform the waveform line from a byte array of analyser data.
// dataArray: Uint8Array of frequency magnitudes (0-255), length =
// analyser.frequencyBinCount. The geometry has 512 points while the analyser
// (fftSize 512) delivers only 256 bins, so the index is wrapped; previously
// dataArray[i] was undefined for i >= 256, producing NaN positions that
// corrupted half the geometry.
function updateWaveform(dataArray) {
  if (!waveformGeometry) return;
  const positions = waveformGeometry.attributes.position.array;
  const pointCount = positions.length / 3;
  const sampleCount = dataArray.length;
  for (let i = 0; i < pointCount; i++) {
    // Normalize the (wrapped) sample to roughly 0..2
    const value = sampleCount > 0 ? dataArray[i % sampleCount] / 128.0 : 0;
    // Y follows the audio magnitude
    positions[i * 3 + 1] = value * 5;
    // Z gets a time-varying ripple scaled by the same magnitude for 3D effect
    positions[i * 3 + 2] = Math.sin(i * 0.05 + Date.now() * 0.001) * value;
  }
  // Tell Three.js the vertex buffer changed
  waveformGeometry.attributes.position.needsUpdate = true;
}
// Lazily construct the shared AudioContext and its AnalyserNode.
// Called once up front and defensively by the playback helpers.
function initAudio() {
  const ContextCtor = window.AudioContext || window.webkitAudioContext;
  audioContext = new ContextCtor();
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 512;               // yields 256 frequency bins
  analyser.smoothingTimeConstant = 0.8; // temporal smoothing for the viz
}
// Read an uploaded audio file, decode it, and start playback.
// Decode failures are logged to the console and otherwise ignored.
function loadAudioFile(file) {
  if (!audioContext) initAudio();
  const reader = new FileReader();
  reader.onload = function(event) {
    const onDecoded = function(buffer) {
      playAudioBuffer(buffer);
    };
    const onDecodeError = function(err) {
      console.error("Error decoding audio data", err);
    };
    audioContext.decodeAudioData(event.target.result, onDecoded, onDecodeError);
  };
  reader.readAsArrayBuffer(file);
}
// Play a decoded AudioBuffer through the analyser chain, replacing any audio
// that is currently playing. Applies the current timeScale as playbackRate
// and updates the duration readout.
function playAudioBuffer(buffer) {
  stopAudio();
  currentAudioBuffer = buffer;
  audioSource = audioContext.createBufferSource();
  audioSource.buffer = buffer;
  audioSource.connect(analyser);
  analyser.connect(audioContext.destination);
  // Honour the current timelapse speed setting
  audioSource.playbackRate.value = timeScale;
  audioSource.start(0);
  isPlaying = true;
  startTime = audioContext.currentTime;
  // Update duration display
  document.getElementById('audio-duration').textContent = `${buffer.duration.toFixed(2)}s`;
  // FIX: do not call animate() here. The render loop is started exactly once
  // at initialization; calling it again on every play stacked additional
  // requestAnimationFrame loops (doubling render work and rotation speed).
}
// Generate and loop a synthetic "8-bit" tone: a square wave at the current
// noiseFrequency with light random noise, in a 2-second looping buffer,
// played at the current timeScale. Replaces any audio already playing.
// (The previous version also built an OscillatorNode/GainNode pair that was
// connected but never started — dead code, removed.)
function generateSyntheticNoise() {
  if (!audioContext) initAudio();
  stopAudio();
  // Create a 2-second mono buffer at the context's sample rate
  const bufferLength = 2; // seconds
  const buffer = audioContext.createBuffer(1, audioContext.sampleRate * bufferLength, audioContext.sampleRate);
  const channelData = buffer.getChannelData(0);
  // Fill buffer with 8-bit style square wave
  for (let i = 0; i < channelData.length; i++) {
    const time = i / audioContext.sampleRate;
    const frequency = noiseFrequency;
    // Square wave calculation
    channelData[i] = Math.sign(Math.sin(2 * Math.PI * frequency * time));
    // Add some noise for "8-bit" character
    channelData[i] += (Math.random() - 0.5) * 0.1;
    // Clamp to prevent distortion
    channelData[i] = Math.max(-0.8, Math.min(0.8, channelData[i]));
  }
  // Loop the buffer through the analyser chain
  audioSource = audioContext.createBufferSource();
  audioSource.buffer = buffer;
  audioSource.loop = true;
  audioSource.playbackRate.value = timeScale;
  audioSource.connect(analyser);
  analyser.connect(audioContext.destination);
  audioSource.start(0);
  isPlaying = true;
  startTime = audioContext.currentTime;
  // Looping buffer has no finite duration to display
  document.getElementById('audio-duration').textContent = `∞ (synthetic)`;
  // FIX: no animate() call here — the render loop already runs from init;
  // restarting it stacked extra requestAnimationFrame loops.
}
// Stop and disconnect the active audio source, if any.
// Safe to call when nothing is playing or when the source already ended
// (stop() on a finished/stopped source throws InvalidStateError).
function stopAudio() {
  if (audioSource) {
    try {
      audioSource.stop();
    } catch (e) {
      // Source had already stopped or never started; nothing to do.
    }
    audioSource.disconnect();
    audioSource = null; // drop the stale handle so later checks see no source
    isPlaying = false;
  }
}
// Snapshot the current render as a PNG data URL plus capture metadata,
// append it to capturedFrames, bump the counter, and flash the waveform red
// as visual feedback. Returns the new frame object.
function captureFrame() {
  // Render first so the canvas holds the latest frame before reading it back
  renderer.render(scene, camera);
  const snapshot = renderer.domElement.toDataURL('image/png');
  const elapsed = audioContext
    ? (audioContext.currentTime - startTime).toFixed(3)
    : '0.000';
  const frame = {
    id: frameCaptureCount,
    timestamp: elapsed,
    dataURL: snapshot,
    frequency: isNoiseMode ? noiseFrequency : 0,
    timeScale: timeScale,
    date: new Date().toISOString()
  };
  capturedFrames.push(frame);
  frameCaptureCount += 1;
  // Reflect the new total in the metrics panel
  document.getElementById('frame-count').textContent = frameCaptureCount;
  // Brief red flash on the waveform to confirm the capture
  waveformMesh.material.color.setHex(0xff0033);
  setTimeout(function() {
    waveformMesh.material.color.setHex(0x00ff9d);
  }, 100);
  return frame;
}
// Capture frames on a fixed 0.5 s cadence until the slider-selected count is
// reached, then stop and notify the user.
function startTimelapseCapture() {
  const target = parseInt(document.getElementById('capture-count-slider').value);
  const periodMs = 500; // capture every 0.5 seconds
  let taken = 0;
  const timer = setInterval(function() {
    if (taken >= target) {
      clearInterval(timer);
      alert(`Timelapse capture complete! Captured ${target} frames.`);
      return;
    }
    captureFrame();
    taken += 1;
  }, periodMs);
}
// Package all captured frames into a Hugging Face ImageFolder-style zip:
//   <name>/images/frame_NNNN.png  — the captured renders
//   <name>/metadata.csv           — per-frame metadata keyed by file_name
//   <name>/README.md              — generated dataset card
// Uses JSZip to build the archive and FileSaver's saveAs() to download it.
async function exportDataset() {
  if (capturedFrames.length === 0) {
    alert('No frames captured yet! Capture some frames before exporting.');
    return;
  }
  const datasetName = document.getElementById('dataset-name').value || 'audioform_dataset';
  const zip = new JSZip();
  // Create dataset folder structure
  const datasetFolder = zip.folder(datasetName);
  const imagesFolder = datasetFolder.folder('images');
  // Create metadata CSV with REQUIRED file_name column for Hugging Face compatibility
  let csvContent = 'file_name,frame_id,timestamp,frequency,time_scale,capture_date\n';
  // Add images to zip and build CSV simultaneously
  capturedFrames.forEach((frame, index) => {
    // Strip the "data:image/png;base64," prefix, keep the payload
    const data = frame.dataURL.split(',')[1];
    const blob = b64toBlob(data, 'image/png');
    // Zero-padded, index-ordered file names (frame_0000.png, frame_0001.png, ...)
    const imageName = `frame_${String(index).padStart(4, '0')}.png`;
    imagesFolder.file(imageName, blob);
    // Add to CSV with file_name column (required by Hugging Face ImageFolder)
    csvContent += `images/${imageName},${frame.id},${frame.timestamp},${frame.frequency},${frame.timeScale},${frame.date}\n`;
  });
  datasetFolder.file('metadata.csv', csvContent);
  // Generated dataset card. NOTE: the template-literal content below is
  // emitted verbatim into README.md — do not re-indent it.
  const readmeContent = `# ${datasetName}
## Dataset Description
This dataset was generated using AUDIOFORM, a 3D audio visualization system.
- **Total Frames**: ${capturedFrames.length}
- **Generation Date**: ${new Date().toISOString().split('T')[0]}
- **Audio Type**: ${isNoiseMode ? 'Synthetic 8-bit Tone' : 'Uploaded WAV File'}
- **Time Scaling**: ${timeScale}x
## Dataset Structure
- \`images/\`: Contains all captured frames in PNG format
- \`metadata.csv\`: Contains classification data for each frame
## Metadata Columns
- \`file_name\`: Relative path to the image file (e.g., images/frame_0001.png) - **REQUIRED for Hugging Face**
- \`frame_id\`: Unique identifier for each frame
- \`timestamp\`: Time in seconds when frame was captured
- \`frequency\`: Audio frequency at capture time (Hz)
- \`time_scale\`: Playback speed multiplier
- \`capture_date\`: ISO date string of capture
## Intended Use
This dataset is intended for training machine learning models on audio visualization patterns, waveform classification, or generative AI tasks.
## Generation Details
Generated with AUDIOFORM v1.0 - A Three.js based audio visualization dataset generator.
`;
  datasetFolder.file('README.md', readmeContent);
  // Generate and download zip file
  const zipBlob = await zip.generateAsync({type: 'blob'});
  saveAs(zipBlob, `${datasetName}.zip`);
  alert(`Dataset exported successfully as ${datasetName}.zip\n\nThe dataset includes the required 'file_name' column for Hugging Face compatibility.`);
}
// Decode a base64 string into a Blob of the given MIME type.
// Decodes in fixed-size chunks (default 512 chars) to avoid allocating one
// huge intermediate array for large images.
function b64toBlob(b64Data, contentType = '', sliceSize = 512) {
  const decoded = atob(b64Data);
  const chunks = [];
  for (let start = 0; start < decoded.length; start += sliceSize) {
    const piece = decoded.slice(start, start + sliceSize);
    const bytes = new Uint8Array(piece.length);
    for (let j = 0; j < piece.length; j++) {
      bytes[j] = piece.charCodeAt(j);
    }
    chunks.push(bytes);
  }
  return new Blob(chunks, {type: contentType});
}
// Discard every captured frame and zero the counter/readout.
// Asks for confirmation only when there is actually something to lose.
function resetCaptures() {
  const hasFrames = capturedFrames.length > 0;
  if (hasFrames && !confirm('Are you sure you want to reset all captured frames?')) {
    return; // user cancelled
  }
  capturedFrames = [];
  frameCaptureCount = 0;
  document.getElementById('frame-count').textContent = '0';
}
// Per-frame update, self-scheduling via requestAnimationFrame. Pulls current
// frequency data from the analyser while audio plays, deforms the waveform,
// refreshes the elapsed-time readout, applies auto-rotation, and renders.
// Intended to be started exactly once after initialization.
function animate() {
  requestAnimationFrame(animate);
  // Update audio visualization if playing
  if (isPlaying && analyser) {
    // frequencyBinCount = fftSize / 2 = 256 bins (see initAudio)
    const dataArray = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(dataArray);
    updateWaveform(dataArray);
    // Update current time display (elapsed since playback started)
    if (audioContext) {
      const currentTime = audioContext.currentTime - startTime;
      document.getElementById('current-time').textContent = `${currentTime.toFixed(2)}s`;
    }
  }
  // Auto-rotate visualization (speed scaled by the rotation slider)
  if (autoRotate && waveformMesh) {
    waveformMesh.rotation.y += 0.005 * rotationSpeed;
    waveformMesh.rotation.x += 0.002 * rotationSpeed;
  }
  // Render scene
  renderer.render(scene, camera);
}
// Keep the camera's aspect ratio and the renderer's size in sync with the
// browser viewport.
function onWindowResize() {
  const width = window.innerWidth;
  const height = window.innerHeight;
  camera.aspect = width / height;
  camera.updateProjectionMatrix();
  renderer.setSize(width, height);
}
// Toggle browser fullscreen: leave it if currently active, otherwise request
// it on the document root (logging any rejection).
function toggleFullscreen() {
  if (document.fullscreenElement) {
    if (document.exitFullscreen) {
      document.exitFullscreen();
    }
    return;
  }
  document.documentElement.requestFullscreen().catch(function(err) {
    console.error(`Error attempting to enable fullscreen: ${err.message}`);
  });
}
// Synthesize and play a 3-second demo clip: a square wave whose frequency
// sweeps slowly around 200 Hz, with a touch of random noise, clamped to
// avoid clipping.
function loadSampleAudio() {
  if (!audioContext) initAudio();
  const sampleRate = audioContext.sampleRate;
  const buffer = audioContext.createBuffer(1, sampleRate * 3, sampleRate);
  const samples = buffer.getChannelData(0);
  for (let n = 0; n < samples.length; n++) {
    const t = n / sampleRate;
    const freq = 200 + Math.sin(t * 2) * 100;            // slow frequency sweep
    let s = Math.sign(Math.sin(2 * Math.PI * freq * t)); // square wave
    s += (Math.random() - 0.5) * 0.05;                   // light noise
    samples[n] = Math.max(-0.8, Math.min(0.8, s));       // clamp
  }
  playAudioBuffer(buffer);
}
// Wire every UI control to its handler. Called once at startup, after the
// DOM and the Three.js scene exist.
function setupEventListeners() {
  // Audio file upload: decode + play the chosen file and leave synthetic mode
  document.getElementById('audio-upload').addEventListener('change', function(e) {
    if (e.target.files.length > 0) {
      loadAudioFile(e.target.files[0]);
      isNoiseMode = false;
    }
  });
  // Built-in demo clip
  document.getElementById('load-sample').addEventListener('click', loadSampleAudio);
  // Synthetic-noise mode toggle
  document.getElementById('noise-toggle').addEventListener('change', function(e) {
    isNoiseMode = e.target.checked;
  });
  // Generate and loop a synthetic tone
  document.getElementById('generate-noise').addEventListener('click', generateSyntheticNoise);
  // Tone-frequency slider: update label; if a synthetic tone is playing,
  // regenerate it live at the new frequency
  const freqSlider = document.getElementById('freq-slider');
  const freqValue = document.getElementById('freq-value');
  freqSlider.addEventListener('input', function() {
    noiseFrequency = parseInt(this.value);
    freqValue.textContent = `${noiseFrequency} Hz`;
    if (isPlaying && isNoiseMode) {
      generateSyntheticNoise();
    }
  });
  // Playback-speed slider: applied immediately to the active source
  const timeSlider = document.getElementById('time-slider');
  const timeValue = document.getElementById('time-value');
  timeSlider.addEventListener('input', function() {
    timeScale = parseFloat(this.value);
    timeValue.textContent = `${timeScale.toFixed(1)}x`;
    if (audioSource && isPlaying) {
      audioSource.playbackRate.value = timeScale;
    }
  });
  // Vertical scale of the waveform mesh
  const scaleSlider = document.getElementById('scale-slider');
  const scaleValue = document.getElementById('scale-value');
  scaleSlider.addEventListener('input', function() {
    const scale = parseFloat(this.value);
    scaleValue.textContent = scale.toFixed(1);
    if (waveformMesh) {
      waveformMesh.scale.y = scale;
    }
  });
  // Auto-rotation speed
  const rotationSlider = document.getElementById('rotation-slider');
  const rotationValue = document.getElementById('rotation-value');
  rotationSlider.addEventListener('input', function() {
    rotationSpeed = parseFloat(this.value);
    rotationValue.textContent = rotationSpeed.toFixed(1);
  });
  // Auto-rotate on/off
  document.getElementById('auto-rotate').addEventListener('change', function(e) {
    autoRotate = e.target.checked;
  });
  // Manual single-frame capture
  document.getElementById('capture-frame').addEventListener('click', captureFrame);
  // Timelapse frame-count slider readout
  const captureCountSlider = document.getElementById('capture-count-slider');
  const captureCountValue = document.getElementById('capture-count-value');
  captureCountSlider.addEventListener('input', function() {
    captureCountValue.textContent = this.value;
  });
  // Capture / export / reset actions
  document.getElementById('start-capture').addEventListener('click', startTimelapseCapture);
  document.getElementById('export-dataset').addEventListener('click', exportDataset);
  document.getElementById('reset-captures').addEventListener('click', resetCaptures);
  // Fullscreen toggle button
  document.getElementById('fullscreen-btn').addEventListener('click', toggleFullscreen);
  // Keep the fullscreen button's icon present. FIX: the original if/else set
  // the identical glyph in both branches, so a single unconditional
  // assignment is equivalent.
  document.addEventListener('fullscreenchange', function() {
    document.getElementById('fullscreen-btn').textContent = '⛶';
  });
}
// Startup sequence (DOM is ready at this point): build the scene, create the
// audio context, bind UI handlers, then start the single render loop.
initThreeJS();
// NOTE(review): browsers may create the AudioContext in a "suspended" state
// until a user gesture; the playback handlers run on clicks, which typically
// unlocks it — confirm on target browsers.
initAudio();
setupEventListeners();
// Start the one and only requestAnimationFrame loop
animate();
// Show a welcome message
setTimeout(() => {
console.log('AUDIOFORM initialized. Ready to generate audio visualization datasets.');
}, 1000);
});
</script>
</body>
</html>