|
|
<!DOCTYPE html> |
|
|
<html lang="en"> |
|
|
<head> |
|
|
<meta charset="UTF-8"> |
|
|
<meta name="viewport" content="width=device-width, initial-scale=1.0"> |
|
|
<title>AUDIOFORM: by webXOS</title> |
|
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script> |
|
|
<script src="https://cdn.jsdelivr.net/npm/jszip@3.7.1/dist/jszip.min.js"></script> |
|
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/2.0.5/FileSaver.min.js"></script> |
|
|
<style>
    /* Global reset + terminal-style monospace type for the whole app. */
    * {
        margin: 0;
        padding: 0;
        box-sizing: border-box;
        font-family: 'Courier New', monospace;
    }

    /* Dark full-viewport stage; the WebGL canvas fills it, panels float on top. */
    body {
        background: #0a0a12;
        color: #00ff9d;
        overflow: hidden;
        height: 100vh;
        position: relative;
    }

    /* Boot overlay: covers everything (z-index 1000) until JS fades it out. */
    #loading-screen {
        position: fixed;
        top: 0;
        left: 0;
        width: 100%;
        height: 100%;
        background: #000000;
        display: flex;
        flex-direction: column;
        justify-content: center;
        align-items: center;
        z-index: 1000;
        transition: opacity 1s;
    }

    .loading-title {
        font-size: 3rem;
        color: #00ff9d;
        text-shadow: 0 0 10px #00ff9d;
        margin-bottom: 2rem;
        letter-spacing: 5px;
    }

    /* Track for the fake progress bar; the inner bar's width is JS-driven. */
    .loading-bar {
        width: 300px;
        height: 4px;
        background: #111;
        border-radius: 2px;
        overflow: hidden;
        margin-top: 20px;
    }

    .loading-progress {
        height: 100%;
        background: #00ff9d;
        width: 0%;
        transition: width 0.3s;
    }

    /* Transparent overlay holding the panels; pointer-events re-enabled
       per panel so the 3D canvas underneath still receives input. */
    #ui-container {
        position: absolute;
        top: 0;
        left: 0;
        width: 100%;
        height: 100%;
        pointer-events: none;
        z-index: 10;
    }

    /* Shared chrome for the three floating panels. */
    .ui-section {
        position: absolute;
        background: rgba(5, 5, 15, 0.85);
        border: 1px solid #00ff9d;
        border-radius: 5px;
        padding: 15px;
        box-shadow: 0 0 15px rgba(0, 255, 157, 0.3);
        pointer-events: all;
    }

    /* Panel placement: audio input top-left, viz controls bottom-left,
       export top-right. */
    #control-panel {
        top: 20px;
        left: 20px;
        width: 320px;
    }

    #viz-panel {
        bottom: 20px;
        left: 20px;
        width: 320px;
    }

    #export-panel {
        top: 20px;
        right: 20px;
        width: 320px;
    }

    /* Centered app title banner. */
    #title {
        position: absolute;
        top: 20px;
        left: 50%;
        transform: translateX(-50%);
        font-size: 2.2rem;
        color: #00ff9d;
        text-shadow: 0 0 10px #00ff9d;
        letter-spacing: 3px;
        text-align: center;
        background: rgba(5, 5, 15, 0.7);
        padding: 10px 30px;
        border-radius: 5px;
        border: 1px solid #00ff9d;
        box-shadow: 0 0 15px rgba(0, 255, 157, 0.3);
        z-index: 11;
    }

    /* Round fullscreen toggle, bottom-right corner. */
    #fullscreen-btn {
        position: absolute;
        bottom: 20px;
        right: 20px;
        width: 50px;
        height: 50px;
        background: rgba(5, 5, 15, 0.85);
        border: 1px solid #00ff9d;
        border-radius: 50%;
        color: #00ff9d;
        font-size: 24px;
        cursor: pointer;
        display: flex;
        justify-content: center;
        align-items: center;
        z-index: 11;
        pointer-events: all;
        transition: all 0.3s;
    }

    #fullscreen-btn:hover {
        background: rgba(0, 255, 157, 0.2);
        box-shadow: 0 0 15px #00ff9d;
    }

    h2 {
        color: #00ff9d;
        margin-bottom: 15px;
        border-bottom: 1px solid #00ff9d;
        padding-bottom: 5px;
        font-size: 1.3rem;
    }

    /* Primary action buttons (green); .btn-red is the destructive variant. */
    .btn {
        background: #001a0f;
        color: #00ff9d;
        border: 1px solid #00ff9d;
        padding: 10px 15px;
        border-radius: 3px;
        cursor: pointer;
        margin: 5px 0;
        width: 100%;
        font-weight: bold;
        transition: all 0.3s;
    }

    .btn:hover {
        background: #003320;
        box-shadow: 0 0 10px #00ff9d;
    }

    .btn:active {
        background: #00ff9d;
        color: #000;
    }

    .btn-red {
        background: #1a0000;
        border-color: #ff0033;
        color: #ff0033;
    }

    .btn-red:hover {
        background: #330000;
        box-shadow: 0 0 10px #ff0033;
    }

    .file-input {
        width: 100%;
        margin: 10px 0;
        padding: 8px;
        background: #000;
        color: #00ff9d;
        border: 1px solid #00ff9d;
        border-radius: 3px;
    }

    .slider-container {
        margin: 15px 0;
    }

    /* Label row: name on the left, live value readout on the right. */
    .slider-label {
        display: flex;
        justify-content: space-between;
        margin-bottom: 5px;
    }

    /* Range inputs restyled (WebKit only — other engines keep native thumb). */
    .slider {
        width: 100%;
        -webkit-appearance: none;
        height: 6px;
        background: #001a0f;
        border-radius: 3px;
        outline: none;
    }

    .slider::-webkit-slider-thumb {
        -webkit-appearance: none;
        width: 18px;
        height: 18px;
        background: #00ff9d;
        border-radius: 50%;
        cursor: pointer;
    }

    /* Custom toggle switch: the real checkbox is hidden, the sibling
       label (.toggle-slider) draws the track and knob. */
    .toggle {
        display: flex;
        align-items: center;
        margin: 10px 0;
    }

    .toggle input {
        display: none;
    }

    .toggle-slider {
        width: 50px;
        height: 24px;
        background: #001a0f;
        border-radius: 12px;
        margin-right: 10px;
        position: relative;
        cursor: pointer;
        border: 1px solid #00ff9d;
    }

    .toggle-slider:after {
        content: '';
        position: absolute;
        width: 20px;
        height: 20px;
        background: #00ff9d;
        border-radius: 50%;
        top: 1px;
        left: 1px;
        transition: 0.3s;
    }

    /* Knob slides right when the hidden checkbox is checked. */
    .toggle input:checked + .toggle-slider:after {
        transform: translateX(26px);
    }

    .info-text {
        font-size: 0.85rem;
        color: #00cc7a;
        margin-top: 10px;
        line-height: 1.4;
    }

    /* Host for the Three.js renderer's canvas (appended at runtime). */
    #canvas-container {
        width: 100%;
        height: 100%;
    }

    canvas {
        display: block;
    }

    /* Red-accented capture statistics box inside the viz panel. */
    .metrics {
        background: rgba(0, 0, 0, 0.7);
        padding: 10px;
        border-radius: 3px;
        margin-top: 15px;
        border: 1px solid #ff0033;
    }

    .metric-row {
        display: flex;
        justify-content: space-between;
        margin: 5px 0;
        font-size: 0.9rem;
    }

    .metric-value {
        color: #ff0033;
    }
</style>
|
|
</head> |
|
|
<body> |
|
|
|
|
|
<!-- Boot overlay: faded out and removed by the loading script once the
     (simulated) progress reaches 100%. role="status"/aria-live lets
     assistive technology announce the startup state. -->
<div id="loading-screen" role="status" aria-live="polite">
    <div class="loading-title">AUDIOFORM</div>
    <div>Initializing 3D Audio Visualization System...</div>
    <div class="loading-bar">
        <!-- Width is driven from 0% to 100% by the progress interval. -->
        <div class="loading-progress" id="loading-progress"></div>
    </div>
</div>
|
|
|
|
|
|
|
|
<div id="ui-container"> |
|
|
<!-- Top-center app banner. -->
<div id="title">AUDIOFORM by webXOS</div>

<!-- Audio input panel (top-left): WAV upload, built-in sample, and the
     synthetic 8-bit tone generator with frequency/time-scale sliders. -->
<div class="ui-section" id="control-panel">
    <h2>Audio Input</h2>
    <input type="file" id="audio-upload" accept=".wav" class="file-input">
    <button id="load-sample" class="btn">Load Sample Audio</button>

    <h2>Synthetic Noise Generator</h2>
    <!-- Hidden checkbox + styled label implement the toggle switch. -->
    <div class="toggle">
        <input type="checkbox" id="noise-toggle">
        <label for="noise-toggle" class="toggle-slider"></label>
        <label for="noise-toggle">Enable 8-bit Tone Generation</label>
    </div>

    <!-- Tone frequency: 50–2000 Hz; readout span updated by JS. -->
    <div class="slider-container">
        <div class="slider-label">
            <span>Tone Frequency</span>
            <span id="freq-value">440 Hz</span>
        </div>
        <input type="range" min="50" max="2000" value="440" class="slider" id="freq-slider">
    </div>

    <!-- Playback-rate multiplier applied to the active audio source. -->
    <div class="slider-container">
        <div class="slider-label">
            <span>Time Scaling</span>
            <span id="time-value">1.0x</span>
        </div>
        <input type="range" min="0.1" max="5" step="0.1" value="1" class="slider" id="time-slider">
    </div>

    <button id="generate-noise" class="btn">Generate Synthetic Audio</button>

    <div class="info-text">
        Synthetic mode generates 8-bit tones with timelapse time-scaling for dataset creation.
    </div>
</div>
|
|
|
|
|
<!-- Visualization panel (bottom-left): waveform scale/rotation controls,
     manual frame capture, and live capture metrics. -->
<div class="ui-section" id="viz-panel">
    <h2>Visualization Controls</h2>

    <!-- Vertical scale applied to the waveform mesh (waveformMesh.scale.y). -->
    <div class="slider-container">
        <div class="slider-label">
            <span>Waveform Scale</span>
            <span id="scale-value">1.0</span>
        </div>
        <input type="range" min="0.1" max="3" step="0.1" value="1" class="slider" id="scale-slider">
    </div>

    <!-- Multiplier for the auto-rotation increments in the render loop. -->
    <div class="slider-container">
        <div class="slider-label">
            <span>Rotation Speed</span>
            <span id="rotation-value">0.5</span>
        </div>
        <input type="range" min="0" max="2" step="0.1" value="0.5" class="slider" id="rotation-slider">
    </div>

    <div class="toggle">
        <input type="checkbox" id="auto-rotate" checked>
        <label for="auto-rotate" class="toggle-slider"></label>
        <label for="auto-rotate">Auto-Rotate Visualization</label>
    </div>

    <button id="capture-frame" class="btn">Capture Current Frame</button>

    <!-- Read-only metrics; spans are updated from the capture/render code. -->
    <div class="metrics">
        <div class="metric-row">
            <span>Frames Captured:</span>
            <span class="metric-value" id="frame-count">0</span>
        </div>
        <div class="metric-row">
            <span>Audio Duration:</span>
            <span class="metric-value" id="audio-duration">0.0s</span>
        </div>
        <div class="metric-row">
            <span>Current Time:</span>
            <span class="metric-value" id="current-time">0.0s</span>
        </div>
    </div>
</div>
|
|
|
|
|
<!-- Dataset export panel (top-right): name the dataset, choose how many
     frames a timelapse captures, then export everything as a zip. -->
<div class="ui-section" id="export-panel">
    <h2>Dataset Export</h2>

    <input type="text" id="dataset-name" placeholder="Dataset Name" class="file-input" value="audioform_dataset">

    <!-- Number of frames the timelapse capture will take (5–100). -->
    <div class="slider-container">
        <div class="slider-label">
            <span>Frames to Capture</span>
            <span id="capture-count-value">10</span>
        </div>
        <input type="range" min="5" max="100" value="10" class="slider" id="capture-count-slider">
    </div>

    <button id="start-capture" class="btn">Start Timelapse Capture</button>
    <button id="export-dataset" class="btn">Export Dataset (.zip)</button>
    <button id="reset-captures" class="btn btn-red">Reset All Captures</button>

    <div class="info-text">
        Exports Hugging Face formatted .zip with README.md, images, and classification CSV.
    </div>
</div>
|
|
|
|
|
<!-- Fullscreen toggle. A real <button> (not a div) so it is keyboard
     focusable/activatable; aria-label names the icon-only control.
     The id, click handling, and textContent glyph updates are unchanged. -->
<button id="fullscreen-btn" type="button" aria-label="Toggle fullscreen">⛶</button>
|
|
</div> |
|
|
|
|
|
|
|
|
<!-- The Three.js renderer's <canvas> is appended here by initThreeJS(). -->
<div id="canvas-container"></div>
|
|
|
|
|
<script> |
|
|
|
|
|
document.addEventListener('DOMContentLoaded', function() { |
|
|
|
|
|
// Simulated loading animation: advance the bar by random increments every
// 150 ms; once it hits 100%, fade the overlay out and then remove it from
// the layout so it cannot intercept clicks.
const loadingScreen = document.getElementById('loading-screen');
const loadingBar = document.getElementById('loading-progress');
let loaded = 0;
const bootTicker = setInterval(() => {
    loaded = Math.min(100, loaded + Math.random() * 15);
    if (loaded === 100) {
        clearInterval(bootTicker);
        setTimeout(() => {
            loadingScreen.style.opacity = '0';
            // Matches the 1 s opacity transition defined in CSS.
            setTimeout(() => {
                loadingScreen.style.display = 'none';
            }, 1000);
        }, 500);
    }
    loadingBar.style.width = `${loaded}%`;
}, 150);
|
|
|
|
|
|
|
|
// ---- Shared application state (read/written across all functions below) ----

// Three.js and Web Audio handles, created lazily by initThreeJS()/initAudio().
let scene, camera, renderer, audioContext, audioSource, analyser;
// Geometry/material/mesh for the animated waveform line.
let waveformGeometry, waveformMaterial, waveformMesh;
// Snapshot records accumulated for dataset export ({id, timestamp, dataURL, ...}).
let capturedFrames = [];
// True while an audio source is playing and driving the visualization.
let isPlaying = false;
// True when the synthetic 8-bit tone generator is the active source.
let isNoiseMode = false;
// Most recently played AudioBuffer (kept for reference).
let currentAudioBuffer = null;
// audioContext.currentTime at playback start; used for the elapsed-time readout.
let startTime = 0;
// UI-controlled parameters, kept in sync by the slider/toggle handlers.
let rotationSpeed = 0.5;
let autoRotate = true;
let timeScale = 1.0;
let noiseFrequency = 440;
// Monotonic count of captured frames (also shown in the metrics panel).
let frameCaptureCount = 0;
|
|
|
|
|
|
|
|
/**
 * Build the Three.js scene: camera, renderer, lighting, the waveform mesh,
 * grid/axes helpers, and the window-resize hook.
 * Populates the module-level `scene`, `camera`, and `renderer` handles.
 */
function initThreeJS() {
    scene = new THREE.Scene();
    scene.background = new THREE.Color(0x000000);

    camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
    camera.position.set(0, 5, 15);

    renderer = new THREE.WebGLRenderer({ antialias: true });
    renderer.setSize(window.innerWidth, window.innerHeight);
    renderer.setPixelRatio(window.devicePixelRatio);
    document.getElementById('canvas-container').appendChild(renderer.domElement);

    // Low ambient fill plus two colored directionals (green key, red accent).
    scene.add(new THREE.AmbientLight(0x222222));

    const keyLight = new THREE.DirectionalLight(0x00ff9d, 1);
    keyLight.position.set(10, 10, 5);
    scene.add(keyLight);

    const accentLight = new THREE.DirectionalLight(0xff0033, 0.5);
    accentLight.position.set(-10, 5, -5);
    scene.add(accentLight);

    createWaveformVisualization();

    // Reference grid below the waveform, plus world axes for orientation.
    const grid = new THREE.GridHelper(30, 30, 0x00aa00, 0x003300);
    grid.position.y = -5;
    scene.add(grid);
    scene.add(new THREE.AxesHelper(10));

    window.addEventListener('resize', onWindowResize);
}
|
|
|
|
|
|
|
|
/**
 * Create the waveform line (512 points spread along the x axis) plus a
 * particle overlay parented to it, and add both to the scene.
 * The line geometry is stored in `waveformGeometry` so updateWaveform()
 * can animate its vertices every frame.
 */
function createWaveformVisualization() {
    const POINTS = 512;
    const SPACING = 0.05;

    // Flat line centered on the origin; y/z are displaced from audio data.
    const linePositions = new Float32Array(POINTS * 3);
    for (let p = 0; p < POINTS; p++) {
        linePositions[p * 3] = (p - POINTS / 2) * SPACING;
        linePositions[p * 3 + 1] = 0;
        linePositions[p * 3 + 2] = 0;
    }

    const lineGeometry = new THREE.BufferGeometry();
    lineGeometry.setAttribute('position', new THREE.BufferAttribute(linePositions, 3));

    const lineMaterial = new THREE.LineBasicMaterial({
        color: 0x00ff9d,
        linewidth: 2
    });

    waveformMesh = new THREE.Line(lineGeometry, lineMaterial);
    scene.add(waveformMesh);

    // Green point sprites on the same initial positions, parented to the
    // line so they inherit its rotation/scale.
    const dotPositions = new Float32Array(POINTS * 3);
    const dotColors = new Float32Array(POINTS * 3);
    for (let p = 0; p < POINTS; p++) {
        dotPositions[p * 3] = (p - POINTS / 2) * SPACING;
        dotPositions[p * 3 + 1] = 0;
        dotPositions[p * 3 + 2] = 0;

        dotColors[p * 3] = 0.0;
        dotColors[p * 3 + 1] = 1.0;
        dotColors[p * 3 + 2] = 0.0;
    }

    const dotGeometry = new THREE.BufferGeometry();
    dotGeometry.setAttribute('position', new THREE.BufferAttribute(dotPositions, 3));
    dotGeometry.setAttribute('color', new THREE.BufferAttribute(dotColors, 3));

    const dotMaterial = new THREE.PointsMaterial({
        size: 0.05,
        vertexColors: true,
        transparent: true
    });

    waveformMesh.add(new THREE.Points(dotGeometry, dotMaterial));

    waveformGeometry = lineGeometry;
}
|
|
|
|
|
|
|
|
/**
 * Deform the waveform line from an analyser byte array (values 0–255).
 *
 * Bug fix: the line has 512 vertices but getByteFrequencyData fills only
 * analyser.frequencyBinCount (= fftSize / 2 = 256) bins, so the old
 * `dataArray[i]` indexing read `undefined` for i >= 256 and set half the
 * vertex positions to NaN. The vertex index is now resampled onto the
 * data array's actual length (and missing values default to 0).
 *
 * @param {Uint8Array} dataArray  analyser output (frequency-domain bytes)
 */
function updateWaveform(dataArray) {
    if (!waveformGeometry) return;

    const positions = waveformGeometry.attributes.position.array;
    const pointCount = positions.length / 3;

    for (let i = 0; i < pointCount; i++) {
        // Map the vertex index onto the (possibly shorter) data array.
        const bin = Math.floor(i * dataArray.length / pointCount);
        const value = (dataArray[bin] || 0) / 128.0;

        // Vertical displacement proportional to audio energy.
        positions[i * 3 + 1] = value * 5;

        // Time-animated z ripple, scaled by the same energy.
        positions[i * 3 + 2] = Math.sin(i * 0.05 + Date.now() * 0.001) * value;
    }

    waveformGeometry.attributes.position.needsUpdate = true;
}
|
|
|
|
|
|
|
|
/**
 * Lazily create the shared AudioContext (with WebKit fallback) and a
 * 512-point FFT analyser used by the render loop.
 */
function initAudio() {
    const Ctx = window.AudioContext || window.webkitAudioContext;
    audioContext = new Ctx();

    analyser = audioContext.createAnalyser();
    analyser.fftSize = 512;
    analyser.smoothingTimeConstant = 0.8;
}
|
|
|
|
|
|
|
|
/**
 * Read an uploaded audio File, decode it through the Web Audio API, and
 * start playback. Decode failures are logged to the console.
 * @param {File} file  user-selected audio file
 */
function loadAudioFile(file) {
    if (!audioContext) initAudio();

    const reader = new FileReader();
    reader.onload = (e) => {
        audioContext.decodeAudioData(
            e.target.result,
            (buffer) => playAudioBuffer(buffer),
            (err) => console.error("Error decoding audio data", err)
        );
    };
    reader.readAsArrayBuffer(file);
}
|
|
|
|
|
|
|
|
/**
 * Stop any current playback and play the given AudioBuffer through the
 * analyser, honouring the time-scaling slider; updates the duration readout.
 *
 * Bug fix: the previous version called animate() here even though the
 * render loop is already started once at initialisation — every playback
 * stacked an additional requestAnimationFrame loop, compounding rotation
 * speed and render cost. The redundant call is removed.
 *
 * @param {AudioBuffer} buffer  decoded audio to play
 */
function playAudioBuffer(buffer) {
    stopAudio();

    currentAudioBuffer = buffer;
    audioSource = audioContext.createBufferSource();
    audioSource.buffer = buffer;
    audioSource.connect(analyser);
    analyser.connect(audioContext.destination);

    // Apply the UI time-scaling (playback-rate) setting.
    audioSource.playbackRate.value = timeScale;

    audioSource.start(0);
    isPlaying = true;
    startTime = audioContext.currentTime;

    document.getElementById('audio-duration').textContent = `${buffer.duration.toFixed(2)}s`;
}
|
|
|
|
|
|
|
|
/**
 * Generate and loop a 2-second synthetic "8-bit" tone: a square wave at
 * the UI-selected frequency with a little uniform noise, clamped to ±0.8.
 *
 * Fixes: (1) removed an oscillator/gain pair that was created and wired
 * into the graph but never started — dead nodes kept alive on every call;
 * (2) removed the redundant animate() call that stacked extra render loops
 * (the loop is started once at initialisation).
 */
function generateSyntheticNoise() {
    if (!audioContext) initAudio();

    stopAudio();

    // Render the tone into a 2 s mono buffer that will be looped.
    const SECONDS = 2;
    const sampleRate = audioContext.sampleRate;
    const buffer = audioContext.createBuffer(1, sampleRate * SECONDS, sampleRate);
    const samples = buffer.getChannelData(0);

    for (let i = 0; i < samples.length; i++) {
        const t = i / sampleRate;
        // Square wave at the slider frequency...
        let s = Math.sign(Math.sin(2 * Math.PI * noiseFrequency * t));
        // ...plus light uniform noise for texture...
        s += (Math.random() - 0.5) * 0.1;
        // ...clamped to keep headroom and avoid clipping.
        samples[i] = Math.max(-0.8, Math.min(0.8, s));
    }

    audioSource = audioContext.createBufferSource();
    audioSource.buffer = buffer;
    audioSource.loop = true;
    audioSource.playbackRate.value = timeScale;
    audioSource.connect(analyser);
    analyser.connect(audioContext.destination);

    audioSource.start(0);
    isPlaying = true;
    startTime = audioContext.currentTime;

    document.getElementById('audio-duration').textContent = `∞ (synthetic)`;
}
|
|
|
|
|
|
|
|
/**
 * Stop and disconnect the current audio source, if any.
 *
 * Robustness: AudioScheduledSourceNode.stop() throws InvalidStateError if
 * the source was never started, so it is guarded; the stale handle is
 * cleared so slider handlers cannot poke a dead node afterwards.
 */
function stopAudio() {
    if (!audioSource) return;

    try {
        audioSource.stop();
    } catch (e) {
        // Source was never started — nothing was playing; ignore.
    }
    audioSource.disconnect();
    audioSource = null;
    isPlaying = false;
}
|
|
|
|
|
|
|
|
/**
 * Render the scene, snapshot the canvas as a PNG data URL, and append a
 * metadata record to capturedFrames. Flashes the waveform red briefly as
 * capture feedback and updates the frame counter in the UI.
 * @returns {Object} the newly recorded frame
 */
function captureFrame() {
    // Render immediately before reading pixels so the snapshot is fresh
    // (the drawing buffer is not preserved between frames).
    renderer.render(scene, camera);
    const snapshot = renderer.domElement.toDataURL('image/png');

    const elapsed = audioContext
        ? (audioContext.currentTime - startTime).toFixed(3)
        : '0.000';

    const frame = {
        id: frameCaptureCount,
        timestamp: elapsed,
        dataURL: snapshot,
        frequency: isNoiseMode ? noiseFrequency : 0,
        timeScale: timeScale,
        date: new Date().toISOString()
    };

    capturedFrames.push(frame);
    frameCaptureCount++;
    document.getElementById('frame-count').textContent = frameCaptureCount;

    // Brief red flash on the waveform as visual confirmation.
    waveformMesh.material.color.setHex(0xff0033);
    setTimeout(() => waveformMesh.material.color.setHex(0x00ff9d), 100);

    return frame;
}
|
|
|
|
|
|
|
|
/**
 * Capture N frames (N read from the capture-count slider) at a fixed
 * 500 ms cadence, then notify the user when the run completes.
 */
function startTimelapseCapture() {
    const totalFrames = parseInt(document.getElementById('capture-count-slider').value);
    const CADENCE_MS = 500;

    let taken = 0;
    const timer = setInterval(() => {
        if (taken >= totalFrames) {
            clearInterval(timer);
            alert(`Timelapse capture complete! Captured ${totalFrames} frames.`);
            return;
        }
        captureFrame();
        taken++;
    }, CADENCE_MS);
}
|
|
|
|
|
|
|
|
/**
 * Bundle all captured frames into a Hugging Face compatible dataset zip:
 *   <name>/images/frame_NNNN.png   — one PNG per captured frame
 *   <name>/metadata.csv            — file_name column first (HF requirement)
 *   <name>/README.md               — generated dataset card
 * and trigger a client-side download via FileSaver's saveAs().
 */
async function exportDataset() {
    // Nothing to export without captures.
    if (capturedFrames.length === 0) {
        alert('No frames captured yet! Capture some frames before exporting.');
        return;
    }

    const datasetName = document.getElementById('dataset-name').value || 'audioform_dataset';
    const zip = new JSZip();

    const datasetFolder = zip.folder(datasetName);
    const imagesFolder = datasetFolder.folder('images');

    // CSV header: file_name must come first for Hugging Face image datasets.
    let csvContent = 'file_name,frame_id,timestamp,frequency,time_scale,capture_date\n';

    capturedFrames.forEach((frame, index) => {
        // Strip the "data:image/png;base64," prefix and decode to a Blob.
        const data = frame.dataURL.split(',')[1];
        const blob = b64toBlob(data, 'image/png');

        const imageName = `frame_${String(index).padStart(4, '0')}.png`;
        imagesFolder.file(imageName, blob);

        csvContent += `images/${imageName},${frame.id},${frame.timestamp},${frame.frequency},${frame.timeScale},${frame.date}\n`;
    });

    datasetFolder.file('metadata.csv', csvContent);

    // Dataset card describing structure, columns, and provenance.
    const readmeContent = `# ${datasetName}

## Dataset Description

This dataset was generated using AUDIOFORM, a 3D audio visualization system.

- **Total Frames**: ${capturedFrames.length}
- **Generation Date**: ${new Date().toISOString().split('T')[0]}
- **Audio Type**: ${isNoiseMode ? 'Synthetic 8-bit Tone' : 'Uploaded WAV File'}
- **Time Scaling**: ${timeScale}x

## Dataset Structure

- \`images/\`: Contains all captured frames in PNG format
- \`metadata.csv\`: Contains classification data for each frame

## Metadata Columns

- \`file_name\`: Relative path to the image file (e.g., images/frame_0001.png) - **REQUIRED for Hugging Face**
- \`frame_id\`: Unique identifier for each frame
- \`timestamp\`: Time in seconds when frame was captured
- \`frequency\`: Audio frequency at capture time (Hz)
- \`time_scale\`: Playback speed multiplier
- \`capture_date\`: ISO date string of capture

## Intended Use

This dataset is intended for training machine learning models on audio visualization patterns, waveform classification, or generative AI tasks.

## Generation Details

Generated with AUDIOFORM v1.0 - A Three.js based audio visualization dataset generator.
`;

    datasetFolder.file('README.md', readmeContent);

    const zipBlob = await zip.generateAsync({type: 'blob'});
    saveAs(zipBlob, `${datasetName}.zip`);

    alert(`Dataset exported successfully as ${datasetName}.zip\n\nThe dataset includes the required 'file_name' column for Hugging Face compatibility.`);
}
|
|
|
|
|
|
|
|
/**
 * Convert a base64 payload into a Blob, decoding in fixed-size slices so
 * no single huge intermediate array is built.
 * @param {string} b64Data     base64 string (without any "data:" prefix)
 * @param {string} contentType MIME type for the resulting Blob
 * @param {number} sliceSize   bytes decoded per chunk
 * @returns {Blob}
 */
function b64toBlob(b64Data, contentType = '', sliceSize = 512) {
    const decoded = atob(b64Data);
    const chunks = [];

    for (let start = 0; start < decoded.length; start += sliceSize) {
        const piece = decoded.slice(start, start + sliceSize);
        const bytes = new Uint8Array(piece.length);
        for (let j = 0; j < piece.length; j++) {
            bytes[j] = piece.charCodeAt(j);
        }
        chunks.push(bytes);
    }

    return new Blob(chunks, { type: contentType });
}
|
|
|
|
|
|
|
|
/**
 * Discard all captured frames and reset the counter, asking the user for
 * confirmation first when there is anything to lose.
 */
function resetCaptures() {
    const hasFrames = capturedFrames.length > 0;
    if (hasFrames && !confirm('Are you sure you want to reset all captured frames?')) {
        return;
    }

    capturedFrames = [];
    frameCaptureCount = 0;
    document.getElementById('frame-count').textContent = '0';
}
|
|
|
|
|
|
|
|
/**
 * Main render loop (self-rescheduling via requestAnimationFrame).
 * While audio plays it samples the analyser, deforms the waveform, and
 * refreshes the elapsed-time readout; optional auto-rotation is applied
 * before each render.
 */
function animate() {
    requestAnimationFrame(animate);

    if (isPlaying && analyser) {
        const bins = new Uint8Array(analyser.frequencyBinCount);
        analyser.getByteFrequencyData(bins);
        updateWaveform(bins);

        if (audioContext) {
            const elapsed = audioContext.currentTime - startTime;
            document.getElementById('current-time').textContent = `${elapsed.toFixed(2)}s`;
        }
    }

    if (autoRotate && waveformMesh) {
        // Slow tumble, scaled by the rotation-speed slider.
        waveformMesh.rotation.y += 0.005 * rotationSpeed;
        waveformMesh.rotation.x += 0.002 * rotationSpeed;
    }

    renderer.render(scene, camera);
}
|
|
|
|
|
|
|
|
/** Keep camera aspect ratio and renderer size in sync with the viewport. */
function onWindowResize() {
    const { innerWidth: w, innerHeight: h } = window;
    camera.aspect = w / h;
    camera.updateProjectionMatrix();
    renderer.setSize(w, h);
}
|
|
|
|
|
|
|
|
/** Enter fullscreen when not active; otherwise exit (if supported). */
function toggleFullscreen() {
    if (document.fullscreenElement) {
        if (document.exitFullscreen) {
            document.exitFullscreen();
        }
        return;
    }

    document.documentElement.requestFullscreen().catch((err) => {
        console.error(`Error attempting to enable fullscreen: ${err.message}`);
    });
}
|
|
|
|
|
|
|
|
/**
 * Generate a built-in 3-second demo tone — a square wave whose carrier
 * sweeps 100–300 Hz on a slow sine, with light noise — and play it.
 *
 * Consistency fix: clears isNoiseMode just like the file-upload path does,
 * so frames captured during sample playback are not mislabelled with the
 * synthetic-tone frequency in the exported metadata.
 */
function loadSampleAudio() {
    if (!audioContext) initAudio();

    const sampleRate = audioContext.sampleRate;
    const buffer = audioContext.createBuffer(1, sampleRate * 3, sampleRate);
    const samples = buffer.getChannelData(0);

    for (let i = 0; i < samples.length; i++) {
        const time = i / sampleRate;

        // Carrier frequency wobbles between 100 and 300 Hz.
        const freq = 200 + Math.sin(time * 2) * 100;

        // Square wave plus light noise, clamped to avoid clipping.
        let s = Math.sign(Math.sin(2 * Math.PI * freq * time));
        s += (Math.random() - 0.5) * 0.05;
        samples[i] = Math.max(-0.8, Math.min(0.8, s));
    }

    // Sample playback is not synthetic-noise mode (mirrors the upload path).
    isNoiseMode = false;

    playAudioBuffer(buffer);
}
|
|
|
|
|
|
|
|
/**
 * Wire every UI control to the shared application state and actions.
 *
 * Fix: the fullscreenchange handler contained an if/else whose branches
 * both assigned the identical glyph ('⛶') — dead conditional collapsed to
 * a single assignment with unchanged behavior.
 */
function setupEventListeners() {
    // WAV upload → decode and play; leaves synthetic-tone mode.
    document.getElementById('audio-upload').addEventListener('change', function(e) {
        if (e.target.files.length > 0) {
            loadAudioFile(e.target.files[0]);
            isNoiseMode = false;
        }
    });

    document.getElementById('load-sample').addEventListener('click', loadSampleAudio);

    document.getElementById('noise-toggle').addEventListener('change', function(e) {
        isNoiseMode = e.target.checked;
    });

    document.getElementById('generate-noise').addEventListener('click', generateSyntheticNoise);

    // Tone frequency slider; regenerates the tone live while it is playing.
    const freqSlider = document.getElementById('freq-slider');
    const freqValue = document.getElementById('freq-value');
    freqSlider.addEventListener('input', function() {
        noiseFrequency = parseInt(this.value);
        freqValue.textContent = `${noiseFrequency} Hz`;

        if (isPlaying && isNoiseMode) {
            generateSyntheticNoise();
        }
    });

    // Playback-rate slider; applied immediately to an active source.
    const timeSlider = document.getElementById('time-slider');
    const timeValue = document.getElementById('time-value');
    timeSlider.addEventListener('input', function() {
        timeScale = parseFloat(this.value);
        timeValue.textContent = `${timeScale.toFixed(1)}x`;

        if (audioSource && isPlaying) {
            audioSource.playbackRate.value = timeScale;
        }
    });

    // Vertical scale of the waveform mesh.
    const scaleSlider = document.getElementById('scale-slider');
    const scaleValue = document.getElementById('scale-value');
    scaleSlider.addEventListener('input', function() {
        const scale = parseFloat(this.value);
        scaleValue.textContent = scale.toFixed(1);

        if (waveformMesh) {
            waveformMesh.scale.y = scale;
        }
    });

    // Auto-rotation speed multiplier.
    const rotationSlider = document.getElementById('rotation-slider');
    const rotationValue = document.getElementById('rotation-value');
    rotationSlider.addEventListener('input', function() {
        rotationSpeed = parseFloat(this.value);
        rotationValue.textContent = rotationSpeed.toFixed(1);
    });

    document.getElementById('auto-rotate').addEventListener('change', function(e) {
        autoRotate = e.target.checked;
    });

    document.getElementById('capture-frame').addEventListener('click', captureFrame);

    // Timelapse frame-count readout.
    const captureCountSlider = document.getElementById('capture-count-slider');
    const captureCountValue = document.getElementById('capture-count-value');
    captureCountSlider.addEventListener('input', function() {
        captureCountValue.textContent = this.value;
    });

    document.getElementById('start-capture').addEventListener('click', startTimelapseCapture);
    document.getElementById('export-dataset').addEventListener('click', exportDataset);
    document.getElementById('reset-captures').addEventListener('click', resetCaptures);
    document.getElementById('fullscreen-btn').addEventListener('click', toggleFullscreen);

    // Refresh the fullscreen-toggle glyph on state changes (the original
    // if/else set the same character in both branches).
    document.addEventListener('fullscreenchange', function() {
        document.getElementById('fullscreen-btn').textContent = '⛶';
    });
}
|
|
|
|
|
|
|
|
// Boot sequence: build the 3D scene, create the audio graph, wire the UI,
// then start the single render loop.
initThreeJS();
initAudio();
setupEventListeners();

animate();

// Console confirmation once startup has settled.
setTimeout(() => {
    console.log('AUDIOFORM initialized. Ready to generate audio visualization datasets.');
}, 1000);
|
|
}); |
|
|
</script> |
|
|
</body> |
|
|
</html> |
|
|
|