// liminal-sessions/src/engine/AudioEngine.ts
// Uploaded by Severian — commit 6a03d7f ("Upload 32 files", verified)
import type {
AudioData,
AudioEngine as AudioEngineInterface,
Track,
PeakDetection,
BeatManager,
FrequencyMapping
} from '../types/audio'
/**
 * Web Audio engine: wraps an HTMLAudioElement routed through an AnalyserNode
 * and exposes per-band energy levels, peak/beat detection, and a radial
 * "beat wave" consumed by the particle visualizers.
 *
 * Lifecycle: initialize() -> loadTrack() -> play()/pause()/seekTo() -> cleanup().
 * The AudioContext is created suspended and is only resumed from play(),
 * which callers must invoke from a user gesture (browser autoplay policy).
 */
export class AudioEngine implements AudioEngineInterface {
  private audioContext: AudioContext | null = null
  private analyser: AnalyserNode | null = null
  private audioElement: HTMLAudioElement | null = null
  private sourceNode: MediaElementAudioSourceNode | null = null
  private isInitialized = false
  // True once the AudioContext has been resumed after a user gesture.
  private isContextStarted = false
  private currentTrack: Track | null = null

  // Auto-play functionality: invoked from the element's 'ended' event.
  private onTrackEndedCallback: (() => void) | null = null

  // Audio analysis configuration: three bands mapped onto the visual metaphor.
  private readonly frequencyMapping: FrequencyMapping = {
    deepFreq: { min: 20, max: 250 }, // Deep Earth Pulse (trunk/roots)
    midFreq: { min: 250, max: 4000 }, // Heartwood Resonance (core glyphs)
    highFreq: { min: 4000, max: 20000 } // Canopy Shiver (fractal branches)
  }

  // Rolling-average peak detector state (mid-band energy, 0-255 scale).
  private peakDetection: PeakDetection = {
    energyHistory: [],
    historyLength: 30,
    lastPeakTime: 0,
    minTimeBetweenPeaks: 200, // ms debounce between reported peaks
    sensitivity: 1.1 // current energy must exceed average * sensitivity
  }

  // Expanding-ring "beat wave": triggered by deep-band beats, decays each frame.
  private beatManager: BeatManager = {
    currentWaveRadius: 0,
    waveStrength: 0,
    isWaveActive: false,
    triggerWave: (energy: number) => {
      const maxEnergy = 255
      const energyExcess = energy - 200
      // FIX: clamp to >= 0. Beats fire for any energy > 150 (see
      // getAudioData), so energies in (150, 200] previously produced a
      // negative strength and a brief repulsive force in getWaveForce().
      this.beatManager.waveStrength = Math.max(0, (energyExcess / (maxEnergy - 200)) * 20.0)
      this.beatManager.currentWaveRadius = 0
      this.beatManager.isWaveActive = true
    },
    update: (deltaTime: number) => {
      if (this.beatManager.isWaveActive) {
        // Ring expands at 1 unit/sec and decays 2% per update.
        this.beatManager.currentWaveRadius += deltaTime * 1.0
        this.beatManager.waveStrength *= 0.98
        // Retire the wave once it leaves the unit field or fades out.
        if (this.beatManager.currentWaveRadius > 1.0 || this.beatManager.waveStrength < 0.1) {
          this.beatManager.isWaveActive = false
        }
      }
    },
    getWaveForce: (distance: number) => {
      if (!this.beatManager.isWaveActive) return 0
      // Force only applies in a thin shell around the ring's current radius.
      const distanceFromWave = Math.abs(distance - this.beatManager.currentWaveRadius)
      if (distanceFromWave < 0.1) {
        return this.beatManager.waveStrength * Math.exp(-distanceFromWave * 10)
      }
      return 0
    }
  }

  /**
   * Creates the AudioContext and AnalyserNode. The context remains suspended
   * until the first play() (user-gesture requirement). Idempotent.
   * @throws if AudioContext construction fails.
   */
  async initialize(): Promise<void> {
    if (this.isInitialized) return
    try {
      // Create AudioContext but don't start it yet (browser security).
      // webkitAudioContext fallback covers older Safari.
      this.audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()
      this.analyser = this.audioContext.createAnalyser()
      this.analyser.fftSize = 2048 // => 1024 frequency bins
      // Don't resume context here - wait for user gesture
      this.isInitialized = true
      console.log('AudioEngine initialized (context suspended until user gesture)')
    } catch (error) {
      console.error('Failed to initialize AudioEngine:', error)
      throw error
    }
  }

  /**
   * Resumes the (suspended) AudioContext. Must be reached from a user
   * gesture or the browser will reject the resume.
   */
  private async ensureContextStarted(): Promise<void> {
    if (!this.audioContext || this.isContextStarted) return
    if (this.audioContext.state === 'suspended') {
      try {
        await this.audioContext.resume()
        this.isContextStarted = true
        console.log('AudioContext resumed after user gesture')
      } catch (error) {
        console.error('Failed to resume AudioContext:', error)
        throw error
      }
    } else {
      this.isContextStarted = true
    }
  }

  /**
   * Loads a track into the (lazily created, reused) audio element and waits
   * for its metadata, with a 10s timeout. Probes the URL with a HEAD request
   * first so network/CORS failures surface with a clear message.
   * @throws on inaccessible URL, element error, or timeout.
   */
  async loadTrack(track: Track): Promise<void> {
    if (!this.isInitialized) {
      throw new Error('AudioEngine not initialized')
    }
    try {
      console.log(`🎵 Loading track: ${track.title} from ${track.url}`)
      // Test if the file is accessible
      console.log('🔍 Testing file accessibility...')
      try {
        const response = await fetch(track.url, { method: 'HEAD' })
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${response.statusText}`)
        }
        console.log('✅ File is accessible via HTTP')
        console.log('📊 Response headers:', {
          contentType: response.headers.get('content-type'),
          contentLength: response.headers.get('content-length'),
          status: response.status
        })
      } catch (fetchError) {
        console.error('❌ File accessibility test failed:', fetchError)
        throw new Error(`Cannot access audio file: ${fetchError}`)
      }

      // Create audio element if it doesn't exist. Listeners are attached
      // once here and survive subsequent loadTrack() calls.
      if (!this.audioElement) {
        console.log('🎧 Creating new audio element')
        this.audioElement = document.createElement('audio')
        // Required so the MediaElementAudioSourceNode can read cross-origin data.
        this.audioElement.crossOrigin = 'anonymous'
        this.audioElement.preload = 'metadata'
        this.audioElement.volume = 0.7 // Set initial volume
        console.log('🔊 Audio element volume set to:', this.audioElement.volume)

        // Add comprehensive event listeners
        this.audioElement.addEventListener('loadedmetadata', () => {
          console.log(`✅ Track metadata loaded: ${this.audioElement?.duration}s`)
        })
        this.audioElement.addEventListener('timeupdate', () => {
          // This enables real-time current time tracking
        })
        this.audioElement.addEventListener('ended', () => {
          console.log('🔚 Track ended:', this.currentTrack?.title)
          // Trigger auto-play callback if set
          if (this.onTrackEndedCallback) {
            console.log('🎵 Triggering auto-play to next track')
            this.onTrackEndedCallback()
          }
        })
        this.audioElement.addEventListener('error', (e) => {
          console.error('❌ Audio error:', e)
          console.error('Error code:', this.audioElement?.error?.code)
          console.error('Error message:', this.audioElement?.error?.message)
        })
        this.audioElement.addEventListener('canplaythrough', () => {
          console.log('✅ Track can play through')
        })
        this.audioElement.addEventListener('loadstart', () => {
          console.log('🔄 Started loading track')
        })
        this.audioElement.addEventListener('progress', () => {
          console.log('📊 Loading progress')
        })
        this.audioElement.addEventListener('canplay', () => {
          console.log('✅ Track can start playing')
        })
        this.audioElement.addEventListener('stalled', () => {
          console.warn('⚠️ Track loading stalled')
        })
        this.audioElement.addEventListener('suspend', () => {
          console.warn('⚠️ Track loading suspended')
        })
        this.audioElement.addEventListener('abort', () => {
          console.warn('⚠️ Track loading aborted')
        })
        console.log('✅ Audio element created with event listeners')
      }

      // Load the new track
      console.log('📝 Setting audio source...')
      this.audioElement.src = track.url
      this.currentTrack = track
      console.log('📊 Audio element initial state:', {
        src: this.audioElement.src,
        readyState: this.audioElement.readyState,
        networkState: this.audioElement.networkState
      })

      // Wait for metadata to load. One-shot listeners are removed on every
      // exit path (success, error, timeout) so they never fire twice.
      return new Promise((resolve, reject) => {
        const timeout = setTimeout(() => {
          this.audioElement?.removeEventListener('loadedmetadata', onLoadedMetadata)
          this.audioElement?.removeEventListener('error', onError)
          console.error('❌ Track loading timeout after 10 seconds')
          reject(new Error('Track loading timeout after 10 seconds'))
        }, 10000)
        const onLoadedMetadata = () => {
          clearTimeout(timeout)
          this.audioElement?.removeEventListener('loadedmetadata', onLoadedMetadata)
          this.audioElement?.removeEventListener('error', onError)
          console.log(`✅ Track loaded successfully: ${track.title}`)
          console.log('📊 Final audio element state:', {
            duration: this.audioElement?.duration,
            readyState: this.audioElement?.readyState,
            networkState: this.audioElement?.networkState
          })
          resolve()
        }
        const onError = () => {
          clearTimeout(timeout)
          this.audioElement?.removeEventListener('loadedmetadata', onLoadedMetadata)
          this.audioElement?.removeEventListener('error', onError)
          console.error(`❌ Failed to load track: ${track.title}`)
          console.error('❌ Audio element error state:', {
            error: this.audioElement?.error,
            readyState: this.audioElement?.readyState,
            networkState: this.audioElement?.networkState
          })
          reject(new Error('Failed to load audio track'))
        }
        console.log('⏳ Waiting for metadata to load...')
        this.audioElement?.addEventListener('loadedmetadata', onLoadedMetadata)
        this.audioElement?.addEventListener('error', onError)
        this.audioElement?.load()
      })
    } catch (error) {
      console.error('❌ Failed to load track:', error)
      throw error
    }
  }

  /**
   * Starts playback. Must be called from a user gesture the first time:
   * it resumes the AudioContext and lazily wires
   * element -> MediaElementAudioSourceNode -> analyser -> destination
   * (createMediaElementSource may only be called once per element).
   * @throws if no track is loaded or the browser rejects playback.
   */
  async play(): Promise<void> {
    if (!this.audioElement || !this.currentTrack) {
      throw new Error('No track loaded')
    }
    console.log(`🎵 Attempting to play: ${this.currentTrack.title}`)
    console.log(`📊 Audio element state:`, {
      src: this.audioElement.src,
      readyState: this.audioElement.readyState,
      paused: this.audioElement.paused,
      duration: this.audioElement.duration,
      currentTime: this.audioElement.currentTime,
      volume: this.audioElement.volume
    })
    try {
      // Ensure AudioContext is started (user gesture required)
      await this.ensureContextStarted()
      // Create audio source connection if not already done
      if (!this.sourceNode && this.audioContext && this.analyser && this.isContextStarted) {
        console.log('🔗 Connecting audio source to analyser')
        this.sourceNode = this.audioContext.createMediaElementSource(this.audioElement)
        this.sourceNode.connect(this.analyser)
        this.analyser.connect(this.audioContext.destination)
        console.log('✅ Audio source connected to analyser')
      }
      console.log('▶️ Calling audio.play()')
      await this.audioElement.play()
      console.log('✅ Playback started successfully')
    } catch (error) {
      console.error('❌ Playback failed:', error)
      if (error instanceof DOMException) {
        console.error('DOMException details:', {
          name: error.name,
          message: error.message,
          code: error.code
        })
      }
      throw error
    }
  }

  /** Pauses playback (no-op if nothing is loaded). */
  pause(): void {
    if (this.audioElement) {
      this.audioElement.pause()
      console.log('Playback paused')
    }
  }

  /** Seeks to `time` seconds (no-op if nothing is loaded). */
  seekTo(time: number): void {
    if (this.audioElement) {
      this.audioElement.currentTime = time
    }
  }

  /** Sets element volume, clamped to [0, 1]. */
  setVolume(volume: number): void {
    if (this.audioElement) {
      this.audioElement.volume = Math.max(0, Math.min(1, volume))
    }
  }

  /**
   * Samples the analyser and returns per-band energies (normalized 0-1),
   * overall amplitude, and peak/beat flags. Safe to call every frame;
   * returns a zeroed snapshot when the analyser is unavailable.
   * Side effects: advances peak-detection history and may trigger a beat wave.
   */
  getAudioData(): AudioData {
    if (!this.analyser || !this.audioContext) {
      // 1024 matches frequencyBinCount for the configured fftSize of 2048.
      return {
        frequencies: new Uint8Array(1024),
        deepEnergy: 0,
        midEnergy: 0,
        highEnergy: 0,
        overallAmplitude: 0,
        peakDetected: false,
        beatDetected: false
      }
    }
    try {
      const frequencies = new Uint8Array(this.analyser.frequencyBinCount)
      this.analyser.getByteFrequencyData(frequencies)

      // Convert frequency (Hz) to an analyser bin index; bins span 0..Nyquist.
      const frequencyToIndex = (frequency: number): number => {
        return Math.round(frequency / (this.audioContext!.sampleRate / 2) * this.analyser!.frequencyBinCount)
      }

      // Extract energy for each frequency range
      const deepIndices = {
        min: frequencyToIndex(this.frequencyMapping.deepFreq.min),
        max: frequencyToIndex(this.frequencyMapping.deepFreq.max)
      }
      const midIndices = {
        min: frequencyToIndex(this.frequencyMapping.midFreq.min),
        max: frequencyToIndex(this.frequencyMapping.midFreq.max)
      }
      const highIndices = {
        min: frequencyToIndex(this.frequencyMapping.highFreq.min),
        max: frequencyToIndex(this.frequencyMapping.highFreq.max)
      }

      // Calculate normalized energy levels (mean byte value / 255).
      const deepRange = frequencies.slice(deepIndices.min, deepIndices.max + 1)
      const midRange = frequencies.slice(midIndices.min, midIndices.max + 1)
      const highRange = frequencies.slice(highIndices.min, highIndices.max + 1)
      const deepEnergy = deepRange.reduce((a, b) => a + b, 0) / deepRange.length / 255
      const midEnergy = midRange.reduce((a, b) => a + b, 0) / midRange.length / 255
      const highEnergy = highRange.reduce((a, b) => a + b, 0) / highRange.length / 255

      // Overall amplitude
      const overallAmplitude = frequencies.reduce((a, b) => a + b, 0) / frequencies.length / 255

      // Peak detection: compare current mid-band energy against its rolling average.
      const currentEnergy = midEnergy * 255 // Use mid-range for peak detection
      this.peakDetection.energyHistory.push(currentEnergy)
      if (this.peakDetection.energyHistory.length > this.peakDetection.historyLength) {
        this.peakDetection.energyHistory.shift()
      }
      const averageEnergy = this.peakDetection.energyHistory.reduce((a, b) => a + b, 0) /
        this.peakDetection.energyHistory.length
      const now = performance.now()
      const peakDetected = currentEnergy > averageEnergy * this.peakDetection.sensitivity &&
        now - this.peakDetection.lastPeakTime > this.peakDetection.minTimeBetweenPeaks
      if (peakDetected) {
        this.peakDetection.lastPeakTime = now
      }

      // Beat detection (based on deep frequencies)
      const beatThreshold = 150 // Normalized threshold
      const beatDetected = deepEnergy * 255 > beatThreshold
      // Only launch a new wave once the previous one has finished.
      if (beatDetected && !this.beatManager.isWaveActive) {
        this.beatManager.triggerWave(deepEnergy * 255)
      }

      return {
        frequencies,
        deepEnergy,
        midEnergy,
        highEnergy,
        overallAmplitude,
        peakDetected,
        beatDetected
      }
    } catch (error) {
      console.error('Audio analysis failed:', error)
      return {
        frequencies: new Uint8Array(1024),
        deepEnergy: 0,
        midEnergy: 0,
        highEnergy: 0,
        overallAmplitude: 0,
        peakDetected: false,
        beatDetected: false
      }
    }
  }

  /** Public method to advance the beat wave by `deltaTime` seconds. */
  updateBeatManager(deltaTime: number): void {
    this.beatManager.update(deltaTime)
  }

  /** Public method to get the wave force at `distance` for visualization. */
  getWaveForce(distance: number): number {
    return this.beatManager.getWaveForce(distance)
  }

  /**
   * Tears down the audio graph and resets ALL state so the engine can be
   * re-initialized from scratch.
   */
  cleanup(): void {
    if (this.sourceNode) {
      this.sourceNode.disconnect()
      this.sourceNode = null
    }
    if (this.audioContext) {
      // FIX: close() returns a promise; log failures instead of leaving an
      // unhandled rejection.
      this.audioContext.close().catch((error) => {
        console.error('Failed to close AudioContext:', error)
      })
      this.audioContext = null
    }
    if (this.audioElement) {
      this.audioElement.pause()
      this.audioElement.src = ''
      this.audioElement = null
    }
    this.analyser = null
    this.isInitialized = false
    // FIX: also reset gesture/track state. Previously cleanup() followed by
    // initialize() left isContextStarted === true, so ensureContextStarted()
    // skipped resuming the brand-new (suspended) context and playback could
    // stay silent; currentTrack and the ended-callback were stale too.
    this.isContextStarted = false
    this.currentTrack = null
    this.onTrackEndedCallback = null
    console.log('AudioEngine cleaned up')
  }

  /** Current playback position in seconds (0 when nothing is loaded). */
  getCurrentTime(): number {
    return this.audioElement?.currentTime || 0
  }

  /** Track duration in seconds (0 when unloaded; NaN also coerces to 0). */
  getDuration(): number {
    return this.audioElement?.duration || 0
  }

  /** Direct access to the underlying element (null before first loadTrack). */
  getAudioElement(): HTMLAudioElement | null {
    return this.audioElement
  }

  /** True while the element exists and is not paused. */
  isPlaying(): boolean {
    return this.audioElement ? !this.audioElement.paused : false
  }

  /**
   * Test method: verifies a URL is reachable, has an audio/* content type,
   * and that its metadata loads in a throwaway element within 5s.
   * Never plays audio (avoids the user-gesture requirement).
   * @returns true on success; logs and returns false on any failure.
   */
  async testAudioPlayback(url: string): Promise<boolean> {
    console.log('🧪 Testing audio file accessibility and format...')
    try {
      // First test HTTP accessibility
      const response = await fetch(url, { method: 'HEAD' })
      if (!response.ok) {
        console.error('❌ HTTP test failed:', response.status, response.statusText)
        return false
      }
      const contentType = response.headers.get('content-type')
      console.log('📊 Content-Type:', contentType)
      if (!contentType || !contentType.includes('audio/')) {
        console.error('❌ Invalid content type:', contentType)
        return false
      }
      // Test audio element loading (without playing to avoid user gesture requirement)
      const testAudio = document.createElement('audio')
      testAudio.volume = 0.1 // Very low volume for safety
      testAudio.src = url
      return new Promise((resolve) => {
        const timeout = setTimeout(() => {
          cleanup()
          console.error('❌ Audio test timeout')
          resolve(false)
        }, 5000)
        // Shared teardown for every exit path.
        const cleanup = () => {
          clearTimeout(timeout)
          testAudio.removeEventListener('loadedmetadata', onLoadedMetadata)
          testAudio.removeEventListener('error', onError)
          testAudio.src = ''
        }
        const onLoadedMetadata = () => {
          console.log('✅ Audio metadata loaded successfully, duration:', testAudio.duration)
          cleanup()
          resolve(true)
        }
        const onError = (e: Event) => {
          console.error('❌ Audio loading error:', e)
          console.error('Error details:', testAudio.error)
          cleanup()
          resolve(false)
        }
        testAudio.addEventListener('loadedmetadata', onLoadedMetadata)
        testAudio.addEventListener('error', onError)
        testAudio.load()
      })
    } catch (error) {
      console.error('❌ Audio test failed:', error)
      return false
    }
  }

  /**
   * Enhanced frequency analysis for particle systems. Applies the sphere's
   * gain, measures energy in its visualization and beat frequency ranges,
   * and runs peak detection against the sphere's own rolling history
   * (which this method mutates). Returns zeroed data without an analyser.
   */
  public getAdvancedAudioData(sphere: {
    params: {
      minFrequency: number
      maxFrequency: number
      minFrequencyBeat: number
      maxFrequencyBeat: number
      gainMultiplier: number
      peakSensitivity: number
    }
    peakDetection: {
      energyHistory: number[]
      historyLength: number
      lastPeakTime: number
      minTimeBetweenPeaks: number
    }
  }): {
    average: number
    frequencies: Uint8Array
    peakDetected: boolean
    rangeEnergy: number
    rangeEnergyBeat: number
  } {
    if (!this.analyser || !this.audioContext) {
      return {
        average: 0,
        frequencies: new Uint8Array(),
        peakDetected: false,
        rangeEnergy: 0,
        rangeEnergyBeat: 0
      }
    }
    try {
      const frequencies = new Uint8Array(this.analyser.frequencyBinCount)
      this.analyser.getByteFrequencyData(frequencies)

      // Apply gain multiplier in place (Uint8Array stores clamped bytes).
      const gainMultiplier = sphere.params.gainMultiplier
      frequencies.forEach((value, index) => {
        frequencies[index] = Math.min(value * gainMultiplier, 255)
      })

      // Calculate frequency range indices
      const frequencyToIndex = (frequency: number) =>
        Math.round(frequency / (this.audioContext!.sampleRate / 2) * this.analyser!.frequencyBinCount)

      // Main frequency range for visualization
      const minFreqIndex = frequencyToIndex(sphere.params.minFrequency)
      const maxFreqIndex = frequencyToIndex(sphere.params.maxFrequency)
      const frequencyRange = frequencies.slice(minFreqIndex, maxFreqIndex + 1)
      const rangeEnergy = frequencyRange.reduce((a, b) => a + b, 0) / frequencyRange.length

      // Beat detection frequency range
      const minFreqBeatIndex = frequencyToIndex(sphere.params.minFrequencyBeat)
      const maxFreqBeatIndex = frequencyToIndex(sphere.params.maxFrequencyBeat)
      const frequencyRangeBeat = frequencies.slice(minFreqBeatIndex, maxFreqBeatIndex + 1)
      const rangeEnergyBeat = frequencyRangeBeat.reduce((a, b) => a + b, 0) / frequencyRangeBeat.length

      // Peak detection logic: rolling average over the sphere's own history.
      sphere.peakDetection.energyHistory.push(rangeEnergy)
      if (sphere.peakDetection.energyHistory.length > sphere.peakDetection.historyLength) {
        sphere.peakDetection.energyHistory.shift()
      }
      const averageEnergy = sphere.peakDetection.energyHistory.reduce((a, b) => a + b, 0) /
        sphere.peakDetection.energyHistory.length
      const now = performance.now()
      const peakDetected = rangeEnergy > averageEnergy * sphere.params.peakSensitivity &&
        now - sphere.peakDetection.lastPeakTime > sphere.peakDetection.minTimeBetweenPeaks
      if (peakDetected) {
        sphere.peakDetection.lastPeakTime = now
        console.log(`🎵 PEAK DETECTED! Energy: ${rangeEnergy.toFixed(1)}, Average: ${averageEnergy.toFixed(1)}`)
      }

      return {
        average: rangeEnergy / 255,
        frequencies,
        peakDetected,
        rangeEnergy,
        rangeEnergyBeat
      }
    } catch (error) {
      console.error('❌ Enhanced audio analysis failed:', error)
      return {
        average: 0,
        frequencies: new Uint8Array(),
        peakDetected: false,
        rangeEnergy: 0,
        rangeEnergyBeat: 0
      }
    }
  }

  /**
   * Spike-filtered volume for rotation effects: the current spectrum average
   * is accepted only when it changed by no more than `volumeChangeThreshold`
   * from `lastValidVolume` (sudden jumps are reported with
   * shouldUpdate === false). Returns { volume: 0, shouldUpdate: false }
   * when no analyser is available.
   */
  public getSmoothVolume(lastValidVolume: number, volumeChangeThreshold: number): {
    volume: number
    shouldUpdate: boolean
  } {
    if (!this.analyser) {
      return { volume: 0, shouldUpdate: false }
    }
    const bufferLength = this.analyser.frequencyBinCount
    const dataArray = new Uint8Array(bufferLength)
    this.analyser.getByteFrequencyData(dataArray)
    let sum = 0
    for (let i = 0; i < bufferLength; i++) {
      sum += dataArray[i]
    }
    const average = sum / bufferLength
    const normalizedVolume = average / 255
    let shouldUpdate = true
    if (lastValidVolume === 0) {
      // First sample: accept unconditionally.
      lastValidVolume = normalizedVolume
    } else {
      const change = Math.abs(normalizedVolume - lastValidVolume)
      if (change <= volumeChangeThreshold) {
        lastValidVolume = normalizedVolume
      } else {
        // Reject the spike; keep the previous valid volume.
        shouldUpdate = false
      }
    }
    return { volume: lastValidVolume, shouldUpdate }
  }

  /** Registers (or clears, with null) the callback fired when a track ends. */
  public setOnTrackEndedCallback(callback: (() => void) | null): void {
    this.onTrackEndedCallback = callback
    console.log('🎵 Auto-play callback', callback ? 'enabled' : 'disabled')
  }
}