# Hugging Face Space (status: Sleeping) — Gradio demo: streaming microphone
# audio through a custom HTML/JS recorder into hidden gr.Audio components.
import gradio as gr
import numpy as np
def process_audio(audio):
    """Server-side handler for streamed microphone chunks.

    Acts as a pure pass-through: each chunk received from the client is
    returned unchanged. Any real server-side processing would replace the
    return expression below.
    """
    # Identity transform — echo the chunk straight back to the output stream.
    return audio
with gr.Blocks() as demo:
    # Hidden Gradio components; the custom JS below pushes captured audio
    # into audio_input, and the .stream() wiring echoes it to audio_output.
    audio_input = gr.Audio(sources="microphone", streaming=True, visible=False)
    audio_output = gr.Audio(streaming=True, visible=False)
    html = gr.HTML("""
    <button id="startButton">Start Recording</button>
    <button id="stopButton" disabled>Stop Recording</button>
    <div id="status">Ready</div>
    <div id="debug"></div>
    <script>
    let audioContext;
    let mediaStream;          // kept so the mic can be released on stop (bug fix)
    let mediaStreamSource;
    let processor;
    let recording = false;

    // gradioApp() only exists in some embeddings (e.g. the A1111 webui);
    // on a plain Gradio page it is undefined and the original code threw a
    // ReferenceError on every audio chunk. Fall back to document.
    function appRoot() {
        return (typeof gradioApp === 'function') ? gradioApp() : document;
    }

    async function startRecording() {
        try {
            audioContext = new (window.AudioContext || window.webkitAudioContext)();
            mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });
            mediaStreamSource = audioContext.createMediaStreamSource(mediaStream);
            // NOTE(review): createScriptProcessor is deprecated in favor of
            // AudioWorklet; kept here to preserve the original design.
            processor = audioContext.createScriptProcessor(1024, 1, 1);
            mediaStreamSource.connect(processor);
            processor.connect(audioContext.destination);
            processor.onaudioprocess = function(e) {
                if (!recording) return;
                const audioData = e.inputBuffer.getChannelData(0);
                document.getElementById('debug').textContent = 'Processing audio chunk...';
                // Send audio data to the server.
                // NOTE(review): this blob is raw float32 PCM labeled audio/wav
                // (no RIFF header) — the server must expect raw samples; confirm.
                const blob = new Blob([audioData], {type: 'audio/wav'});
                const file = new File([blob], 'audio.wav', {type: 'audio/wav'});
                const dt = new DataTransfer();
                dt.items.add(file);
                appRoot().querySelector('#component-0').querySelector('input[type=file]').files = dt.files;
                appRoot().querySelector('#component-0').querySelector('button[type=submit]').click();
            };
            recording = true;
            document.getElementById('status').textContent = 'Recording...';
            document.getElementById('startButton').disabled = true;
            document.getElementById('stopButton').disabled = false;
        } catch (err) {
            console.error('Error starting recording:', err);
            document.getElementById('status').textContent = 'Error: ' + err.message;
        }
    }

    function stopRecording() {
        if (processor) {
            processor.disconnect();
            mediaStreamSource.disconnect();
        }
        // Release the microphone. The original never stopped the tracks,
        // so the browser's mic indicator stayed on after "Stop".
        if (mediaStream) {
            mediaStream.getTracks().forEach((track) => track.stop());
            mediaStream = null;
        }
        // Close the audio graph so repeated start/stop cycles don't leak
        // AudioContext instances.
        if (audioContext) {
            audioContext.close();
            audioContext = null;
        }
        recording = false;
        document.getElementById('status').textContent = 'Stopped';
        document.getElementById('startButton').disabled = false;
        document.getElementById('stopButton').disabled = true;
    }

    document.getElementById('startButton').addEventListener('click', startRecording);
    document.getElementById('stopButton').addEventListener('click', stopRecording);
    </script>
    """)

    # Route each streamed chunk through the server-side (identity) handler.
    audio_input.stream(process_audio, inputs=audio_input, outputs=audio_output)
if __name__ == "__main__":
    # Launch the Gradio server only when executed as a script,
    # not when this module is imported.
    demo.launch()