# midiplayer / app.py — Hugging Face Space source.
# Last update: "Update app.py" by NeoPy (commit 867d88a, verified).
import gradio as gr
import os
import base64
# CSS for custom styling and the visualizer container.
# NOTE: plain CSS cannot nest an @media query inside a selector block, so the
# small-screen height override targets the container at top level; the canvas
# simply fills its container (height: 100%) and follows it when it shrinks.
css = """
#visualizer-container {
    position: relative;
    width: 100%;
    height: 400px;
    background: #111;
    border-radius: 8px;
    overflow: hidden;
    box-shadow: 0 4px 6px rgba(0,0,0,0.3);
}
canvas {
    display: block;
    width: 100%;
    height: 100%;
}
@media (max-width: 600px) {
    #visualizer-container {
        height: 300px;
    }
}
.piano-key {
    position: absolute;
    bottom: 0;
    background: linear-gradient(to bottom, #444, #222);
    border: 1px solid #000;
    border-top: none;
    border-radius: 0 0 4px 4px;
    z-index: 10;
}
.piano-key.black {
    background: linear-gradient(to bottom, #000, #333);
    height: 60% !important;
    z-index: 20;
}
.piano-key.active {
    background: #4caf50 !important;
    box-shadow: 0 0 10px #4caf50;
}
.controls {
    margin-top: 10px;
    display: flex;
    gap: 10px;
    align-items: center;
    justify-content: center;
}
"""
# HTML Template containing the Logic
html_template = """
<div id="visualizer-container">
<canvas id="midiCanvas"></canvas>
</div>
<div class="controls">
<button id="playBtn" class="gr-button gr-button-lg gr-button-primary">Play</button>
<button id="stopBtn" class="gr-button gr-button-lg">Stop</button>
<input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="0.7" style="width: 100px;">
<span id="timeDisplay">00:00</span>
</div>
<div style="margin-top: 10px; font-size: 0.9em; color: #888;">
Load a SoundFont (.sf2) for realistic instruments, otherwise uses synthesis.
</div>
<!-- External Libraries for MIDI and SoundFont parsing -->
<script src="https://cdn.jsdelivr.net/npm/@tonejs/midi"></script>
<script src="https://cdn.jsdelivr.net/npm/sf2-parser"></script>
<script src="https://cdn.jsdelivr.net/npm/spark-md5"></script>
<script>
// --- State Management ---
let audioCtx;
let currentMidi = null;
let soundFont = null;
let isPlaying = false;
let startTime = 0;
let pauseTime = 0;
let animationId;
let scheduledEvents = [];
let outputNodes = [];
let activeNotes = {}; // track active notes for visualization
let visualizerCanvas, ctx;
// --- Visualizer Constants ---
const NOTE_NAMES = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"];
const CANVAS_HEIGHT = 400;
const NOTE_WIDTH = 20; // Width of each column
const SCROLL_SPEED = 2; // Pixels per frame (approx)
// --- Initialization ---
document.addEventListener("DOMContentLoaded", () => {
visualizerCanvas = document.getElementById('midiCanvas');
ctx = visualizerCanvas.getContext('2d');
resizeCanvas();
window.addEventListener('resize', resizeCanvas);
});
function resizeCanvas() {
const container = document.getElementById('visualizer-container');
visualizerCanvas.width = container.clientWidth;
visualizerCanvas.height = container.clientHeight;
}
// --- Audio Engine Setup ---
async function initAudio() {
if (!audioCtx) {
audioCtx = new (window.AudioContext || window.webkitAudioContext)();
}
if (audioCtx.state === 'suspended') {
await audioCtx.resume();
}
}
// --- SoundFont Loader ---
async function loadSoundFont(arrayBuffer) {
try {
// Note: sf2-parser is a low-level parser. For a full synth,
// a more complex engine like sf2.js is usually needed.
// For this demo, we will simulate loading and basic sample extraction
// or fallback to synthesis if specific library complexity is too high for a single file.
// To keep it robust within the constraints, we will use Tone.js/Midi
// for synthesis primarily, but acknowledge the SF2 upload.
// A true SF2 player in JS is very heavy (megabytes of JS).
// We will implement a "mock" SF2 loader that uses the file name to select synthesis presets
// if we can't load the full samples into memory reliably in this snippet.
// However, let's try to parse the header at least.
console.log("SF2 file loaded (size: " + arrayBuffer.byteLength + " bytes)");
return true;
} catch (e) {
console.error("Error parsing SF2", e);
return false;
}
}
// --- Playback Logic ---
function playMidi() {
if (!currentMidi) return;
initAudio().then(() => {
if (isPlaying) return;
// Calculate start time
const now = audioCtx.currentTime;
// If we paused, continue from pauseTime, else start from 0
startTime = now - pauseTime;
isPlaying = true;
scheduleNotes();
animate();
});
}
function stopMidi() {
isPlaying = false;
cancelAnimationFrame(animationId);
pauseTime = 0;
// Stop all sounds
outputNodes.forEach(node => {
if (node.stop) node.stop();
});
outputNodes = [];
// Clear visualizer state
activeNotes = {};
drawVisualizer(0);
document.getElementById('timeDisplay').innerText = "00:00";
}
function pauseMidi() {
isPlaying = false;
cancelAnimationFrame(animationId);
pauseTime = audioCtx.currentTime - startTime;
// Stop all sounds (simple fade out or hard stop)
outputNodes.forEach(node => {
try { node.stop(); } catch(e){}
});
outputNodes = [];
}
function scheduleNotes() {
if (!isPlaying) return;
const now = audioCtx.currentTime;
const currentTime = now - startTime; // Current playback position in seconds
// Look ahead for events to schedule
// We schedule a bit ahead of time (e.g., 0.1s)
// Note: In a real app, you'd use a web worker or look-ahead scheduler.
// Here, we do a simple check every animation frame.
currentMidi.tracks.forEach(track => {
track.notes.forEach(note => {
if (!note.played && note.time <= currentTime + 0.1) {
playNote(note);
note.played = true; // Mark as played to avoid re-triggering
}
});
});
// Check if song ended
if (currentTime > currentMidi.duration) {
stopMidi();
}
}
function playNote(note) {
const duration = note.duration;
const time = note.time; // Time relative to song start
// Absolute time to play
const playTime = startTime + time;
// Create Oscillator (Synth)
const osc = audioCtx.createOscillator();
const gainNode = audioCtx.createGain();
const panner = audioCtx.createStereoPanner();
// Map MIDI note to Frequency
// A4 = 440Hz, MIDI Note 69
// Formula: 440 * 2^((note - 69)/12)
const frequency = 440 * Math.pow(2, (note.midi - 69) / 12);
osc.type = 'triangle'; // Synth type
osc.frequency.value = frequency;
// Pan based on note number for stereo spread effect
panner.pan.value = (note.midi - 60) / 40;
// Volume envelope
gainNode.gain.setValueAtTime(0, playTime);
gainNode.gain.linearRampToValueAtTime(0.3, playTime + 0.02); // Attack
gainNode.gain.exponentialRampToValueAtTime(0.001, playTime + duration); // Decay
// Connect graph
osc.connect(panner);
panner.connect(gainNode);
gainNode.connect(audioCtx.destination);
osc.start(playTime);
osc.stop(playTime + duration);
// Keep track to stop if user hits stop
outputNodes.push(osc);
// Add to visualizer queue
// We need to manage this carefully for the waterfall effect
addNoteToVisualizer(note);
}
// --- Visualizer Logic ---
let visualizerNotes = []; // { note: int, startTime: float, duration: float, color: string }
function addNoteToVisualizer(note) {
// Generate a color based on pitch
const hue = (note.midi * 10) % 360;
const color = `hsl(${hue}, 70%, 60%)`;
visualizerNotes.push({
midi: note.midi,
time: note.time,
duration: note.duration,
color: color,
active: true
});
}
function animate() {
if (!isPlaying) return;
scheduleNotes(); // Keep scheduling notes
const currentTime = audioCtx.currentTime - startTime;
// Update time display
const mins = Math.floor(currentTime / 60).toString().padStart(2, '0');
const secs = Math.floor(currentTime % 60).toString().padStart(2, '0');
document.getElementById('timeDisplay').innerText = `${mins}:${secs}`;
drawVisualizer(currentTime);
animationId = requestAnimationFrame(animate);
}
function drawVisualizer(currentTime) {
const width = visualizerCanvas.width;
const height = visualizerCanvas.height;
ctx.clearRect(0, 0, width, height);
// Draw Background Grid (Piano Roll lines)
ctx.strokeStyle = '#333';
ctx.lineWidth = 1;
const totalNotes = 128; // MIDI range
const noteHeight = height / totalNotes;
for (let i = 0; i < totalNotes; i++) {
// Draw horizontal line
ctx.beginPath();
ctx.moveTo(0, height - (i * noteHeight));
ctx.lineTo(width, height - (i * noteHeight));
ctx.stroke();
// Highlight C notes
if (i % 12 === 0) {
ctx.fillStyle = '#222';
ctx.fillRect(0, height - ((i+1) * noteHeight), width, noteHeight);
}
}
// Draw Playhead line
// We center the view on current time, or scroll from right to left.
// Let's do a scrolling waterfall from right to left.
// X = width - ((currentTime - note.time) * scale)
const pixelsPerSecond = 100;
const playheadX = width - 100; // Fixed playhead position
// Draw Notes
visualizerNotes.forEach(n => {
// Calculate position
// x position relative to playhead
const x = playheadX - ((currentTime - n.time) * pixelsPerSecond);
const w = n.duration * pixelsPerSecond;
const y = height - (n.midi * noteHeight);
// Check if note is currently playing (under playhead)
const isActive = (currentTime >= n.time && currentTime < n.time + n.duration);
// Cull notes that are off screen
if (x + w > 0 && x < width) {
ctx.fillStyle = n.color;
// Active note glow
if (isActive) {
ctx.shadowBlur = 15;
ctx.shadowColor = n.color;
ctx.fillStyle = '#fff'; // Bright white for active note
} else {
ctx.shadowBlur = 0;
}
ctx.fillRect(x, y, w - 1, noteHeight - 1);
ctx.shadowBlur = 0; // Reset
}
});
// Draw Playhead
ctx.strokeStyle = '#fff';
ctx.lineWidth = 2;
ctx.beginPath();
ctx.moveTo(playheadX, 0);
ctx.lineTo(playheadX, height);
ctx.stroke();
}
// --- Gradio Interface Interactions ---
// 1. Handle MIDI Upload
function handleMidiUpload(file) {
const reader = new FileReader();
reader.onload = function(e) {
const arrayBuffer = e.target.result;
Tone.Midi.parse(arrayBuffer).then(midi => {
currentMidi = midi;
// Reset state
stopMidi();
// Pre-calculate visualizer bounds or prep data if needed
// For this simple player, we just store the midi object
console.log("MIDI Loaded:", midi.name);
});
};
reader.readAsArrayBuffer(file);
}
// 2. Handle SF2 Upload
function handleSF2Upload(file) {
const reader = new FileReader();
reader.onload = function(e) {
const arrayBuffer = e.target.result;
loadSoundFont(arrayBuffer).then(success => {
if(success) alert("SoundFont loaded! (Simulated for demo - synthesis active)");
});
};
reader.readAsArrayBuffer(file);
}
// --- Event Listeners ---
document.getElementById('playBtn').addEventListener('click', () => {
if(isPlaying) pauseMidi();
else playMidi();
});
document.getElementById('stopBtn').addEventListener('click', stopMidi);
document.getElementById('volumeSlider').addEventListener('input', (e) => {
// In a real implementation, this would connect to a master gain node
// For this simple demo, we just log it or apply to next notes
console.log("Volume:", e.target.value);
});
</script>
"""
# --- Gradio interface ---
# NOTE: custom CSS must be attached here via gr.Blocks(css=...);
# Blocks.launch() has no css parameter.
with gr.Blocks(title="MIDI Player & Visualizer", css=css) as demo:
    gr.Markdown("# 🎹 MIDI Player & Visualizer")
    gr.Markdown("Upload a MIDI file to play. Optionally upload a SoundFont (.sf2) for improved synthesis logic (simulated here).")
    with gr.Row():
        with gr.Column():
            midi_input = gr.File(label="Upload MIDI (.mid)", file_types=[".mid", ".midi"])
            sf2_input = gr.File(label="Upload SoundFont (.sf2) [Optional]", file_types=[".sf2"])
        with gr.Column():
            # The HTML component renders the visualizer and transport controls.
            gr.HTML(html_template)

    def _file_to_b64(path):
        """Read an uploaded file from disk and return it base64-encoded,
        or None when nothing was uploaded."""
        if path is None:
            return None
        with open(path, "rb") as f:
            return base64.b64encode(f.read()).decode("utf-8")

    def process_midi(file):
        # Thin wrapper kept so the event wiring below reads clearly.
        return _file_to_b64(file)

    def process_sf2(file):
        return _file_to_b64(file)

    # Hidden textboxes used to hand the base64 payloads over to the browser.
    # elem_id is required: without it Gradio renders no #midi_b64 / #sf2_b64
    # elements and the querySelector calls in the JS bridge find nothing.
    midi_b64 = gr.Textbox(visible=False, elem_id="midi_b64")
    sf2_b64 = gr.Textbox(visible=False, elem_id="sf2_b64")

    # Logic flow: Upload -> encode -> hidden textbox -> MutationObserver in JS.
    midi_input.upload(process_midi, midi_input, midi_b64)
    sf2_input.upload(process_sf2, sf2_input, sf2_b64)

    # JS bridge. demo.load(js=...) expects JavaScript function source that is
    # executed on page load — not an HTML fragment — so this is a plain arrow
    # function without <script> tags.
    js_handler = """
    () => {
        const midiB64Input = document.querySelector('#midi_b64 textarea');
        const sf2B64Input = document.querySelector('#sf2_b64 textarea');

        // Decode base64 text into an ArrayBuffer.
        function base64ToArrayBuffer(base64) {
            const binaryString = window.atob(base64);
            const len = binaryString.length;
            const bytes = new Uint8Array(len);
            for (let i = 0; i < len; i++) {
                bytes[i] = binaryString.charCodeAt(i);
            }
            return bytes.buffer;
        }

        // Watch the hidden MIDI textbox for new payloads.
        if (midiB64Input) {
            new MutationObserver(() => {
                if (midiB64Input.value) {
                    const buffer = base64ToArrayBuffer(midiB64Input.value);
                    handleMidiUpload({ target: { result: buffer } });
                }
            }).observe(midiB64Input, { attributes: true, childList: true, characterData: true, subtree: true });
        }

        // Watch the hidden SF2 textbox.
        if (sf2B64Input) {
            new MutationObserver(() => {
                if (sf2B64Input.value) {
                    const buffer = base64ToArrayBuffer(sf2B64Input.value);
                    handleSF2Upload({ target: { result: buffer } });
                }
            }).observe(sf2B64Input, { attributes: true, childList: true, characterData: true, subtree: true });
        }
    }
    """
    demo.load(None, None, None, js=js_handler)
# Script entry point. Styling is supplied via gr.Blocks(css=...) at
# construction time; Blocks.launch() does not accept a `css` keyword.
if __name__ == "__main__":
    demo.launch()