// anycoder-863dbc88 / index.js — uploaded via huggingface_hub (commit 20d31e3, verified)
/**
* index.js
*
* Browser-based mini-DAW demo with:
* - Simple 4-track Web Audio drum step sequencer
* - Text-based "piano roll" notes area
* - AI Song Starter powered by transformers.js running in a Web Worker
*
* This is intentionally focused: in a real implementation you would plug
* this UI and audio engine into the much larger architecture described
* in the technical spec (cloud sync, collaboration, Web3, etc.).
*/
// Number of 16th-note steps in the one-bar loop.
const GRID_STEPS = 16;
// Drum voices. `freq` seeds the synthesis in triggerDrum(): kick/bass use it
// as the oscillator pitch; snare/hi-hat ignore it (they are noise-based).
const TRACKS = [
  { name: "Kick", freq: 60, trackIndex: 0 },
  { name: "Snare", freq: 180, trackIndex: 1 },
  { name: "Hi-hat", freq: 4000, trackIndex: 2 },
  { name: "Bass", freq: 90, trackIndex: 3 }, // simple bass thump
];
// Simple in-memory "project" state
const state = {
  // grid[trackIndex][step] === true when that step is active.
  grid: Array(TRACKS.length)
    .fill(null)
    .map(() => Array(GRID_STEPS).fill(false)),
  bpm: 90,
  isPlaying: false,
  // Next step the scheduler will play (0..GRID_STEPS-1).
  currentStep: 0,
  // Lazily populated by ensureAudioContext().
  audio: {
    ctx: null,
    masterGain: null,
    trackGains: [],
    metronomeGain: null,
  },
};
// DOM references
const gridEl = document.getElementById("step-grid");
const playBtn = document.getElementById("play-btn");
const stopBtn = document.getElementById("stop-btn");
const clearGridBtn = document.getElementById("clear-grid-btn");
const bpmInput = document.getElementById("project-bpm");
const metronomeToggle = document.getElementById("metronome-toggle");
const exportJsonBtn = document.getElementById("export-json-btn");
const downloadLink = document.getElementById("download-link");
// AI panel refs
const aiGenreInput = document.getElementById("ai-genre");
const aiMoodInput = document.getElementById("ai-mood");
const aiTaskSelect = document.getElementById("ai-task");
const aiOutput = document.getElementById("ai-output");
const aiError = document.getElementById("ai-error");
const aiGenerateBtn = document.getElementById("generate-idea-btn");
const modelStatus = document.getElementById("model-status");
const modelProgress = document.getElementById("model-progress");
const progressText = document.getElementById("model-progress-text");
const progressBarInner = document.getElementById("model-progress-bar-inner");
const exampleChips = document.querySelectorAll(".chip");
// Melody notes area (text-based "piano roll")
const melodyNotesArea = document.getElementById("melody-notes");
// Audio scheduling: absolute AudioContext time of the next step, and the
// pending requestAnimationFrame id for the scheduler loop.
let nextNoteTime = 0;
let stepTimerId = null;
/**
* AUDIO ENGINE
*/
/**
 * Lazily build the shared Web Audio graph; no-op if already created.
 *
 * Graph: each track gain -> master gain -> destination, plus a
 * metronome gain (starts silent) feeding the master.
 */
function ensureAudioContext() {
  if (state.audio.ctx) return;
  const AudioCtx = window.AudioContext || window.webkitAudioContext;
  const ctx = new AudioCtx();

  const master = ctx.createGain();
  master.gain.value = 0.9;
  master.connect(ctx.destination);

  const perTrack = [];
  for (let i = 0; i < TRACKS.length; i++) {
    const trackGain = ctx.createGain();
    trackGain.gain.value = 0.8;
    trackGain.connect(master);
    perTrack.push(trackGain);
  }

  const click = ctx.createGain();
  click.gain.value = 0.0;
  click.connect(master);

  state.audio.ctx = ctx;
  state.audio.masterGain = master;
  state.audio.trackGains = perTrack;
  state.audio.metronomeGain = click;
}
/**
 * Schedule a one-shot drum hit on the given track at an absolute
 * AudioContext time. Voices: 0 kick (pitch-dropping sine), 1 snare
 * (bandpassed noise), 2 hi-hat (highpassed noise burst), 3 bass
 * (sawtooth thump).
 */
function triggerDrum(trackIndex, time) {
  const ctx = state.audio.ctx;
  const freq = TRACKS[trackIndex].freq;
  const out = state.audio.trackGains[trackIndex];

  switch (trackIndex) {
    case 0: {
      // Kick: short decaying sine, pitch swept down to 40 Hz.
      const kickOsc = ctx.createOscillator();
      const kickEnv = ctx.createGain();
      kickOsc.type = "sine";
      kickOsc.frequency.setValueAtTime(freq, time);
      kickOsc.frequency.exponentialRampToValueAtTime(40, time + 0.1);
      kickEnv.gain.setValueAtTime(0.9, time);
      kickEnv.gain.exponentialRampToValueAtTime(0.001, time + 0.2);
      kickOsc.connect(kickEnv);
      kickEnv.connect(out);
      kickOsc.start(time);
      kickOsc.stop(time + 0.25);
      break;
    }
    case 1: {
      // Snare: fresh white-noise buffer through a bandpass, fast decay.
      const snareBuf = ctx.createBuffer(1, ctx.sampleRate * 0.2, ctx.sampleRate);
      const snareData = snareBuf.getChannelData(0);
      for (let i = 0; i < snareData.length; i++) {
        snareData[i] = (Math.random() * 2 - 1) * 0.5;
      }
      const snareSrc = ctx.createBufferSource();
      snareSrc.buffer = snareBuf;
      const snareEnv = ctx.createGain();
      snareEnv.gain.setValueAtTime(0.7, time);
      snareEnv.gain.exponentialRampToValueAtTime(0.001, time + 0.2);
      const snareFilter = ctx.createBiquadFilter();
      snareFilter.type = "bandpass";
      snareFilter.frequency.value = 1800;
      snareFilter.Q.value = 0.5;
      snareSrc.connect(snareFilter);
      snareFilter.connect(snareEnv);
      snareEnv.connect(out);
      snareSrc.start(time);
      snareSrc.stop(time + 0.25);
      break;
    }
    case 2: {
      // Hi-hat: very short highpassed noise burst.
      const hatBuf = ctx.createBuffer(1, ctx.sampleRate * 0.08, ctx.sampleRate);
      const hatData = hatBuf.getChannelData(0);
      for (let i = 0; i < hatData.length; i++) {
        hatData[i] = (Math.random() * 2 - 1) * 0.4;
      }
      const hatSrc = ctx.createBufferSource();
      hatSrc.buffer = hatBuf;
      const hatFilter = ctx.createBiquadFilter();
      hatFilter.type = "highpass";
      hatFilter.frequency.value = 7000;
      const hatEnv = ctx.createGain();
      hatEnv.gain.setValueAtTime(0.5, time);
      hatEnv.gain.exponentialRampToValueAtTime(0.001, time + 0.08);
      hatSrc.connect(hatFilter);
      hatFilter.connect(hatEnv);
      hatEnv.connect(out);
      hatSrc.start(time);
      hatSrc.stop(time + 0.1);
      break;
    }
    case 3: {
      // Bass: sawtooth thump with a slightly longer decay.
      const bassOsc = ctx.createOscillator();
      const bassEnv = ctx.createGain();
      bassOsc.type = "sawtooth";
      bassOsc.frequency.setValueAtTime(freq, time);
      bassEnv.gain.setValueAtTime(0.35, time);
      bassEnv.gain.exponentialRampToValueAtTime(0.001, time + 0.25);
      bassOsc.connect(bassEnv);
      bassEnv.connect(out);
      bassOsc.start(time);
      bassOsc.stop(time + 0.3);
      break;
    }
  }
}
/**
 * Schedule a metronome click at `time`; bar starts get a higher,
 * louder click. Does nothing when the metronome toggle is off.
 */
function triggerMetronome(time, isBarStart) {
  if (!metronomeToggle.checked) return;
  const ctx = state.audio.ctx;
  const clickOsc = ctx.createOscillator();
  const clickEnv = ctx.createGain();
  clickOsc.type = "square";
  clickOsc.frequency.value = isBarStart ? 2200 : 1400;
  const peak = isBarStart ? 0.45 : 0.28;
  clickEnv.gain.setValueAtTime(peak, time);
  clickEnv.gain.exponentialRampToValueAtTime(0.001, time + 0.05);
  clickOsc.connect(clickEnv);
  clickEnv.connect(state.audio.metronomeGain);
  clickOsc.start(time);
  clickOsc.stop(time + 0.08);
}
/**
 * Lookahead step scheduler, re-armed via requestAnimationFrame.
 *
 * Each tick it schedules (on the audio clock) every step that falls
 * within the next 100 ms, so playback stays steady even though the
 * animation-frame callback itself is jittery. Mutates the shared
 * `nextNoteTime`, `state.currentStep`, and `stepTimerId`.
 */
function scheduleNextStep() {
  // Bail out once playback stops or if audio was never initialized.
  if (!state.isPlaying || !state.audio.ctx) return;
  const secondsPerBeat = 60 / state.bpm;
  const stepDuration = secondsPerBeat / 4; // 16th note
  const ctx = state.audio.ctx;
  // Schedule everything inside the 100 ms lookahead window.
  while (nextNoteTime < ctx.currentTime + 0.1) {
    const stepIndex = state.currentStep;
    const isBarStart = stepIndex % 4 === 0;
    // Schedule metronome
    triggerMetronome(nextNoteTime, isBarStart);
    // Schedule tracks if active
    for (let t = 0; t < TRACKS.length; t++) {
      if (state.grid[t][stepIndex]) {
        triggerDrum(t, nextNoteTime);
      }
    }
    // Visual highlight (updates immediately, slightly ahead of the
    // audio, which plays at nextNoteTime).
    highlightStep(stepIndex);
    // Advance
    state.currentStep = (state.currentStep + 1) % GRID_STEPS;
    nextNoteTime += stepDuration;
  }
  // Keep the scheduler alive; the id lets stopPlayback cancel it.
  stepTimerId = requestAnimationFrame(scheduleNextStep);
}
/**
 * Begin looped playback from step 0.
 *
 * Creates the audio graph on first use, resumes a suspended context
 * (browsers suspend contexts created before a user gesture), and
 * starts the lookahead scheduler. No-op if already playing.
 */
function startPlayback() {
  ensureAudioContext();
  if (state.isPlaying) return;
  const ctx = state.audio.ctx;
  if (ctx.state === "suspended") {
    // Fix: resume() returns a Promise that was previously left
    // floating; surface rejections (e.g. autoplay policy) instead
    // of swallowing them silently.
    ctx.resume().catch((err) => console.error("AudioContext resume failed:", err));
  }
  state.isPlaying = true;
  nextNoteTime = ctx.currentTime + 0.05;
  state.currentStep = 0;
  scheduleNextStep();
}
/**
 * Halt playback: flip the playing flag (the scheduler checks it),
 * cancel any pending animation frame, and clear the step highlight.
 */
function stopPlayback() {
  state.isPlaying = false;
  const pending = stepTimerId;
  stepTimerId = null;
  if (pending) {
    cancelAnimationFrame(pending);
  }
  clearStepHighlight();
}
/**
* GRID UI
*/
/**
 * (Re)build the sequencer grid DOM: one clickable cell per
 * (track, step) pair, appended row-major by track.
 */
function buildGrid() {
  gridEl.innerHTML = "";
  gridEl.style.setProperty("--grid-cols", GRID_STEPS);
  TRACKS.forEach((_, trackIndex) => {
    for (let step = 0; step < GRID_STEPS; step++) {
      const cell = document.createElement("div");
      cell.className = "step-cell";
      cell.dataset.track = String(trackIndex);
      cell.dataset.step = String(step);
      cell.addEventListener("click", onGridCellClick);
      gridEl.appendChild(cell);
    }
  });
}
/**
 * Toggle one grid cell's active state and mirror it in the UI.
 * While stopped, also plays a short one-shot preview of the drum.
 */
function onGridCellClick(e) {
  const cell = e.currentTarget;
  const trackIndex = Number(cell.dataset.track);
  const stepIndex = Number(cell.dataset.step);
  const nowActive = !state.grid[trackIndex][stepIndex];
  state.grid[trackIndex][stepIndex] = nowActive;
  cell.classList.toggle("active", nowActive);
  if (state.isPlaying) return;
  // One-shot preview when clicking while stopped.
  ensureAudioContext();
  triggerDrum(trackIndex, state.audio.ctx.currentTime + 0.01);
}
/**
 * Glow the cells in the currently-playing column; clear the glow
 * from every other cell.
 */
function highlightStep(stepIndex) {
  const glow =
    "0 0 0 1px rgba(159,210,123,0.9), 0 0 12px rgba(159,210,123,0.7)";
  for (const cell of gridEl.querySelectorAll(".step-cell")) {
    const isCurrent = Number(cell.dataset.step) === stepIndex;
    cell.style.boxShadow = isCurrent ? glow : "none";
  }
}
/** Remove the playback glow from every grid cell. */
function clearStepHighlight() {
  for (const cell of gridEl.querySelectorAll(".step-cell")) {
    cell.style.boxShadow = "none";
  }
}
/**
* TRACK CONTROLS (mute/solo/gain)
*/
/**
 * Wire up mute/solo buttons and per-track volume faders.
 *
 * Mute/solo state lives on the buttons themselves (their "active"
 * class); updateTrackGains() reads it back and applies the gains.
 * Fix: the mute/solo click handlers declared an unused `trackIndex`
 * local — dead code removed, shared handler extracted.
 */
function initTrackControls() {
  const muteButtons = document.querySelectorAll(".mute-btn");
  const soloButtons = document.querySelectorAll(".solo-btn");
  const faders = document.querySelectorAll('.track-faders input[type="range"]');

  // Mute and solo share the same behavior: toggle the button's
  // active class, then recompute every track gain.
  const toggleAndUpdate = (btn) => {
    btn.classList.toggle("active");
    updateTrackGains();
  };
  muteButtons.forEach((btn) => {
    btn.addEventListener("click", () => toggleAndUpdate(btn));
  });
  soloButtons.forEach((btn) => {
    btn.addEventListener("click", () => toggleAndUpdate(btn));
  });

  faders.forEach((fader) => {
    fader.addEventListener("input", () => {
      const trackIndex = Number(fader.dataset.track);
      const value = Number(fader.value);
      ensureAudioContext();
      state.audio.trackGains[trackIndex].gain.value = value;
    });
  });
}
/**
 * Recompute the effective gain for every track from the mute/solo
 * buttons and the faders.
 *
 * Rules: if any track is soloed, non-soloed tracks are silenced;
 * otherwise muted tracks are silenced; all other tracks get their
 * fader value (0.8 when no fader is found).
 *
 * Fixes: (1) the per-track loop previously re-converted and
 * re-scanned the NodeLists on every iteration (accidental O(n^2)
 * DOM work) — lookups are now built once; (2) the fader default
 * used `|| 0.8`, which silently replaced a fader legitimately set
 * to 0 with 0.8 — now only non-finite values fall back.
 */
function updateTrackGains() {
  ensureAudioContext();
  const muted = new Set(
    Array.from(document.querySelectorAll(".mute-btn"))
      .filter((b) => b.classList.contains("active"))
      .map((b) => Number(b.dataset.track))
  );
  const soloed = new Set(
    Array.from(document.querySelectorAll(".solo-btn"))
      .filter((b) => b.classList.contains("active"))
      .map((b) => Number(b.dataset.track))
  );
  const faderValues = new Map(
    Array.from(
      document.querySelectorAll('.track-faders input[type="range"]')
    ).map((f) => [Number(f.dataset.track), Number(f.value)])
  );
  const hasSolo = soloed.size > 0;
  for (let i = 0; i < TRACKS.length; i++) {
    const raw = faderValues.get(i);
    const baseGain = Number.isFinite(raw) ? raw : 0.8;
    // Solo overrides mute: a soloed track plays even if muted tracks exist.
    const silenced = (hasSolo && !soloed.has(i)) || (!hasSolo && muted.has(i)) || (hasSolo && !soloed.has(i));
    let gain = baseGain;
    if (hasSolo && !soloed.has(i)) {
      gain = 0;
    } else if (muted.has(i)) {
      gain = 0;
    }
    state.audio.trackGains[i].gain.value = gain;
  }
}
/**
* EXPORT (simple JSON snapshot, illustrating serialization)
*/
/**
 * Serialize the current project (title, tempo, grid patterns, note
 * sketches) to pretty-printed JSON and trigger a browser download.
 */
function exportProjectJson() {
  const title =
    document.getElementById("project-title").value || "Untitled Web DAW Sketch";
  const project = {
    title,
    bpm: state.bpm,
    bars: 4,
    gridSteps: GRID_STEPS,
    tracks: TRACKS.map((t, i) => ({
      name: t.name,
      index: i,
      pattern: state.grid[i],
    })),
    melodyNotes: melodyNotesArea.value,
    aiNotes: aiOutput.value,
    createdAt: new Date().toISOString(),
  };
  const json = JSON.stringify(project, null, 2);
  const blob = new Blob([json], { type: "application/json" });
  const url = URL.createObjectURL(blob);
  downloadLink.href = url;
  downloadLink.download = `${(title || "project").replace(/\s+/g, "_")}.json`;
  downloadLink.click();
  // Give the browser time to start the download before revoking the URL.
  setTimeout(() => URL.revokeObjectURL(url), 10000);
}
/**
* AI WORKER / TRANSFORMERS.JS
*/
// Web Worker running transformers.js; created by initWorker().
let aiWorker = null;
// Flipped to true once the worker posts a "ready" message.
let modelReady = false;
/**
 * Spin up the transformers.js Web Worker and route its messages:
 * status/progress during model download, "ready" when the pipeline
 * is usable, and result/error for generation requests. Unknown
 * message types are ignored.
 */
function initWorker() {
  aiWorker = new Worker("worker.js", { type: "module" });

  const onReady = () => {
    modelReady = true;
    modelStatus.textContent = "Model loaded – you can now generate ideas.";
    modelStatus.className = "status status-ok";
    aiGenerateBtn.disabled = false;
    aiGenerateBtn.textContent = "Generate AI idea";
    modelProgress.classList.add("hidden");
  };

  // Dispatch table instead of a switch; own-property check keeps
  // unknown/hostile `type` values (e.g. "constructor") inert.
  const handlers = {
    status: handleWorkerStatus,
    progress: handleWorkerProgress,
    ready: onReady,
    result: handleWorkerResult,
    error: handleWorkerError,
  };

  aiWorker.onmessage = (event) => {
    const { type, payload } = event.data || {};
    if (type !== undefined && Object.hasOwn(handlers, type)) {
      handlers[type](payload);
    }
  };

  aiWorker.onerror = (e) => {
    console.error("Worker error:", e);
    aiError.textContent =
      "Worker error while loading transformers.js. Check console for details.";
    aiError.classList.remove("hidden");
    modelStatus.textContent = "Error initializing model.";
    modelStatus.className = "status status-error";
    aiGenerateBtn.disabled = true;
  };
}
/** Show a free-form status line relayed from the AI worker. */
function handleWorkerStatus(message) {
  modelStatus.textContent = message;
}
/**
 * Update the model-download progress bar from a worker progress event.
 *
 * @param {{loaded: number, total: number}} payload - byte counts from
 *   transformers.js download-progress callbacks.
 *
 * Fix: clamp the percentage to [0, 100] — parallel file downloads can
 * briefly report loaded > total, which previously overflowed the bar.
 */
function handleWorkerProgress({ loaded, total }) {
  // Some progress events arrive before the total size is known.
  if (!total || total <= 0) return;
  const pct = Math.min(100, Math.max(0, Math.round((loaded / total) * 100)));
  progressText.textContent = `${pct}%`;
  progressBarInner.style.width = `${pct}%`;
  modelProgress.classList.remove("hidden");
}
/**
 * Handle a successful generation result from the AI worker:
 * re-enable the Generate button, clear any stale error, and show the
 * generated text.
 *
 * Fix: guard against a malformed payload with no `text` field —
 * `text.trim()` previously threw a TypeError. Also corrected the
 * stale comment (the condition checks chords/melody, not drums).
 */
function handleWorkerResult({ text }) {
  aiGenerateBtn.disabled = false;
  aiGenerateBtn.textContent = "Generate AI idea";
  aiError.classList.add("hidden");
  const trimmed = (text ?? "").trim();
  aiOutput.value = trimmed;
  // Simple heuristic: chord and melody ideas also become a sketch in
  // the melody notes area.
  if (aiTaskSelect.value === "chords" || aiTaskSelect.value === "melody") {
    melodyNotesArea.value = trimmed;
  }
}
/**
 * Surface a worker-reported generation error in the AI panel and
 * re-enable the Generate button.
 */
function handleWorkerError({ error }) {
  console.error("AI error:", error);
  aiGenerateBtn.disabled = false;
  aiGenerateBtn.textContent = "Generate AI idea";
  const message = error || "Unknown error from AI worker.";
  aiError.textContent = message;
  aiError.classList.remove("hidden");
}
function triggerAIGeneration() {
if (!aiWorker || !modelReady) return;
aiError.classList.add("hidden");
const genre = (aiGenreInput.value || "lofi hip hop").trim();
const mood = (aiMoodInput.value || "chill").trim();
const task = aiTaskSelect.value;
const bpm = Number(bpmInput.value) || 90;
const key = (document.getElementById("project-key").value || "C minor").trim();
let prompt;
if (task === "chords") {
prompt = `You are an expert music theory assistant for beatmakers. Suggest a 4-bar chord progression in ${key} for a ${genre} track with a ${mood} vibe at ${bpm} BPM.
Return ONLY a compact, bar-by-bar text description, one bar per line. Example format:
Bar 1: Cmin7 - Gmin7
Bar 2: Ebmaj7 - Fmin7
Bar 3: Abmaj7 - Gmin7
Bar 4: turnaround...`;
} else if (task === "drums") {
prompt = `You are a drum programmer helping a producer. Suggest a 1-bar, 16-step drum pattern for a ${genre} beat (${mood}, ${bpm} BPM).
Use a compact ASCII grid with K (kick), S (snare), H (hi-hat), . for rest, grouped by 4 steps per beat. Example:
Kick : K..K .... K..K ....
Snare: .... S... .... S...
Hat : H.H. H.H. H.H. H.H.`;
} else if (task === "melody") {
prompt = `You are a melody writer. Based on ${genre} with a ${mood} vibe in ${key} at ${bpm} BPM,
suggest a simple 2-bar hook melody. Return a clear, text-only description like:
Bar 1: notes (timing), e.g. C4 (1&), D4 (1e), ...
Bar 2: ...`;
} else if (task === "arrangement") {
prompt = `You are a modern music producer. For a ${genre} track with a ${mood} vibe in ${key} at ${bpm} BPM,
suggest a simple A/B arrangement for 16 bars. Example:
Bars 1-4: Intro (filtered drums, no bass)
Bars 5-8: A-section (full drums, bass, chords)
Bars 9-12: B-section (add lead, remove hi-hats)
Bars 13-16: Drop / outro.`;
} else if (task === "mix") {
prompt = `You are an AI mix engineer. The user has a beat in ${genre} (${mood}, ${bpm} BPM, key ${key}).
Give concise bullet-point mixing tips focusing on kick, bass, drums bus, and main melody.
Keep it short, text-only.`;
}
aiGenerateBtn.disabled = true;
aiGenerateBtn