flen-crypto committed on
Commit
20d31e3
·
verified ·
1 Parent(s): ab30f71

Upload index.js with huggingface_hub

Browse files
Files changed (1) hide show
  1. index.js +515 -0
index.js ADDED
@@ -0,0 +1,515 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/**
 * app.js
 *
 * Browser-based mini-DAW demo with:
 * - Simple 4-track Web Audio drum step sequencer
 * - Text-based "piano roll" notes area
 * - AI Song Starter powered by transformers.js running in a Web Worker
 *
 * This is intentionally focused: in a real implementation you would plug
 * this UI and audio engine into the much larger architecture described
 * in the technical spec (cloud sync, collaboration, Web3, etc.).
 */

// Number of 16th-note steps in one sequencer bar.
const GRID_STEPS = 16;

// Sequencer rows. `freq` tunes the kick and bass oscillators (the snare and
// hi-hat synths ignore it); `trackIndex` mirrors the row's array position.
const TRACKS = [
  { name: "Kick", freq: 60, trackIndex: 0 },
  { name: "Snare", freq: 180, trackIndex: 1 },
  { name: "Hi-hat", freq: 4000, trackIndex: 2 },
  { name: "Bass", freq: 90, trackIndex: 3 },
];
// Simple in-memory "project" state — the single source of truth the UI and
// the audio engine both read and mutate.
const state = {
  // grid[track][step] — true when that 16th-note step is active.
  grid: Array.from({ length: TRACKS.length }, () =>
    Array.from({ length: GRID_STEPS }, () => false)
  ),
  bpm: 90,
  isPlaying: false,
  currentStep: 0,
  // Lazily-initialized Web Audio graph (see ensureAudioContext()).
  audio: {
    ctx: null,
    masterGain: null,
    trackGains: [],
    metronomeGain: null,
  },
};
// DOM references
// NOTE(review): these lookups run at script load, so they assume the elements
// already exist in the DOM (script loaded with `defer` or at the end of
// <body>) — otherwise every constant here is null. TODO confirm against HTML.

// Transport / sequencer controls.
const gridEl = document.getElementById("step-grid");
const playBtn = document.getElementById("play-btn");
const stopBtn = document.getElementById("stop-btn");
const clearGridBtn = document.getElementById("clear-grid-btn");
const bpmInput = document.getElementById("project-bpm");
const metronomeToggle = document.getElementById("metronome-toggle");
const exportJsonBtn = document.getElementById("export-json-btn");
const downloadLink = document.getElementById("download-link");

// AI panel refs — prompt inputs, output area, and model-loading widgets that
// are driven by messages from the AI worker (see initWorker()).
const aiGenreInput = document.getElementById("ai-genre");
const aiMoodInput = document.getElementById("ai-mood");
const aiTaskSelect = document.getElementById("ai-task");
const aiOutput = document.getElementById("ai-output");
const aiError = document.getElementById("ai-error");
const aiGenerateBtn = document.getElementById("generate-idea-btn");
const modelStatus = document.getElementById("model-status");
const modelProgress = document.getElementById("model-progress");
const progressText = document.getElementById("model-progress-text");
const progressBarInner = document.getElementById("model-progress-bar-inner");
const exampleChips = document.querySelectorAll(".chip");

// Melody notes area (text-based "piano roll")
const melodyNotesArea = document.getElementById("melody-notes");

// Audio scheduling state shared with the lookahead scheduler:
// nextNoteTime is the AudioContext timestamp of the next unscheduled step;
// stepTimerId holds the pending requestAnimationFrame id (null when stopped).
let nextNoteTime = 0;
let stepTimerId = null;
/**
 * AUDIO ENGINE
 */

/**
 * Lazily create the shared AudioContext and its routing graph: one gain node
 * per sequencer track plus a metronome gain, all feeding a master gain that
 * connects to the destination. Safe to call repeatedly — subsequent calls
 * are no-ops once the context exists.
 */
function ensureAudioContext() {
  if (state.audio.ctx) {
    return;
  }

  const AudioCtor = window.AudioContext || window.webkitAudioContext;
  const audioCtx = new AudioCtor();

  const master = audioCtx.createGain();
  master.gain.value = 0.9;
  master.connect(audioCtx.destination);

  // One gain stage per sequencer track, all routed through the master bus.
  const perTrack = [];
  for (let i = 0; i < TRACKS.length; i++) {
    const stage = audioCtx.createGain();
    stage.gain.value = 0.8;
    stage.connect(master);
    perTrack.push(stage);
  }

  // The metronome bus starts silent; clicks are gated by the UI checkbox.
  const clickBus = audioCtx.createGain();
  clickBus.gain.value = 0.0;
  clickBus.connect(master);

  state.audio.ctx = audioCtx;
  state.audio.masterGain = master;
  state.audio.trackGains = perTrack;
  state.audio.metronomeGain = clickBus;
}
/**
 * Trigger a one-shot drum/bass hit for the given track at an exact
 * AudioContext time.
 *
 * Dispatches to one small synth per row:
 *   0 = kick, 1 = snare, 2 = hi-hat, 3 = bass thump.
 * Assumes ensureAudioContext() has already run and that `trackIndex` is a
 * valid index into TRACKS / state.audio.trackGains.
 *
 * @param {number} trackIndex - row in TRACKS (0-3)
 * @param {number} time - AudioContext timestamp to schedule the hit at
 */
function triggerDrum(trackIndex, time) {
  const ctx = state.audio.ctx;
  const freq = TRACKS[trackIndex].freq;
  const out = state.audio.trackGains[trackIndex];

  if (trackIndex === 0) {
    playKick(ctx, out, freq, time);
  } else if (trackIndex === 1) {
    playSnare(ctx, out, time);
  } else if (trackIndex === 2) {
    playHiHat(ctx, out, time);
  } else if (trackIndex === 3) {
    playBassThump(ctx, out, freq, time);
  }
}

// Build a white-noise AudioBufferSourceNode of `seconds` length with peak
// amplitude `amp` (shared by the snare and hi-hat voices).
function createNoiseSource(ctx, seconds, amp) {
  const buffer = ctx.createBuffer(1, ctx.sampleRate * seconds, ctx.sampleRate);
  const data = buffer.getChannelData(0);
  for (let i = 0; i < data.length; i++) {
    data[i] = (Math.random() * 2 - 1) * amp;
  }
  const source = ctx.createBufferSource();
  source.buffer = buffer;
  return source;
}

// Kick: short decaying sine with a fast downward pitch sweep.
function playKick(ctx, out, freq, time) {
  const osc = ctx.createOscillator();
  const gain = ctx.createGain();
  osc.type = "sine";
  osc.frequency.setValueAtTime(freq, time);
  osc.frequency.exponentialRampToValueAtTime(40, time + 0.1);
  gain.gain.setValueAtTime(0.9, time);
  gain.gain.exponentialRampToValueAtTime(0.001, time + 0.2);
  osc.connect(gain);
  gain.connect(out);
  osc.start(time);
  osc.stop(time + 0.25);
}

// Snare: band-passed noise burst with a fast decay envelope.
function playSnare(ctx, out, time) {
  const noise = createNoiseSource(ctx, 0.2, 0.5);

  const noiseGain = ctx.createGain();
  noiseGain.gain.setValueAtTime(0.7, time);
  noiseGain.gain.exponentialRampToValueAtTime(0.001, time + 0.2);

  const bandpass = ctx.createBiquadFilter();
  bandpass.type = "bandpass";
  bandpass.frequency.value = 1800;
  bandpass.Q.value = 0.5;

  noise.connect(bandpass);
  bandpass.connect(noiseGain);
  noiseGain.connect(out);

  noise.start(time);
  noise.stop(time + 0.25);
}

// Hi-hat: very short high-passed noise burst.
function playHiHat(ctx, out, time) {
  const noise = createNoiseSource(ctx, 0.08, 0.4);

  const highpass = ctx.createBiquadFilter();
  highpass.type = "highpass";
  highpass.frequency.value = 7000;

  const gain = ctx.createGain();
  gain.gain.setValueAtTime(0.5, time);
  gain.gain.exponentialRampToValueAtTime(0.001, time + 0.08);

  noise.connect(highpass);
  highpass.connect(gain);
  gain.connect(out);

  noise.start(time);
  noise.stop(time + 0.1);
}

// Bass thump: sawtooth at the track frequency with a quick decay.
function playBassThump(ctx, out, freq, time) {
  const osc = ctx.createOscillator();
  const gain = ctx.createGain();
  osc.type = "sawtooth";
  osc.frequency.setValueAtTime(freq, time);
  gain.gain.setValueAtTime(0.35, time);
  gain.gain.exponentialRampToValueAtTime(0.001, time + 0.25);
  osc.connect(gain);
  gain.connect(out);
  osc.start(time);
  osc.stop(time + 0.3);
}
/**
 * Schedule a short metronome click at `time`. The first step of each beat
 * gets a higher-pitched, louder click. No-op when the metronome checkbox
 * in the UI is unchecked.
 *
 * @param {number} time - AudioContext timestamp for the click
 * @param {boolean} isBarStart - true on the downbeat step (every 4th step)
 */
function triggerMetronome(time, isBarStart) {
  if (!metronomeToggle.checked) return;

  const ctx = state.audio.ctx;
  const click = ctx.createOscillator();
  const envelope = ctx.createGain();

  click.type = "square";
  click.frequency.value = isBarStart ? 2200 : 1400;
  envelope.gain.setValueAtTime(isBarStart ? 0.45 : 0.28, time);
  envelope.gain.exponentialRampToValueAtTime(0.001, time + 0.05);

  click.connect(envelope);
  envelope.connect(state.audio.metronomeGain);
  click.start(time);
  click.stop(time + 0.08);
}
/**
 * Lookahead scheduler for the 16-step sequencer.
 *
 * Runs once per animation frame while playing. Each pass schedules every
 * step whose start time falls within the next 100 ms, using precise
 * AudioContext timestamps so audio timing does not depend on the frame
 * rate. `nextNoteTime` and `state.currentStep` carry the scheduling
 * frontier between passes.
 *
 * NOTE(review): requestAnimationFrame is throttled/paused in background
 * tabs, so playback stalls when the tab is hidden — a setTimeout-driven
 * scheduler would be more robust. Confirm whether that matters here.
 */
function scheduleNextStep() {
  if (!state.isPlaying || !state.audio.ctx) return;

  // BPM is re-read every pass, so tempo changes take effect immediately.
  const secondsPerBeat = 60 / state.bpm;
  const stepDuration = secondsPerBeat / 4; // 16th note
  const ctx = state.audio.ctx;

  // Schedule every step that starts inside the 100 ms lookahead window.
  while (nextNoteTime < ctx.currentTime + 0.1) {
    const stepIndex = state.currentStep;
    const isBarStart = stepIndex % 4 === 0;

    // Schedule metronome
    triggerMetronome(nextNoteTime, isBarStart);

    // Schedule tracks if active
    for (let t = 0; t < TRACKS.length; t++) {
      if (state.grid[t][stepIndex]) {
        triggerDrum(t, nextNoteTime);
      }
    }

    // Visual highlight (updates immediately, slightly ahead of the audio)
    highlightStep(stepIndex);

    // Advance the scheduling frontier to the next 16th note.
    state.currentStep = (state.currentStep + 1) % GRID_STEPS;
    nextNoteTime += stepDuration;
  }

  stepTimerId = requestAnimationFrame(scheduleNextStep);
}
/**
 * Start the sequencer transport. Creates the audio graph on first use,
 * resumes a suspended AudioContext (browsers suspend it until a user
 * gesture), rewinds to step 0 and kicks off the lookahead scheduler.
 * No-op if already playing.
 */
function startPlayback() {
  ensureAudioContext();
  if (state.isPlaying) return;

  const ctx = state.audio.ctx;
  if (ctx.state === "suspended") {
    // resume() returns a Promise; surface failures instead of leaving the
    // rejection floating (the old code ignored it entirely).
    ctx.resume().catch((err) => console.error("AudioContext resume failed:", err));
  }

  state.isPlaying = true;
  // Small offset so the first step is scheduled slightly in the future.
  nextNoteTime = ctx.currentTime + 0.05;
  state.currentStep = 0;
  scheduleNextStep();
}
/**
 * Halt the sequencer: flag playback off, cancel any pending animation-frame
 * callback, and clear the playhead highlight from the grid.
 */
function stopPlayback() {
  state.isPlaying = false;
  if (stepTimerId) {
    cancelAnimationFrame(stepTimerId);
  }
  stepTimerId = null;
  clearStepHighlight();
}
/**
 * GRID UI
 */

/**
 * (Re)build the step-sequencer grid: one clickable cell per (track, step)
 * pair, appended row-major (all steps of track 0 first). Clears any
 * previously rendered cells and exposes the column count to CSS via the
 * --grid-cols custom property.
 */
function buildGrid() {
  gridEl.innerHTML = "";
  gridEl.style.setProperty("--grid-cols", GRID_STEPS);

  for (let row = 0; row < TRACKS.length; row++) {
    for (let col = 0; col < GRID_STEPS; col++) {
      const cell = document.createElement("div");
      cell.className = "step-cell";
      cell.dataset.track = String(row);
      cell.dataset.step = String(col);
      cell.addEventListener("click", onGridCellClick);
      gridEl.appendChild(cell);
    }
  }
}
/**
 * Toggle the clicked (track, step) cell in the pattern and sync its CSS
 * "active" class. When the transport is stopped, also audition the track's
 * drum sound immediately as a one-shot preview.
 *
 * @param {MouseEvent} e - click event whose currentTarget is a .step-cell
 */
function onGridCellClick(e) {
  const cell = e.currentTarget;
  const track = Number(cell.dataset.track);
  const step = Number(cell.dataset.step);

  const nowActive = !state.grid[track][step];
  state.grid[track][step] = nowActive;
  cell.classList.toggle("active", nowActive);

  // One-shot preview when clicking while stopped.
  if (!state.isPlaying) {
    ensureAudioContext();
    triggerDrum(track, state.audio.ctx.currentTime + 0.01);
  }
}
/**
 * Mark every cell in the currently playing step column with a glow and
 * clear the glow from all other cells.
 *
 * @param {number} stepIndex - 0-based step column to highlight
 */
function highlightStep(stepIndex) {
  for (const cell of gridEl.querySelectorAll(".step-cell")) {
    const isCurrent = Number(cell.dataset.step) === stepIndex;
    cell.style.boxShadow = isCurrent
      ? "0 0 0 1px rgba(159,210,123,0.9), 0 0 12px rgba(159,210,123,0.7)"
      : "none";
  }
}
/** Remove the playhead glow from every grid cell. */
function clearStepHighlight() {
  for (const cell of gridEl.querySelectorAll(".step-cell")) {
    cell.style.boxShadow = "none";
  }
}
/**
 * TRACK CONTROLS (mute/solo/gain)
 */

/**
 * Wire up per-track mute buttons, solo buttons and volume faders.
 * Mute/solo only toggle the button's "active" CSS class and let
 * updateTrackGains() derive the resulting gain values; faders write the
 * track gain directly.
 *
 * Fix: the original mute/solo handlers declared an unused `trackIndex`
 * local; the two identical handlers are also deduplicated here.
 */
function initTrackControls() {
  const muteButtons = document.querySelectorAll(".mute-btn");
  const soloButtons = document.querySelectorAll(".solo-btn");
  const faders = document.querySelectorAll('.track-faders input[type="range"]');

  // Mute and solo behave identically at this level: flip the button's state
  // class, then recompute every track gain from the full button state.
  const toggleAndRefresh = (btn) => {
    btn.classList.toggle("active");
    updateTrackGains();
  };
  muteButtons.forEach((btn) => btn.addEventListener("click", () => toggleAndRefresh(btn)));
  soloButtons.forEach((btn) => btn.addEventListener("click", () => toggleAndRefresh(btn)));

  faders.forEach((fader) => {
    fader.addEventListener("input", () => {
      const trackIndex = Number(fader.dataset.track);
      const value = Number(fader.value);
      ensureAudioContext();
      state.audio.trackGains[trackIndex].gain.value = value;
    });
  });
}
/**
 * Recompute every track's gain from the current mute/solo button state and
 * fader positions.
 *
 * Rules (unchanged from the original): if any track is soloed, all
 * non-soloed tracks are silenced; a muted track is silenced; otherwise the
 * track plays at its fader level.
 *
 * Fix: the original computed `Number(faderValue) || 0.8`, which treated a
 * fader deliberately set to 0 as "missing" and silently reset that track to
 * 0.8. Now only missing/non-finite values fall back to the 0.8 default.
 * Also builds Sets/Maps once instead of re-scanning the NodeLists inside
 * the per-track loop (was accidentally O(n^2)).
 */
function updateTrackGains() {
  ensureAudioContext();
  const mutes = document.querySelectorAll(".mute-btn");
  const solos = document.querySelectorAll(".solo-btn");
  const faders = document.querySelectorAll('.track-faders input[type="range"]');

  const soloed = new Set(
    Array.from(solos)
      .filter((b) => b.classList.contains("active"))
      .map((b) => Number(b.dataset.track))
  );
  const muted = new Set(
    Array.from(mutes)
      .filter((b) => b.classList.contains("active"))
      .map((b) => Number(b.dataset.track))
  );
  const faderByTrack = new Map(
    Array.from(faders).map((f) => [Number(f.dataset.track), Number(f.value)])
  );
  const hasSolo = soloed.size > 0;

  for (let i = 0; i < TRACKS.length; i++) {
    const raw = faderByTrack.get(i);
    // Only fall back to the default when the fader is absent or unparsable —
    // a fader at exactly 0 is a valid (silent) level.
    const baseGain = Number.isFinite(raw) ? raw : 0.8;

    // Solo wins over everything: with any solo active, non-soloed tracks are
    // silenced; otherwise a track's own mute silences it.
    let gain = baseGain;
    if ((hasSolo && !soloed.has(i)) || muted.has(i)) {
      gain = 0;
    }
    state.audio.trackGains[i].gain.value = gain;
  }
}
/**
 * EXPORT (simple JSON snapshot, illustrating serialization)
 */

/**
 * Serialize the current project (tempo, pattern grid, melody notes, AI
 * output) to pretty-printed JSON and trigger a browser download through a
 * temporary object URL, which is revoked shortly afterwards.
 */
function exportProjectJson() {
  const title =
    document.getElementById("project-title").value || "Untitled Web DAW Sketch";

  const project = {
    title,
    bpm: state.bpm,
    bars: 4,
    gridSteps: GRID_STEPS,
    tracks: TRACKS.map((t, i) => ({
      name: t.name,
      index: i,
      pattern: state.grid[i],
    })),
    melodyNotes: melodyNotesArea.value,
    aiNotes: aiOutput.value,
    createdAt: new Date().toISOString(),
  };

  const json = JSON.stringify(project, null, 2);
  const blob = new Blob([json], { type: "application/json" });
  const url = URL.createObjectURL(blob);

  downloadLink.href = url;
  downloadLink.download = `${(project.title || "project").replace(/\s+/g, "_")}.json`;
  downloadLink.click();

  // Give the download a moment to start before releasing the object URL.
  setTimeout(() => URL.revokeObjectURL(url), 10000);
}
/**
 * AI WORKER / TRANSFORMERS.JS
 */

// Web worker instance (created by initWorker(); null until then).
let aiWorker = null;
// True once the worker posts a "ready" message after loading the model.
let modelReady = false;
/**
 * Spawn the AI Web Worker (worker.js, as an ES module) and wire its
 * message/error handlers.
 *
 * Message protocol: the worker posts { type, payload } where type is one of
 * "status" | "progress" | "ready" | "result" | "error". The "ready" case is
 * handled inline: it unlocks the generate button and hides the progress UI.
 */
function initWorker() {
  aiWorker = new Worker("worker.js", { type: "module" });

  aiWorker.onmessage = (event) => {
    const { type, payload } = event.data || {};
    if (type === "status") {
      handleWorkerStatus(payload);
    } else if (type === "progress") {
      handleWorkerProgress(payload);
    } else if (type === "ready") {
      modelReady = true;
      modelStatus.textContent = "Model loaded – you can now generate ideas.";
      modelStatus.className = "status status-ok";
      aiGenerateBtn.disabled = false;
      aiGenerateBtn.textContent = "Generate AI idea";
      modelProgress.classList.add("hidden");
    } else if (type === "result") {
      handleWorkerResult(payload);
    } else if (type === "error") {
      handleWorkerError(payload);
    }
    // Unknown/missing types are ignored, matching the old default branch.
  };

  aiWorker.onerror = (e) => {
    console.error("Worker error:", e);
    aiError.textContent =
      "Worker error while loading transformers.js. Check console for details.";
    aiError.classList.remove("hidden");
    modelStatus.textContent = "Error initializing model.";
    modelStatus.className = "status status-error";
    aiGenerateBtn.disabled = true;
  };
}
/**
 * Show a human-readable loading/status message from the AI worker.
 * @param {string} message - status text posted by the worker
 */
function handleWorkerStatus(message) {
  modelStatus.textContent = message;
}
/**
 * Update the model-download progress bar from a worker "progress" message.
 *
 * Fix: the original destructured the payload directly, so a "progress"
 * message with a missing payload threw a TypeError; the default `= {}`
 * makes that case a silent no-op, consistent with the total<=0 guard.
 *
 * @param {{loaded?: number, total?: number}} [progress] - byte counts;
 *   ignored when total is missing or non-positive.
 */
function handleWorkerProgress({ loaded, total } = {}) {
  if (!total || total <= 0) return;
  const pct = Math.round((loaded / total) * 100);
  progressText.textContent = `${pct}%`;
  progressBarInner.style.width = `${pct}%`;
  modelProgress.classList.remove("hidden");
}
/**
 * Display generated text from a worker "result" message and re-enable the
 * generate button. Chord and melody results are also copied into the melody
 * notes sketch area.
 *
 * Fix: the original called `text.trim()` unguarded, so a malformed result
 * (missing payload or missing text) threw a TypeError; both are now
 * tolerated and render as an empty output.
 *
 * @param {{text?: string}} [result] - generated text from the worker
 */
function handleWorkerResult({ text } = {}) {
  aiGenerateBtn.disabled = false;
  aiGenerateBtn.textContent = "Generate AI idea";
  aiError.classList.add("hidden");

  const trimmed = (text ?? "").trim();
  aiOutput.value = trimmed;
  // Simple heuristic: chord and melody ideas double as a melody-notes sketch.
  if (aiTaskSelect.value === "chords" || aiTaskSelect.value === "melody") {
    melodyNotesArea.value = trimmed;
  }
}
/**
 * Surface a worker-reported AI error in the UI and restore the generate
 * button so the user can retry.
 *
 * @param {{error?: string}} payload - error description from the worker
 */
function handleWorkerError({ error }) {
  console.error("AI error:", error);

  // Restore the button so the user can retry.
  aiGenerateBtn.disabled = false;
  aiGenerateBtn.textContent = "Generate AI idea";

  const message = error || "Unknown error from AI worker.";
  aiError.textContent = message;
  aiError.classList.remove("hidden");
}
470
+
471
+ function triggerAIGeneration() {
472
+ if (!aiWorker || !modelReady) return;
473
+ aiError.classList.add("hidden");
474
+
475
+ const genre = (aiGenreInput.value || "lofi hip hop").trim();
476
+ const mood = (aiMoodInput.value || "chill").trim();
477
+ const task = aiTaskSelect.value;
478
+
479
+ const bpm = Number(bpmInput.value) || 90;
480
+ const key = (document.getElementById("project-key").value || "C minor").trim();
481
+
482
+ let prompt;
483
+ if (task === "chords") {
484
+ prompt = `You are an expert music theory assistant for beatmakers. Suggest a 4-bar chord progression in ${key} for a ${genre} track with a ${mood} vibe at ${bpm} BPM.
485
+ Return ONLY a compact, bar-by-bar text description, one bar per line. Example format:
486
+ Bar 1: Cmin7 - Gmin7
487
+ Bar 2: Ebmaj7 - Fmin7
488
+ Bar 3: Abmaj7 - Gmin7
489
+ Bar 4: turnaround...`;
490
+ } else if (task === "drums") {
491
+ prompt = `You are a drum programmer helping a producer. Suggest a 1-bar, 16-step drum pattern for a ${genre} beat (${mood}, ${bpm} BPM).
492
+ Use a compact ASCII grid with K (kick), S (snare), H (hi-hat), . for rest, grouped by 4 steps per beat. Example:
493
+ Kick : K..K .... K..K ....
494
+ Snare: .... S... .... S...
495
+ Hat : H.H. H.H. H.H. H.H.`;
496
+ } else if (task === "melody") {
497
+ prompt = `You are a melody writer. Based on ${genre} with a ${mood} vibe in ${key} at ${bpm} BPM,
498
+ suggest a simple 2-bar hook melody. Return a clear, text-only description like:
499
+ Bar 1: notes (timing), e.g. C4 (1&), D4 (1e), ...
500
+ Bar 2: ...`;
501
+ } else if (task === "arrangement") {
502
+ prompt = `You are a modern music producer. For a ${genre} track with a ${mood} vibe in ${key} at ${bpm} BPM,
503
+ suggest a simple A/B arrangement for 16 bars. Example:
504
+ Bars 1-4: Intro (filtered drums, no bass)
505
+ Bars 5-8: A-section (full drums, bass, chords)
506
+ Bars 9-12: B-section (add lead, remove hi-hats)
507
+ Bars 13-16: Drop / outro.`;
508
+ } else if (task === "mix") {
509
+ prompt = `You are an AI mix engineer. The user has a beat in ${genre} (${mood}, ${bpm} BPM, key ${key}).
510
+ Give concise bullet-point mixing tips focusing on kick, bass, drums bus, and main melody.
511
+ Keep it short, text-only.`;
512
+ }
513
+
514
+ aiGenerateBtn.disabled = true;
515
+ aiGenerateBtn