+ {/* Resize Handle */}
+
+
+ {/* ---- Column 1: System & Signal (merged indicators) ---- */}
+
+
+
System & Signal
+
+
{appStore.backend()}
+
+
VAD
+
+
+
+
+
Mode
+
+
+ {(mode) => (
+ {
+ if (!isRecording()) {
+ appStore.setTranscriptionMode(mode.id);
+ }
+ }}
+ disabled={isRecording()}
+ title={isRecording() ? 'Stop recording to change mode' : mode.label}
+ >
+ {mode.short}
+
+ )}
+
+
+
+
+
+
+ RTFx
+
+ {appStore.rtfxAverage() > 0 ? Math.round(appStore.rtfxAverage()) : '–'}
+
+
+
+ Latency
+ {Math.round(appStore.inferenceLatencyAverage())}ms
+
+
+
+
+
+ Buffer
+ {(appStore.bufferMetrics().fillRatio * 100).toFixed(0)}%
+
+
+
+
+
+
+
Merger
+
+
+
Sent
+
{appStore.v4MergerStats().sentencesFinalized}
+
+
+
Cursor
+
{appStore.matureCursorTime().toFixed(1)}s
+
+
+
Uttr
+
{appStore.v4MergerStats().utterancesProcessed}
+
+
+
+
+
+
+
+ RMS Energy
+ appStore.energyThreshold() ? 'text-[var(--color-earthy-muted-green)]' : 'text-[var(--color-earthy-soft-brown)]'}>
+ {(appStore.audioLevel() * 100).toFixed(1)}%
+
+
+
+
+
+
+ 0 ? 'opacity-100' : 'opacity-40'}`}>
+
+ VAD Prob
+ appStore.sileroThreshold() ? 'text-[var(--color-earthy-coral)] font-bold' : 'text-[var(--color-earthy-soft-brown)]'}>
+ {(appStore.vadState().sileroProbability * 100).toFixed(0)}%
+
+
+
+
+
appStore.sileroThreshold() ? 'bg-[var(--color-earthy-coral)]' : 'bg-[var(--color-earthy-soft-brown)]'}`}
+ style={{ width: `${Math.min(100, appStore.vadState().sileroProbability * 100)}%` }}
+ />
+
+
+
+ SNR
+ 3 ? 'text-[var(--color-earthy-muted-green)]' : 'text-[var(--color-earthy-soft-brown)]'}`}>
+ {appStore.vadState().snr.toFixed(1)} dB
+
+
+
+
+
+
+
+
Overlap
+
{appStore.streamingOverlap().toFixed(1)}s
+
+
+
Chunks
+
{appStore.mergeInfo().chunkCount}
+
+
+
+
+
State
+
+ {appStore.vadState().hybridState}
+
+
+
+
Windows
+
{appStore.v4MergerStats().utterancesProcessed}
+
+
+
+
+
+ {/* ---- Column 2: Live Context (mode-dependent) ---- */}
+
+
+
+ {isV4() ? 'Transcript State' : isV3() ? 'Stream Sync' : 'Segments'}
+
+
+ {/* v3: LCS indicators */}
+
+
+
+
+ join_inner
+ Match: {appStore.mergeInfo().lcsLength}
+
+
+
+
+ {/* v4: VAD state indicator */}
+
+
+
+
+
+ {appStore.vadState().hybridState}
+
+
+
0 ? 'opacity-100' : 'opacity-0'}`}>
+ VAD
+ 0.5 ? 'text-[var(--color-earthy-coral)]' : 'text-[var(--color-earthy-soft-brown)]'}`}>
+ {(appStore.vadState().sileroProbability * 100).toFixed(0)}%
+
+
+
+
+
+
+
+
+
+ );
+};
diff --git a/src/components/EnergyMeter.tsx b/src/components/EnergyMeter.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..46c72af525fcfff49eebac24201b7cf998b2d5d1
--- /dev/null
+++ b/src/components/EnergyMeter.tsx
@@ -0,0 +1,103 @@
+import { Component, createSignal, createEffect, onCleanup } from 'solid-js';
+import { AudioEngine } from '../lib/audio/types';
+import { appStore } from '../stores/appStore';
+
+interface EnergyMeterProps {
+ audioEngine?: AudioEngine;
+}
+
+export const EnergyMeter: Component
= (props) => {
+ const [energy, setEnergy] = createSignal(0);
+ const [metrics, setMetrics] = createSignal({ noiseFloor: 0, snr: 0, threshold: 0.02, snrThreshold: 3.0 });
+ const [isSpeaking, setIsSpeaking] = createSignal(false);
+
  // Pull the latest energy + signal metrics from the engine into local signals.
  // Called once eagerly on engine attach and then on every visualization tick.
  const updateFromEngine = (engine: AudioEngine) => {
    const currentE = engine.getCurrentEnergy();
    const currentM = engine.getSignalMetrics();

    setEnergy(currentE);
    setMetrics(currentM);
    // Check if speaking based on SNR threshold (matching VAD logic)
    // — speech when SNR exceeds its threshold OR raw energy exceeds the gate.
    setIsSpeaking(currentM.snr > currentM.snrThreshold || currentE > currentM.threshold);
  };
+
+ createEffect(() => {
+ const engine = props.audioEngine;
+ if (!engine) return;
+
+ updateFromEngine(engine);
+ const unsubscribe = engine.onVisualizationUpdate(() => {
+ updateFromEngine(engine);
+ });
+
+ onCleanup(() => unsubscribe());
+ });
+
+ // Logarithmic scaling for better visualization
+ const toPercent = (val: number) => {
+ // e.g. mapping 0.0001 -> 1.0 to 0% -> 100% log scale
+ // log10(0.0001) = -4, log10(1) = 0
+ const minLog = -4;
+ const maxLog = 0;
+ const v = Math.max(0.0001, val);
+ const log = Math.log10(v);
+ return Math.max(0, Math.min(100, ((log - minLog) / (maxLog - minLog)) * 100));
+ };
+
+ return (
+
+
+
Signal_Analysis
+ {/* Speaking indicator - Neumorphic LED style */}
+
+
+
+ {isSpeaking() ? 'SPEECH' : 'SILENCE'}
+
+
+
+
+ {/* Energy Bar */}
+
+ {/* Energy Fill - color based on speech state */}
+
+
+ {/* Noise Floor Marker */}
+
+
+ {/* Energy Threshold Marker */}
+
+
+
+
+
+ Noise
+ {metrics().noiseFloor.toFixed(5)}
+
+
+ Energy
+ {energy().toFixed(4)}
+
+
+ SNR_Ratio
+ metrics().snrThreshold ? 'text-emerald-500' : 'text-amber-500'}`}>
+ {metrics().snr.toFixed(1)} dB
+
+
+
+
+ );
+};
diff --git a/src/components/LayeredBufferVisualizer.tsx b/src/components/LayeredBufferVisualizer.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..e37641087aeb8b2d75c2b7012764da25a8b5687b
--- /dev/null
+++ b/src/components/LayeredBufferVisualizer.tsx
@@ -0,0 +1,442 @@
+import { Component, onMount, onCleanup, createSignal } from 'solid-js';
+import type { AudioEngine } from '../lib/audio/types';
+import type { MelWorkerClient } from '../lib/audio/MelWorkerClient';
+import { normalizeMelForDisplay } from '../lib/audio/mel-display';
+import { appStore } from '../stores/appStore';
+
+interface LayeredBufferVisualizerProps {
+ audioEngine?: AudioEngine;
+ melClient?: MelWorkerClient;
+ height?: number; // Total height
+ windowDuration?: number; // default 8.0s
+}
+
+const MEL_BINS = 128; // Standard for this app
+
+// dB scaling is in mel-display.ts (shared with bar visualizer)
+
+// Pre-computed 256-entry RGB lookup table for mel heatmap (black to red).
+// Built once at module load; indexed by Math.round(intensity * 255).
+// Colormap: black -> blue -> purple -> green -> yellow -> orange -> red.
+const COLORMAP_LUT = (() => {
+ const stops: [number, number, number, number][] = [
+ [0, 0, 0, 0], // black
+ [0.12, 0, 0, 180], // blue
+ [0.30, 120, 0, 160], // purple
+ [0.48, 0, 180, 80], // green
+ [0.65, 220, 220, 0], // yellow
+ [0.82, 255, 140, 0], // orange
+ [1, 255, 0, 0], // red
+ ];
+ // 256 entries * 3 channels (R, G, B) packed into a Uint8Array
+ const lut = new Uint8Array(256 * 3);
+ for (let i = 0; i < 256; i++) {
+ const intensity = i / 255;
+ let r = 0, g = 0, b = 0;
+ for (let s = 0; s < stops.length - 1; s++) {
+ const [t0, r0, g0, b0] = stops[s];
+ const [t1, r1, g1, b1] = stops[s + 1];
+ if (intensity >= t0 && intensity <= t1) {
+ const t = (intensity - t0) / (t1 - t0);
+ r = Math.round(r0 + t * (r1 - r0));
+ g = Math.round(g0 + t * (g1 - g0));
+ b = Math.round(b0 + t * (b1 - b0));
+ break;
+ }
+ }
+ if (intensity >= stops[stops.length - 1][0]) {
+ const last = stops[stops.length - 1];
+ r = last[1]; g = last[2]; b = last[3];
+ }
+ const base = i * 3;
+ lut[base] = r;
+ lut[base + 1] = g;
+ lut[base + 2] = b;
+ }
+ return lut;
+})();
+
+export const LayeredBufferVisualizer: Component = (props) => {
+ let canvasRef: HTMLCanvasElement | undefined;
+ let ctx: CanvasRenderingContext2D | null = null;
+ let animationFrameId: number;
+
+ const getWindowDuration = () => props.windowDuration || 8.0;
+
+ // Offscreen canvas for spectrogram caching (scrolling)
+ let specCanvas: HTMLCanvasElement | undefined;
+ let specCtx: CanvasRenderingContext2D | null = null;
+
+ // State for last fetch to throttle spectrogram updates
+ let lastSpecFetchTime = 0;
+ const SPEC_FETCH_INTERVAL = 100; // Update spectrogram every 100ms (10fps)
+ const DRAW_INTERVAL_MS = 33; // Throttle full redraw to ~30fps
+ let lastDrawTime = 0;
+
+ // --- Cached layout dimensions (updated via ResizeObserver, NOT per-frame) ---
+ // Avoids getBoundingClientRect() every animation frame which forces synchronous
+ // layout reflow and was the #1 perf bottleneck (1.5s layout-shift clusters).
+ let cachedPhysicalWidth = 0;
+ let cachedPhysicalHeight = 0;
+ let cachedDpr = window.devicePixelRatio || 1;
+ let resizeObserver: ResizeObserver | null = null;
+ let dprMediaQuery: MediaQueryList | null = null;
+
  /**
   * Recompute physical canvas dimensions from cached logical size + DPR.
   * Called from the ResizeObserver and the DPR watcher only — never per
   * animation frame — so layout reads stay off the hot path.
   */
  const updateCanvasDimensions = (logicalW: number, logicalH: number) => {
    cachedDpr = window.devicePixelRatio || 1;
    cachedPhysicalWidth = Math.floor(logicalW * cachedDpr);
    cachedPhysicalHeight = Math.floor(logicalH * cachedDpr);

    // Resize canvases immediately so next frame uses correct size.
    // Guarded: assigning canvas.width/height clears the canvas, so only
    // touch it when the size actually changed.
    if (canvasRef && (canvasRef.width !== cachedPhysicalWidth || canvasRef.height !== cachedPhysicalHeight)) {
      canvasRef.width = cachedPhysicalWidth;
      canvasRef.height = cachedPhysicalHeight;
    }
    if (specCanvas && (specCanvas.width !== cachedPhysicalWidth || specCanvas.height !== cachedPhysicalHeight)) {
      specCanvas.width = cachedPhysicalWidth;
      specCanvas.height = cachedPhysicalHeight;
    }
  };
+
+ // --- Pre-allocated ImageData for spectrogram rendering ---
+ // Avoids creating a new ImageData object every spectrogram draw (~10fps),
+ // which caused GC pressure from large short-lived allocations.
+ let cachedSpecImgData: ImageData | null = null;
+ let cachedSpecImgWidth = 0;
+ let cachedSpecImgHeight = 0;
+
+ // --- Pre-allocated waveform read buffer ---
+ // Avoids allocating a new Float32Array(~128000) every animation frame.
+ // Grows only when the required size exceeds current capacity.
+ let waveformReadBuf: Float32Array | null = null;
+
+ // Store spectrogram data with its time alignment
+ let cachedSpecData: {
+ features: Float32Array;
+ melBins: number;
+ timeSteps: number;
+ startTime: number;
+ endTime: number;
+ } | null = null;
+
+ onMount(() => {
+ if (canvasRef) {
+ ctx = canvasRef.getContext('2d', { alpha: false });
+
+ // Use ResizeObserver to cache dimensions instead of per-frame getBoundingClientRect
+ resizeObserver = new ResizeObserver((entries) => {
+ for (const entry of entries) {
+ // contentRect gives CSS-pixel (logical) dimensions without forcing layout
+ const cr = entry.contentRect;
+ updateCanvasDimensions(cr.width, cr.height);
+ }
+ });
+ resizeObserver.observe(canvasRef);
+
+ // Watch for DPR changes (browser zoom, display change)
+ const setupDprWatch = () => {
+ dprMediaQuery = window.matchMedia(`(resolution: ${window.devicePixelRatio}dppx)`);
+ const onDprChange = () => {
+ if (canvasRef) {
+ const rect = canvasRef.getBoundingClientRect(); // one-time on zoom change only
+ updateCanvasDimensions(rect.width, rect.height);
+ }
+ // Re-register for the next change at the new DPR
+ setupDprWatch();
+ };
+ dprMediaQuery.addEventListener('change', onDprChange, { once: true });
+ };
+ setupDprWatch();
+
+ // Initial dimensions (one-time)
+ const rect = canvasRef.getBoundingClientRect();
+ updateCanvasDimensions(rect.width, rect.height);
+ }
+
+ // Create offscreen canvas
+ specCanvas = document.createElement('canvas');
+ specCtx = specCanvas.getContext('2d', { alpha: false });
+
+ loop();
+ });
+
+ onCleanup(() => {
+ cancelAnimationFrame(animationFrameId);
+ if (resizeObserver) {
+ resizeObserver.disconnect();
+ resizeObserver = null;
+ }
+ });
+
  /**
   * Main render loop (requestAnimationFrame driven, throttled to ~30fps).
   * Each drawn frame paints three stacked layers — spectrogram (top 55%),
   * waveform (middle 35%), VAD strip (bottom ~10%) — then the time overlay.
   * The default argument covers the initial manual call from onMount, which
   * has no rAF timestamp.
   */
  const loop = (now: number = performance.now()) => {
    // Engine/canvas not ready yet: keep the loop alive but draw nothing.
    if (!ctx || !canvasRef || !props.audioEngine) {
      animationFrameId = requestAnimationFrame(loop);
      return;
    }

    // ~30fps throttle: skip frames arriving sooner than DRAW_INTERVAL_MS.
    if (now - lastDrawTime < DRAW_INTERVAL_MS) {
      animationFrameId = requestAnimationFrame(loop);
      return;
    }
    lastDrawTime = now;

    // Use cached dimensions (updated by ResizeObserver / DPR watcher)
    const dpr = cachedDpr;
    const width = cachedPhysicalWidth;
    const height = cachedPhysicalHeight;

    // Canvas not laid out yet (zero-size): retry next frame.
    if (width === 0 || height === 0) {
      animationFrameId = requestAnimationFrame(loop);
      return;
    }

    // Colors
    const bgColor = '#0f172a';
    ctx.fillStyle = bgColor;
    ctx.fillRect(0, 0, width, height);

    // Visible trailing window [currentTime - duration, currentTime].
    const ringBuffer = props.audioEngine.getRingBuffer();
    const currentTime = ringBuffer.getCurrentTime();
    const duration = getWindowDuration();
    const startTime = currentTime - duration;
    const sampleRate = ringBuffer.sampleRate;

    // Layout:
    // Top 55%: Spectrogram
    // Middle 35%: Waveform
    // Bottom 10%: VAD signal
    const specHeight = Math.floor(height * 0.55);
    const waveHeight = Math.floor(height * 0.35);
    const vadHeight = height - specHeight - waveHeight;
    const waveY = specHeight;
    const vadY = specHeight + waveHeight;

    // 1. Spectrogram (async fetch with stored alignment)
    if (props.melClient && specCtx && specCanvas) {
      // Fetch at most every SPEC_FETCH_INTERVAL ms; the result arrives
      // asynchronously and is cached together with its time range.
      if (now - lastSpecFetchTime > SPEC_FETCH_INTERVAL) {
        lastSpecFetchTime = now;

        const fetchStartSample = Math.round(startTime * sampleRate);
        const fetchEndSample = Math.round(currentTime * sampleRate);

        // Request RAW (unnormalized) features for fixed dB scaling.
        // ASR transcription still uses normalized features (default).
        props.melClient.getFeatures(fetchStartSample, fetchEndSample, false).then(features => {
          if (features && specCtx && specCanvas) {
            // Store with time alignment info
            cachedSpecData = {
              features: features.features,
              melBins: features.melBins,
              timeSteps: features.T,
              startTime: startTime,
              endTime: currentTime
            };
            drawSpectrogramToCanvas(specCtx, features.features, features.melBins, features.T, width, specHeight);
          }
        }).catch(() => { }); // best-effort: a failed fetch keeps the old frame
      }

      // Draw cached spectrogram aligned to current view
      if (cachedSpecData && cachedSpecData.timeSteps > 0) {
        // Calculate offset to align cached data with current time window
        // (cache may be up to SPEC_FETCH_INTERVAL stale: shift it left by the
        // elapsed time; the right edge stays empty until the next fetch lands)
        const cachedDuration = cachedSpecData.endTime - cachedSpecData.startTime;
        const timeOffset = startTime - cachedSpecData.startTime;
        const offsetX = Math.floor((timeOffset / cachedDuration) * width);

        // Draw the portion of cached spectrogram that's still visible
        ctx.drawImage(specCanvas, offsetX, 0, width - offsetX, specHeight, 0, 0, width - offsetX, specHeight);
      }
    }

    // 2. Waveform (sync with current time window, zero-allocation read)
    try {
      const startSample = Math.floor(startTime * sampleRate);
      const endSample = Math.floor(currentTime * sampleRate);
      const neededLen = endSample - startSample;

      // Only draw when the ring buffer still holds the window's oldest sample.
      const baseFrame = ringBuffer.getBaseFrameOffset();
      if (startSample >= baseFrame && neededLen > 0) {
        // Use readInto if available (zero-alloc), fall back to read()
        if (ringBuffer.readInto) {
          // Grow the pre-allocated buffer only when capacity is insufficient
          if (!waveformReadBuf || waveformReadBuf.length < neededLen) {
            waveformReadBuf = new Float32Array(neededLen);
          }
          const written = ringBuffer.readInto(startSample, endSample, waveformReadBuf);
          // Pass a subarray view (no copy) of the exact length
          drawWaveform(ctx, waveformReadBuf.subarray(0, written), width, waveHeight, waveY);
        } else {
          const audioData = ringBuffer.read(startSample, endSample);
          drawWaveform(ctx, audioData, width, waveHeight, waveY);
        }
      }
    } catch (e) {
      // Data likely overwritten or not available
    }

    // 3. VAD Signal Layer
    drawVadLayer(ctx, width, vadHeight, vadY, startTime, duration, dpr);

    // 4. Overlay (time labels, trigger line)
    drawOverlay(ctx, width, height, startTime, duration, dpr);

    animationFrameId = requestAnimationFrame(loop);
  };
+
  /**
   * Rasterize mel features into the offscreen spectrogram canvas.
   * Nearest-neighbor sampling in both axes; intensity maps to color via the
   * precomputed COLORMAP_LUT. Low frequencies are drawn at the bottom.
   *
   * @param features flattened [melBins, T] array (mel-major): features[m * timeSteps + t]
   */
  const drawSpectrogramToCanvas = (
    ctx: CanvasRenderingContext2D,
    features: Float32Array,
    melBins: number,
    timeSteps: number,
    width: number,
    height: number
  ) => {
    // features layout: [melBins, T] (mel-major, flattened from [mel, time])
    // So features[m * timeSteps + t].

    if (timeSteps === 0) return;

    // Reuse cached ImageData if dimensions match; allocate only on size change
    if (!cachedSpecImgData || cachedSpecImgWidth !== width || cachedSpecImgHeight !== height) {
      cachedSpecImgData = ctx.createImageData(width, height);
      cachedSpecImgWidth = width;
      cachedSpecImgHeight = height;
    }
    const imgData = cachedSpecImgData;
    const data = imgData.data;

    // Scaling factors
    const timeScale = timeSteps / width;
    const freqScale = melBins / height;

    for (let x = 0; x < width; x++) {
      const t = Math.floor(x * timeScale);
      if (t >= timeSteps) break;

      for (let y = 0; y < height; y++) {
        // y=0 is top (high freq), y=height is bottom (low freq).
        const m = Math.floor((height - 1 - y) * freqScale);
        if (m >= melBins) continue;

        const val = features[m * timeSteps + t];
        // NOTE(review): assumes normalizeMelForDisplay returns a value in
        // [0, 1] — TODO confirm against mel-display.ts.
        const clamped = normalizeMelForDisplay(val);
        const lutIdx = (clamped * 255) | 0; // |0 truncates to an int LUT index
        const lutBase = lutIdx * 3;

        const idx = (y * width + x) * 4;
        data[idx] = COLORMAP_LUT[lutBase];
        data[idx + 1] = COLORMAP_LUT[lutBase + 1];
        data[idx + 2] = COLORMAP_LUT[lutBase + 2];
        data[idx + 3] = 255; // fully opaque
      }
    }
    ctx.putImageData(imgData, 0, 0);
  };
+
+ // Use gain 1 so waveform shows true amplitude (float32 in [-1,1] fills half-height).
+ // No display amplification; ASR pipeline is unchanged.
+ const WAVEFORM_GAIN = 1;
+
+ const drawWaveform = (ctx: CanvasRenderingContext2D, data: Float32Array, width: number, height: number, offsetY: number) => {
+ if (data.length === 0) return;
+
+ const step = Math.ceil(data.length / width);
+ const amp = (height / 2) * WAVEFORM_GAIN;
+ const centerY = offsetY + height / 2;
+
+ ctx.strokeStyle = '#4ade80'; // Green
+ ctx.lineWidth = 1;
+ ctx.beginPath();
+
+ for (let x = 0; x < width; x++) {
+ const startIdx = x * step;
+ const endIdx = Math.min((x + 1) * step, data.length);
+
+ let min = 1;
+ let max = -1;
+ let hasData = false;
+
+ for (let i = startIdx; i < endIdx; i += Math.max(1, Math.floor((endIdx - startIdx) / 10))) {
+ const s = data[i];
+ if (s < min) min = s;
+ if (s > max) max = s;
+ hasData = true;
+ }
+
+ if (hasData) {
+ const yMin = centerY - min * amp;
+ const yMax = centerY - max * amp;
+ ctx.moveTo(x, Math.max(offsetY, Math.min(offsetY + height, yMin)));
+ ctx.lineTo(x, Math.max(offsetY, Math.min(offsetY + height, yMax)));
+ }
+ }
+ ctx.stroke();
+ };
+
+ const drawVadLayer = (ctx: CanvasRenderingContext2D, width: number, height: number, offsetY: number, startTime: number, duration: number, dpr: number) => {
+ // Draw VAD state as a colored bar
+ // For now, just show current VAD state as a solid bar (could be enhanced with historical data)
+ const vadState = appStore.vadState();
+ const isSpeech = vadState.isSpeech;
+
+ // Background
+ ctx.fillStyle = isSpeech ? 'rgba(249, 115, 22, 0.4)' : 'rgba(100, 116, 139, 0.2)'; // Orange when speech, slate when silence
+ ctx.fillRect(0, offsetY, width, height);
+
+ // If energy-based detection is active, show energy level as a bar
+ const energyLevel = appStore.audioLevel();
+ const energyThreshold = appStore.energyThreshold();
+
+ if (energyLevel > 0) {
+ const barWidth = Math.min(width, width * (energyLevel / 0.3)); // Scale to max 30% energy
+ ctx.fillStyle = energyLevel > energyThreshold ? 'rgba(249, 115, 22, 0.8)' : 'rgba(74, 222, 128, 0.6)';
+ ctx.fillRect(width - barWidth, offsetY, barWidth, height);
+ }
+
+ // Draw a thin separator line at top
+ ctx.strokeStyle = 'rgba(148, 163, 184, 0.3)';
+ ctx.lineWidth = 1 * dpr;
+ ctx.beginPath();
+ ctx.moveTo(0, offsetY);
+ ctx.lineTo(width, offsetY);
+ ctx.stroke();
+
+ // Label
+ ctx.fillStyle = isSpeech ? '#fb923c' : '#64748b';
+ ctx.font = `${8 * dpr}px monospace`;
+ ctx.fillText(isSpeech ? 'SPEECH' : 'SILENCE', 4 * dpr, offsetY + height - 2 * dpr);
+ };
+
+ const drawOverlay = (ctx: CanvasRenderingContext2D, width: number, height: number, startTime: number, duration: number, dpr: number) => {
+ // Draw Trigger line (1.5s from right) if in V3 mode
+ const triggerX = width - (1.5 / duration) * width;
+ ctx.strokeStyle = 'rgba(255, 255, 0, 0.5)';
+ ctx.lineWidth = 1 * dpr;
+ ctx.beginPath();
+ ctx.moveTo(triggerX, 0);
+ ctx.lineTo(triggerX, height);
+ ctx.stroke();
+
+ // Time labels
+ ctx.fillStyle = '#94a3b8';
+ ctx.font = `${10 * dpr}px monospace`;
+ for (let i = 0; i <= 8; i += 2) {
+ const t = i;
+ const x = width - (t / duration) * width;
+ ctx.fillText(`-${t}s`, x + 3 * dpr, height - 6 * dpr);
+ }
+ };
+
+ return (
+
+
+
+ SPECTROGRAM + WAVEFORM ({getWindowDuration()}s)
+
+
+ );
+};
diff --git a/src/components/ModelLoadingOverlay.tsx b/src/components/ModelLoadingOverlay.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..de089eb9a301e2a1a2426fba0f56ac4952b76ee1
--- /dev/null
+++ b/src/components/ModelLoadingOverlay.tsx
@@ -0,0 +1,212 @@
import { Component, Show, For, createEffect, onCleanup } from 'solid-js';
+
+interface ModelLoadingOverlayProps {
+ isVisible: boolean;
+ progress: number;
+ message: string;
+ file?: string;
+ backend: 'webgpu' | 'wasm';
+ state: 'unloaded' | 'loading' | 'ready' | 'error';
+ selectedModelId: string;
+ onModelSelect: (id: string) => void;
+ onStart: () => void;
+ onLocalLoad: (files: FileList) => void;
+ onClose?: () => void;
+}
+
+export const MODELS = [
+ { id: 'parakeet-tdt-0.6b-v2', name: 'Parakeet v2', desc: 'English optimized' },
+ { id: 'parakeet-tdt-0.6b-v3', name: 'Parakeet v3', desc: 'Multilingual Streaming' },
+];
+
+export function getModelDisplayName(id: string): string {
+ return (MODELS.find((m) => m.id === id)?.name ?? id) || 'Unknown model';
+}
+
+export const ModelLoadingOverlay: Component = (props) => {
+ const progressWidth = () => `${Math.max(0, Math.min(100, props.progress))}%`;
+ let fileInput: HTMLInputElement | undefined;
+
+ const handleFileChange = (e: Event) => {
+ const files = (e.target as HTMLInputElement).files;
+ if (files && files.length > 0) {
+ props.onLocalLoad(files);
+ }
+ };
+
+ const handleClose = () => props.onClose?.();
+
+ createEffect(() => {
+ if (!props.isVisible || !props.onClose) return;
+ const handler = (e: KeyboardEvent) => {
+ if (e.key === 'Escape') {
+ e.preventDefault();
+ props.onClose?.();
+ }
+ };
+ document.addEventListener('keydown', handler);
+ return () => document.removeEventListener('keydown', handler);
+ });
+
+ return (
+
+ e.target === e.currentTarget && handleClose()}
+ >
+
+
+
+
+ {/* Close Button - show whenever onClose is provided so user can dismiss in any state */}
+
+
+ close
+
+
+
+ {/* Header */}
+
+
+ warning}
+ >
+
+ {props.state === 'loading' ? 'downloading' : 'neurology'}
+
+
+
+
+
+
+
+ {props.state === 'unloaded' ? 'Select the AI engine for this transcription session.' : props.message}
+
+
+
+ {/* Content */}
+
+
+
+
+
+ {(model) => (
+ props.onModelSelect(model.id)}
+ class={`flex items-center text-left p-6 rounded-3xl transition-all ${props.selectedModelId === model.id
+ ? 'nm-inset text-[var(--color-earthy-muted-green)] ring-2 ring-[var(--color-earthy-muted-green)]/20'
+ : 'nm-flat text-[var(--color-earthy-dark-brown)] hover:shadow-neu-btn-hover'
+ }`}
+ >
+
+
+
{model.name}
+
{model.desc}
+
+
+ )}
+
+
+
fileInput?.click()}
+ class="flex items-center text-left p-6 rounded-3xl nm-flat opacity-70 hover:opacity-100 transition-all hover:shadow-neu-btn-hover"
+ >
+
+ file_open
+
+
+
Local Model
+
Load from disk
+
+
+
+
+
props.onStart()}
+ class="w-full mt-6 py-5 bg-[var(--color-earthy-muted-green)] text-white font-extrabold rounded-3xl shadow-xl active:scale-[0.98] transition-all uppercase tracking-widest text-xs"
+ >
+ Initialize AI Engine
+
+
+
+
+ {/* Progress */}
+
+
+
+
+
+
+ Downloaded
+ {props.progress}%
+
+
+ Active File
+
+ {props.file || 'Preparing assets...'}
+
+
+
+
+
+
+
+
+ props.onStart()}
+ class="w-full py-5 nm-flat text-[var(--color-earthy-coral)] font-black rounded-3xl shadow-none hover:opacity-90 transition-all"
+ >
+ Retry Connection
+
+
+
+
+
+ {/* Footer */}
+
+
+ offline_bolt
+
+ {props.backend === 'webgpu' ? 'GPU Accelerated' : 'WASM Native'}
+
+
+
+ PRIVACY SECURED
+
+
+
+
+
+
+ );
+};
+
+
diff --git a/src/components/PrivacyBadge.tsx b/src/components/PrivacyBadge.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..bab25287e6ffd2a92a65d0e17332ce3e231c51c3
--- /dev/null
+++ b/src/components/PrivacyBadge.tsx
@@ -0,0 +1,19 @@
+import { Component } from 'solid-js';
+
+export const PrivacyBadge: Component = () => {
+ return (
+
+
+ shield
+ Private_Secure
+
+
+
+
+ );
+};
diff --git a/src/components/SettingsPanel.tsx b/src/components/SettingsPanel.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..5b6749afe67ec3de201bc89405877db12433666b
--- /dev/null
+++ b/src/components/SettingsPanel.tsx
@@ -0,0 +1,225 @@
+import { Component, For, Show } from 'solid-js';
+import { appStore } from '../stores/appStore';
+import { getModelDisplayName, MODELS } from './ModelLoadingOverlay';
+import type { AudioEngine } from '../lib/audio/types';
+
+const formatInterval = (ms: number) => {
+ if (ms >= 1000) return `${(ms / 1000).toFixed(1)}s`;
+ return `${ms}ms`;
+};
+
+export type SettingsPanelSection = 'full' | 'audio' | 'model';
+
+export interface SettingsContentProps {
+ /** When 'audio' or 'model', only that section is shown (e.g. hover on mic or load button). */
+ section?: SettingsPanelSection;
+ onClose: () => void;
+ onLoadModel: () => void;
+ onLocalLoad?: (files: FileList) => void;
+ onOpenDebug: () => void;
+ onDeviceSelect?: (id: string) => void;
+ audioEngine?: AudioEngine | null;
+ /** When true, panel expands upward (bar in lower half); content order is reversed so ASR model stays adjacent to the bar. */
+ expandUp?: () => boolean;
+}
+
+/** Embeddable settings form (e.g. inside floating bar expansion). */
+export const SettingsContent: Component = (props) => {
+ const isV4 = () => appStore.transcriptionMode() === 'v4-utterance';
+ const isV3 = () => appStore.transcriptionMode() === 'v3-streaming';
+
+ const expandUp = () => props.expandUp?.() ?? false;
+ const section = () => props.section ?? 'full';
+ const showAsr = () => section() === 'full' || section() === 'model';
+ const showAudio = () => section() === 'full' || section() === 'audio';
+ const showSliders = () => section() === 'full';
+ const showDebug = () => section() === 'full';
+
+ return (
+
+ );
+};
diff --git a/src/components/Sidebar.tsx b/src/components/Sidebar.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..4d61e8c9387c356597ec66ebf71e5dc4805399ed
--- /dev/null
+++ b/src/components/Sidebar.tsx
@@ -0,0 +1,142 @@
+import { Component, For, Show, createEffect, createSignal, onCleanup } from 'solid-js';
+
+interface SidebarProps {
+ activeTab: string;
+ onTabChange: (tab: string) => void;
+ // Recording controls
+ isRecording: boolean;
+ onToggleRecording: () => void;
+ // Model state
+ isModelReady: boolean;
+ onLoadModel: () => void;
+ modelState: string;
+ // Device selection
+ availableDevices: MediaDeviceInfo[];
+ selectedDeviceId: string;
+ onDeviceSelect: (id: string) => void;
+ // Audio feedback
+ audioLevel: number;
+}
+
+export const Sidebar: Component = (props) => {
+ const [showDevices, setShowDevices] = createSignal(false);
+ let triggerContainerRef: HTMLDivElement | undefined;
+ let popoverRef: HTMLDivElement | undefined;
+
+ createEffect(() => {
+ if (!showDevices()) return;
+ const onMouseDown = (e: MouseEvent) => {
+ const target = e.target as Node;
+ if (triggerContainerRef?.contains(target) || popoverRef?.contains(target)) return;
+ setShowDevices(false);
+ };
+ const onKeyDown = (e: KeyboardEvent) => {
+ if (e.key === 'Escape') setShowDevices(false);
+ };
+ document.addEventListener('mousedown', onMouseDown);
+ document.addEventListener('keydown', onKeyDown);
+ onCleanup(() => {
+ document.removeEventListener('mousedown', onMouseDown);
+ document.removeEventListener('keydown', onKeyDown);
+ });
+ });
+
+ return (
+
+ );
+};
+
+export default Sidebar;
+
+
diff --git a/src/components/StatusBar.tsx b/src/components/StatusBar.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..f0b8cab73505e7ad032dd5eee97708cc1ac66373
--- /dev/null
+++ b/src/components/StatusBar.tsx
@@ -0,0 +1,55 @@
+import { Component, Show } from 'solid-js';
+import { appStore } from '../stores/appStore';
+
+export const StatusBar: Component = () => {
+ const modelStatusText = () => {
+ switch (appStore.modelState()) {
+ case 'unloaded': return 'Model not loaded';
+ case 'loading': return appStore.modelMessage() || `Loading... ${appStore.modelProgress()}%`;
+ case 'ready': return 'Ready';
+ case 'error': return 'Error';
+ default: return '';
+ }
+ };
+
+ const statusDotClass = () => {
+ switch (appStore.modelState()) {
+ case 'ready': return 'bg-green-500 shadow-[0_0_8px_rgba(34,197,94,0.6)]';
+ case 'loading': return 'bg-yellow-500 animate-pulse';
+ case 'error': return 'bg-red-500';
+ default: return 'bg-gray-400';
+ }
+ };
+
+ return (
+
+
+
+
+
{modelStatusText()}
+
+
+
+ memory
+ BACKEND: {appStore.backend().toUpperCase()}
+
+
+
+
+
+ BUILD: 20250828.VAD_REFIX
+
+
+
+ offline_bolt
+ 100% On-Device
+
+
+
+
+
{appStore.isOnline() ? 'SYNC: CONNECTED' : 'SYNC: OFFLINE'}
+
+
+
+ );
+};
diff --git a/src/components/TranscriptionDisplay.tsx b/src/components/TranscriptionDisplay.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..97c7f93b7c8c39e6b2b6186dd1329d57cb48f3f5
--- /dev/null
+++ b/src/components/TranscriptionDisplay.tsx
@@ -0,0 +1,425 @@
+import { Component, For, Show, createEffect, createMemo, createSignal, onCleanup } from 'solid-js';
+import type { V4SentenceEntry } from '../lib/transcription/TranscriptionWorkerClient';
+
+export interface TranscriptionDisplayProps {
+ confirmedText: string;
+ pendingText: string;
+ sentenceEntries?: V4SentenceEntry[];
+ isV4Mode?: boolean;
+ isRecording: boolean;
+ lcsLength?: number;
+ anchorValid?: boolean;
+ showConfidence?: boolean;
+ placeholder?: string;
+ class?: string;
+}
+
+const formatClockTime = (timestamp: number): string => {
+ if (!Number.isFinite(timestamp)) return '--:--:--';
+ return new Date(timestamp).toLocaleTimeString([], {
+ hour12: false,
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+ });
+};
+
+const formatAudioTime = (seconds: number): string => {
+ if (!Number.isFinite(seconds)) return '0:00.00';
+ const totalSeconds = Math.max(0, seconds);
+ const minutes = Math.floor(totalSeconds / 60);
+ const secondPart = (totalSeconds % 60).toFixed(2).padStart(5, '0');
+ return `${minutes}:${secondPart}`;
+};
+
+const formatAudioRange = (startTime: number, endTime: number): string =>
+ `${formatAudioTime(startTime)} -> ${formatAudioTime(endTime)}`;
+
+const MERGED_SPLIT_STORAGE_KEY = 'keet-merged-split-ratio';
+const MIN_MERGED_SPLIT_RATIO = 0.3;
+const MAX_MERGED_SPLIT_RATIO = 0.7;
+
+const clampMergedSplitRatio = (ratio: number): number =>
+ Math.min(MAX_MERGED_SPLIT_RATIO, Math.max(MIN_MERGED_SPLIT_RATIO, ratio));
+
+const getInitialMergedSplitRatio = (): number => {
+ if (typeof localStorage === 'undefined') return 0.5;
+ try {
+ const raw = Number(localStorage.getItem(MERGED_SPLIT_STORAGE_KEY));
+ if (Number.isFinite(raw)) return clampMergedSplitRatio(raw);
+ } catch (_) {}
+ return 0.5;
+};
+
+export const TranscriptionDisplay: Component = (props) => {
+ let liveContainerRef: HTMLDivElement | undefined;
+ let mergedContainerRef: HTMLDivElement | undefined;
+ let mergedSplitContainerRef: HTMLDivElement | undefined;
+ let sentenceListDesktopRef: HTMLDivElement | undefined;
+ let sentenceListMobileRef: HTMLDivElement | undefined;
+ let scrollScheduled = false;
+ const [activeTab, setActiveTab] = createSignal<'live' | 'merged'>('live');
+ const [mergedSplitRatio, setMergedSplitRatio] = createSignal(getInitialMergedSplitRatio());
+ const [isSplitResizing, setIsSplitResizing] = createSignal(false);
+ let splitMouseMoveHandler: ((event: MouseEvent) => void) | null = null;
+ let splitMouseUpHandler: (() => void) | null = null;
+
+ const scrollToBottom = () => {
+ if (scrollScheduled) return;
+ scrollScheduled = true;
+ requestAnimationFrame(() => {
+ scrollScheduled = false;
+ const activeContainer = activeTab() === 'merged' ? mergedContainerRef : liveContainerRef;
+ if (activeContainer) {
+ activeContainer.scrollTop = activeContainer.scrollHeight;
+ }
+ });
+ };
+
+ const getVisibleSentenceListContainer = (): HTMLDivElement | undefined => {
+ if (sentenceListDesktopRef && sentenceListDesktopRef.offsetParent !== null) {
+ return sentenceListDesktopRef;
+ }
+ if (sentenceListMobileRef && sentenceListMobileRef.offsetParent !== null) {
+ return sentenceListMobileRef;
+ }
+ return sentenceListDesktopRef ?? sentenceListMobileRef;
+ };
+
+ const scrollSentenceListToBottom = () => {
+ requestAnimationFrame(() => {
+ const container = getVisibleSentenceListContainer();
+ if (!container) return;
+ container.scrollTop = container.scrollHeight;
+ });
+ };
+
+ const persistMergedSplitRatio = (ratio: number) => {
+ if (typeof localStorage === 'undefined') return;
+ try {
+ localStorage.setItem(MERGED_SPLIT_STORAGE_KEY, String(ratio));
+ } catch (_) {}
+ };
+
+ const startSplitResize = (event: MouseEvent) => {
+ if (!mergedSplitContainerRef) return;
+ event.preventDefault();
+
+ const rect = mergedSplitContainerRef.getBoundingClientRect();
+ if (rect.width <= 0) return;
+
+ setIsSplitResizing(true);
+ document.body.style.cursor = 'col-resize';
+ document.body.style.userSelect = 'none';
+
+ const applyRatioFromClientX = (clientX: number) => {
+ const nextRatio = clampMergedSplitRatio((clientX - rect.left) / rect.width);
+ setMergedSplitRatio(nextRatio);
+ };
+
+ const onMouseMove = (moveEvent: MouseEvent) => {
+ applyRatioFromClientX(moveEvent.clientX);
+ };
+
+ const onMouseUp = () => {
+ setIsSplitResizing(false);
+ document.body.style.cursor = '';
+ document.body.style.userSelect = '';
+ persistMergedSplitRatio(mergedSplitRatio());
+ window.removeEventListener('mousemove', onMouseMove);
+ window.removeEventListener('mouseup', onMouseUp);
+ splitMouseMoveHandler = null;
+ splitMouseUpHandler = null;
+ };
+
+ splitMouseMoveHandler = onMouseMove;
+ splitMouseUpHandler = onMouseUp;
+ window.addEventListener('mousemove', onMouseMove);
+ window.addEventListener('mouseup', onMouseUp);
+ };
+
+ createEffect(() => {
+ if (!props.isV4Mode && activeTab() !== 'live') {
+ setActiveTab('live');
+ }
+ });
+
+ const hasContent = createMemo(() =>
+ (props.confirmedText?.length ?? 0) > 0 || (props.pendingText?.length ?? 0) > 0
+ );
+
+ const finalizedEntries = createMemo(() => props.sentenceEntries ?? []);
+ const mergedCount = createMemo(() => finalizedEntries().length + (props.pendingText?.trim() ? 1 : 0));
+ const fullTextBody = createMemo(() => {
+ const finalized = finalizedEntries()
+ .map((entry) => entry.text.trim())
+ .filter((text) => text.length > 0)
+ .join(' ')
+ .trim();
+ const live = props.pendingText.trim();
+ if (finalized && live) return `${finalized} ${live}`.trim();
+ return finalized || live || '';
+ });
+
+ createEffect(() => {
+ activeTab();
+ props.confirmedText;
+ props.pendingText;
+ finalizedEntries().length;
+ scrollToBottom();
+ });
+
+ createEffect(() => {
+ if (!props.isV4Mode || activeTab() !== 'merged') return;
+ finalizedEntries().length;
+ props.pendingText;
+ scrollSentenceListToBottom();
+ });
+
+ onCleanup(() => {
+ document.body.style.cursor = '';
+ document.body.style.userSelect = '';
+ if (splitMouseMoveHandler) {
+ window.removeEventListener('mousemove', splitMouseMoveHandler);
+ }
+ if (splitMouseUpHandler) {
+ window.removeEventListener('mouseup', splitMouseUpHandler);
+ }
+ });
+
+ const renderFullTextContent = () => (
+ 0} fallback={
+
+ Waiting for transcript text...
+
+ }>
+
+ {fullTextBody()}
+
+
+ );
+
+ const renderSentenceListContent = () => (
+ 0 || !!props.pendingText.trim()} fallback={
+
+
view_list
+
+ No merged conversation entries yet...
+
+
+ }>
+
+
+ {(entry) => (
+
+
+ {formatClockTime(entry.emittedAt)}
+
+
+ [{formatAudioRange(entry.startTime, entry.endTime)}]
+
+
+ {entry.text}
+
+
+ )}
+
+
+
+
+
+ {formatClockTime(Date.now())}
+
+
+ LIVE
+
+
+ {props.pendingText}
+
+
+
+
+
+ );
+
+ return (
+
+
+
+ setActiveTab('live')}
+ >
+ Live
+
+ setActiveTab('merged')}
+ >
+ Merged
+
+ {mergedCount()}
+
+
+
+
+
+
+
+ graphic_eq
+
+ {props.placeholder ?? 'Ready to transcribe...'}
+
+
+ }
+ >
+
+
+
+ {/* Confirmed text */}
+
+ {props.confirmedText}
+
+
+ {/* Pending text */}
+
+
+ {props.pendingText}
+
+
+
+
+
+
+ {/* Listening indicator when idle but recording */}
+
+
+
+
+
+
+ }>
+