import React, { useState, useRef, useEffect } from 'react';
import {
Upload, Activity, Heart, FileAudio,
CheckCircle, AlertTriangle, RefreshCw,
Mic, Square, Download, Cpu, Sun, Moon, Menu, X
} from 'lucide-react';
import jsPDF from 'jspdf';
import './App.css';
// ─── Waveform Canvas with Time Axis + Heartbeat Markers ──────────────────────
// Waveform Canvas: static bottom layer + live animated overlay
/**
 * Phonocardiogram display built from two stacked canvases:
 *  - a static layer (waveform trace, amplitude grid, heartbeat markers, time axis)
 *    redrawn only when the data changes, and
 *  - an animated overlay (playhead + per-beat pulse effects) driven by
 *    requestAnimationFrame while the linked <audio> element is playing.
 *
 * Props:
 *  - waveform: downsampled amplitude array (values roughly in [-1, 1]) to plot.
 *  - peakVisIndices: indices into `waveform` where heartbeats were detected.
 *  - peakTimesSec: beat times in seconds, parallel to peakVisIndices.
 *  - duration: clip length in seconds (drives the time axis and playhead math).
 *  - isDisease: switches the accent palette — red (murmur) vs cyan (normal).
 *  - canvasRefOut: optional ref that receives the static canvas (used for PDF export).
 *  - audioRef: ref to the <audio> element whose playback drives the overlay.
 */
function WaveformCanvas({ waveform, peakVisIndices, peakTimesSec, duration, isDisease, canvasRefOut, audioRef }) {
  const staticRef = useRef(null);   // bottom, static canvas
  const overlayRef = useRef(null);  // top, animated canvas
  const rafRef = useRef(null);      // current requestAnimationFrame id for the overlay loop
  const accentColor = isDisease ? '#ef4444' : '#06b6d4';
  // Chart padding: left gutter for the rotated "Amplitude" label, bottom for the time axis.
  const PAD = { L: 42, R: 12, T: 12, B: 36 };
  // Static draw: runs once when waveform data changes
  useEffect(() => {
    const canvas = staticRef.current;
    if (!canvas || !waveform || waveform.length === 0) return;
    if (canvasRefOut) canvasRefOut.current = canvas;
    // Size the backing store by devicePixelRatio so lines stay crisp on HiDPI screens.
    const dpr = window.devicePixelRatio || 1;
    const W = canvas.offsetWidth, H = canvas.offsetHeight;
    canvas.width = W * dpr; canvas.height = H * dpr;
    const ctx = canvas.getContext('2d');
    ctx.scale(dpr, dpr);
    const { L, R, T, B } = PAD;
    const cW = W - L - R, cH = H - B - T;
    const n = waveform.length;
    // Map sample index -> x pixel and amplitude -> y pixel
    // (the 0.42 factor leaves headroom so peaks don't touch the chart edges).
    const xOf = (i) => L + (i / (n - 1)) * cW;
    const yOf = (v) => T + cH / 2 - v * cH * 0.42;
    ctx.fillStyle = 'rgba(5,8,18,1)'; ctx.fillRect(0, 0, W, H);
    // Faint horizontal grid lines (quarters of the chart height).
    ctx.strokeStyle = 'rgba(255,255,255,0.04)'; ctx.lineWidth = 1;
    for (let i = 0; i <= 4; i++) {
      const y = T + (cH * i) / 4;
      ctx.beginPath(); ctx.moveTo(L, y); ctx.lineTo(L + cW, y); ctx.stroke();
    }
    // Filled area between the trace and the midline, fading to transparent.
    const grad = ctx.createLinearGradient(0, T, 0, T + cH);
    grad.addColorStop(0, isDisease ? 'rgba(239,68,68,0.5)' : 'rgba(6,182,212,0.5)');
    grad.addColorStop(0.5, isDisease ? 'rgba(239,68,68,0.08)' : 'rgba(6,182,212,0.08)');
    grad.addColorStop(1, 'rgba(0,0,0,0)');
    ctx.beginPath();
    ctx.moveTo(xOf(0), yOf(waveform[0]));
    for (let i = 1; i < n; i++) ctx.lineTo(xOf(i), yOf(waveform[i]));
    ctx.lineTo(xOf(n - 1), T + cH / 2); ctx.lineTo(xOf(0), T + cH / 2);
    ctx.closePath(); ctx.fillStyle = grad; ctx.fill();
    // Waveform stroke with a soft glow in the accent color.
    ctx.beginPath();
    ctx.moveTo(xOf(0), yOf(waveform[0]));
    for (let i = 1; i < n; i++) ctx.lineTo(xOf(i), yOf(waveform[i]));
    ctx.strokeStyle = accentColor; ctx.lineWidth = 1.8;
    ctx.shadowBlur = 8; ctx.shadowColor = isDisease ? 'rgba(239,68,68,0.4)' : 'rgba(6,182,212,0.4)';
    ctx.stroke(); ctx.shadowBlur = 0;
    // Beat markers: dashed vertical line + small rotated-square "diamond" at the top,
    // plus a time label (every beat, or every other beat once >= 30 beats).
    if (peakVisIndices && peakVisIndices.length > 0) {
      peakVisIndices.forEach((pidx, i) => {
        const x = xOf(pidx);
        ctx.setLineDash([3, 4]); ctx.strokeStyle = 'rgba(255,255,255,0.2)'; ctx.lineWidth = 1;
        ctx.beginPath(); ctx.moveTo(x, T + 4); ctx.lineTo(x, T + cH); ctx.stroke();
        ctx.setLineDash([]);
        ctx.save(); ctx.translate(x, T + 6); ctx.rotate(Math.PI / 4);
        ctx.fillStyle = accentColor; ctx.fillRect(-4, -4, 8, 8); ctx.restore();
        if (peakTimesSec && peakTimesSec[i] !== undefined && (peakVisIndices.length < 30 || i % 2 === 0)) {
          ctx.fillStyle = 'rgba(255,255,255,0.5)'; ctx.font = '9px monospace';
          ctx.textAlign = 'center'; ctx.fillText(peakTimesSec[i].toFixed(1) + 's', x, T + 22);
        }
      });
    }
    // Time axis with evenly spaced whole-second ticks.
    const axisY = T + cH + 6;
    ctx.strokeStyle = 'rgba(255,255,255,0.15)'; ctx.lineWidth = 1;
    ctx.beginPath(); ctx.moveTo(L, axisY); ctx.lineTo(L + cW, axisY); ctx.stroke();
    // NOTE(review): numTicks is 0 for clips shorter than 1s, which makes
    // ts = (t / 0) * duration = NaN below — guard if sub-second clips are possible.
    const numTicks = Math.min(12, Math.floor(duration));
    for (let t = 0; t <= numTicks; t++) {
      const ts = (t / numTicks) * duration;
      const x = L + (ts / duration) * cW;
      ctx.beginPath(); ctx.moveTo(x, axisY); ctx.lineTo(x, axisY + 5); ctx.stroke();
      ctx.fillStyle = 'rgba(255,255,255,0.5)'; ctx.font = '10px monospace';
      ctx.textAlign = 'center'; ctx.fillText(ts.toFixed(0) + 's', x, axisY + 16);
    }
    ctx.fillStyle = 'rgba(255,255,255,0.3)'; ctx.font = '10px sans-serif';
    ctx.textAlign = 'left'; ctx.fillText('Time (seconds)', L, axisY + 30);
    // Rotated "Amplitude" label in the left gutter.
    ctx.save(); ctx.translate(12, T + cH / 2); ctx.rotate(-Math.PI / 2);
    ctx.fillStyle = 'rgba(255,255,255,0.3)'; ctx.font = '10px sans-serif';
    ctx.textAlign = 'center'; ctx.fillText('Amplitude', 0, 0); ctx.restore();
  }, [waveform, peakVisIndices, peakTimesSec, duration, isDisease]);
  // Animation loop: playhead + beat pulse (runs while audio plays)
  useEffect(() => {
    const audio = audioRef?.current;
    const overlay = overlayRef.current;
    if (!overlay || !audio || !duration) return;
    const { L, R, T, B } = PAD;
    const BEAT_WIN = 0.35; // seconds a beat glows after being crossed
    const drawOverlay = () => {
      const dpr = window.devicePixelRatio || 1;
      const W = overlay.offsetWidth, H = overlay.offsetHeight;
      // Resize the backing store lazily — only when the CSS size actually changed.
      if (overlay.width !== Math.round(W * dpr)) overlay.width = Math.round(W * dpr);
      if (overlay.height !== Math.round(H * dpr)) overlay.height = Math.round(H * dpr);
      const ctx = overlay.getContext('2d');
      ctx.save();
      ctx.clearRect(0, 0, overlay.width, overlay.height);
      ctx.scale(dpr, dpr);
      const cW = W - L - R, cH = H - B - T;
      const n = waveform ? waveform.length : 1;
      const t = audio.currentTime;
      if (t > 0 && cW > 0) {
        const px = L + (t / duration) * cW;
        // Scanned region overlay
        ctx.fillStyle = 'rgba(255,255,255,0.025)';
        ctx.fillRect(L, T, px - L, cH);
        // White glowing playhead
        ctx.save();
        ctx.strokeStyle = 'rgba(255,255,255,0.92)'; ctx.lineWidth = 1.5;
        ctx.shadowBlur = 10; ctx.shadowColor = 'rgba(255,255,255,0.8)';
        ctx.beginPath(); ctx.moveTo(px, T); ctx.lineTo(px, T + cH); ctx.stroke();
        ctx.restore();
        // Beat pulse glows: only beats crossed within the last BEAT_WIN seconds
        // glow; alpha fades linearly to 0 over that window.
        if (peakTimesSec && peakVisIndices) {
          peakTimesSec.forEach((beatT, i) => {
            const diff = t - beatT;
            if (diff < 0 || diff > BEAT_WIN) return;
            const alpha = 1 - diff / BEAT_WIN;
            const bx = L + (peakVisIndices[i] / (n - 1)) * cW;
            // Radial halo
            ctx.save();
            const rg = ctx.createRadialGradient(bx, T + cH / 2, 0, bx, T + cH / 2, 40);
            rg.addColorStop(0, isDisease ? `rgba(239,68,68,${alpha * 0.55})` : `rgba(6,182,212,${alpha * 0.55})`);
            rg.addColorStop(1, 'rgba(0,0,0,0)');
            ctx.fillStyle = rg;
            ctx.fillRect(bx - 42, T, 84, cH);
            ctx.restore();
            // Pulsing diamond (shrinks as the glow fades)
            const size = 5 + alpha * 10;
            ctx.save();
            ctx.translate(bx, T + cH / 2);
            ctx.rotate(Math.PI / 4);
            ctx.globalAlpha = alpha;
            ctx.fillStyle = accentColor;
            ctx.shadowBlur = 22 * alpha; ctx.shadowColor = accentColor;
            ctx.fillRect(-size / 2, -size / 2, size, size);
            ctx.restore();
            // Beat number label ("♥ N")
            ctx.save();
            ctx.globalAlpha = alpha * 0.85;
            ctx.fillStyle = 'white'; ctx.font = 'bold 11px monospace';
            ctx.textAlign = 'center';
            ctx.fillText('\u2665 ' + (i + 1), bx, T + cH / 2 + 22);
            ctx.restore();
          });
        }
      }
      ctx.restore();
      rafRef.current = requestAnimationFrame(drawOverlay);
    };
    const startLoop = () => {
      if (!rafRef.current) rafRef.current = requestAnimationFrame(drawOverlay);
    };
    const stopLoop = () => {
      if (rafRef.current) { cancelAnimationFrame(rafRef.current); rafRef.current = null; }
      const ctx = overlay.getContext('2d');
      if (ctx) ctx.clearRect(0, 0, overlay.width, overlay.height);
    };
    audio.addEventListener('play', startLoop);
    audio.addEventListener('pause', stopLoop);
    audio.addEventListener('ended', stopLoop);
    // NOTE(review): this anonymous 'seeked' handler is never removed in the
    // cleanup below, so it accumulates across effect re-runs — give it a name
    // and remove it alongside the others.
    audio.addEventListener('seeked', () => { if (!audio.paused) startLoop(); });
    return () => {
      stopLoop();
      audio.removeEventListener('play', startLoop);
      audio.removeEventListener('pause', stopLoop);
      audio.removeEventListener('ended', stopLoop);
    };
  }, [waveform, peakVisIndices, peakTimesSec, duration, isDisease, audioRef]);
  // NOTE(review): the JSX markup for this return (presumably the stacked
  // static + overlay <canvas> elements wired to staticRef/overlayRef) appears
  // to have been lost in extraction — an empty parenthesis pair is a syntax
  // error and must be restored before the file can build.
  return (
  );
}
// Module-level WAV encoder (used by both recording and trimmer)
/**
 * Encode a slice of an AudioBuffer as a 16-bit PCM WAV blob.
 *
 * @param {AudioBuffer} abuffer - decoded audio; any object exposing
 *   numberOfChannels, sampleRate and getChannelData(ch) -> Float32Array works.
 * @param {number} startSample - first per-channel sample index to include.
 * @param {number} numSamples - number of per-channel samples to write.
 * @returns {Blob} 'audio/wav' blob with a standard 44-byte RIFF/WAVE header.
 */
function bufferToWave(abuffer, startSample, numSamples) {
  const numChan = abuffer.numberOfChannels;
  const sr = abuffer.sampleRate;
  // 16-bit PCM: 2 bytes per sample per channel, plus the 44-byte header.
  const byteLen = numSamples * numChan * 2 + 44;
  const buf = new ArrayBuffer(byteLen);
  const view = new DataView(buf);
  let pos = 0;
  const w16 = (v) => { view.setUint16(pos, v, true); pos += 2; };
  const w32 = (v) => { view.setUint32(pos, v, true); pos += 4; };
  // RIFF/WAVE header; magic numbers are ASCII tags written little-endian:
  // 'RIFF', 'WAVE', 'fmt ', 'data'.
  w32(0x46464952); w32(byteLen - 8); w32(0x45564157);
  w32(0x20746d66); w32(16); w16(1); w16(numChan);               // fmt chunk, PCM format
  w32(sr); w32(sr * 2 * numChan); w16(numChan * 2); w16(16);    // byte rate, block align, bits/sample
  w32(0x61746164); w32(byteLen - pos - 4);                      // 'data' tag + payload size
  const channels = [];
  for (let c = 0; c < numChan; c++) channels.push(abuffer.getChannelData(c));
  // Interleave channels, clamping to [-1, 1] and rounding to int16.
  for (let i = 0; i < numSamples; i++) {
    for (let c = 0; c < numChan; c++) {
      const s = Math.max(-1, Math.min(1, channels[c][startSample + i]));
      // BUGFIX: the previous `(0.5 + s < 0 ? ...) | 0` parsed as `(0.5 + s) < 0`
      // due to operator precedence, so negative samples in (-0.5, 0) were scaled
      // by the positive factor and the intended +0.5 rounding offset was consumed
      // by the comparison. Round explicitly instead.
      view.setInt16(pos, Math.round(s < 0 ? s * 32768 : s * 32767), true);
      pos += 2;
    }
  }
  return new Blob([buf], { type: 'audio/wav' });
}
// Decode any audio blob/file and resolve with the decoded AudioBuffer
// (callers read .duration, .length and .getChannelData() from it).
async function decodeAudioBlob(blob) {
  const arrayBuffer = await blob.arrayBuffer();
  const ctx = new (window.AudioContext || window.webkitAudioContext)();
  try {
    return await ctx.decodeAudioData(arrayBuffer);
  } finally {
    // BUGFIX: always release the AudioContext — the previous version leaked
    // the context (and its OS audio resources) when decodeAudioData rejected,
    // e.g. on a non-audio or corrupt file.
    await ctx.close();
  }
}
// Downsample an array-like of samples to roughly `points` values for display
// by keeping every `stride`-th sample (stride = floor(length / points), min 1).
// Returns a plain Array; inputs shorter than `points` are copied unchanged.
function downsample(data, points = 800) {
  const stride = Math.max(1, Math.floor(data.length / points));
  const sampleCount = Math.ceil(data.length / stride);
  return Array.from({ length: sampleCount }, (_, k) => data[k * stride]);
}
// ─── Audio Trimmer ─────────────────────────────────────────────────────────────
/**
 * Interactive trimmer: renders the full recording on a canvas and lets the
 * user drag two handles to select the clean cardiac section before analysis.
 *
 * Props:
 *  - waveform: downsampled amplitude array of the full recording.
 *  - duration: full recording length in seconds.
 *  - onAnalyze: callback receiving (startFrac, endFrac) in [0, 1].
 *  - onSkip: callback to analyze the untrimmed recording.
 */
function AudioTrimmer({ waveform, duration, onAnalyze, onSkip }) {
  const canvasRef = useRef(null);
  const [startFrac, setStartFrac] = useState(0);  // selection start as a 0..1 fraction of duration
  const [endFrac, setEndFrac] = useState(1);      // selection end as a 0..1 fraction of duration
  const dragging = useRef(null); // 'start' | 'end' | null
  const stateRef = useRef({ startFrac: 0, endFrac: 1 });
  const PAD = { L: 16, R: 16, T: 24, B: 28 };
  // Keep stateRef in sync for RAF access inside mouse handlers
  useEffect(() => { stateRef.current = { startFrac, endFrac }; }, [startFrac, endFrac]);
  // Draw whenever state or waveform changes
  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas || !waveform || waveform.length === 0) return;
    // HiDPI-aware backing store.
    const dpr = window.devicePixelRatio || 1;
    const W = canvas.offsetWidth, H = canvas.offsetHeight;
    canvas.width = W * dpr; canvas.height = H * dpr;
    const ctx = canvas.getContext('2d');
    ctx.scale(dpr, dpr);
    const { L, R, T, B } = PAD;
    const cW = W - L - R, cH = H - T - B;
    const n = waveform.length;
    // Index -> x pixel, amplitude -> y pixel (0.42 leaves vertical headroom).
    const xOf = (i) => L + (i / (n - 1)) * cW;
    const yOf = (v) => T + cH / 2 - v * cH * 0.42;
    // Handle x positions in pixels.
    const sX = L + startFrac * cW;
    const eX = L + endFrac * cW;
    // Background
    ctx.fillStyle = 'rgba(5,8,18,1)'; ctx.fillRect(0, 0, W, H);
    // Waveform (dimmed outside selection)
    for (let i = 1; i < n; i++) {
      const x0 = xOf(i - 1), x1 = xOf(i);
      const inside = x0 >= sX && x1 <= eX;
      ctx.strokeStyle = inside ? '#06b6d4' : 'rgba(255,255,255,0.12)';
      ctx.lineWidth = inside ? 1.8 : 1;
      ctx.shadowBlur = inside ? 6 : 0;
      ctx.shadowColor = '#06b6d4';
      ctx.beginPath(); ctx.moveTo(x0, yOf(waveform[i - 1])); ctx.lineTo(x1, yOf(waveform[i])); ctx.stroke();
    }
    ctx.shadowBlur = 0;
    // Dim excluded zones
    ctx.fillStyle = 'rgba(5,8,18,0.6)';
    ctx.fillRect(L, T, sX - L, cH);
    ctx.fillRect(eX, T, L + cW - eX, cH);
    // Selection highlight box
    ctx.strokeStyle = 'rgba(6,182,212,0.4)'; ctx.lineWidth = 1;
    ctx.strokeRect(sX, T, eX - sX, cH);
    ctx.fillStyle = 'rgba(6,182,212,0.05)'; ctx.fillRect(sX, T, eX - sX, cH);
    // Draw handles (vertical bar with grip arrows pointing into the selection)
    [[sX, 'start'], [eX, 'end']].forEach(([x, side]) => {
      ctx.fillStyle = '#06b6d4';
      ctx.fillRect(x - 2, T, 4, cH);
      // Arrow triangle on handle
      ctx.fillStyle = 'white';
      const dir = side === 'start' ? 1 : -1;
      ctx.beginPath();
      ctx.moveTo(x + dir * 2, T + cH / 2);
      ctx.lineTo(x + dir * 10, T + cH / 2 - 7);
      ctx.lineTo(x + dir * 10, T + cH / 2 + 7);
      ctx.closePath(); ctx.fill();
    });
    // Time labels on handles
    ctx.fillStyle = 'rgba(255,255,255,0.85)'; ctx.font = 'bold 11px monospace';
    ctx.textAlign = 'center';
    ctx.fillText((startFrac * duration).toFixed(2) + 's', sX, T - 6);
    ctx.fillText((endFrac * duration).toFixed(2) + 's', eX, T - 6);
    // Bottom axis
    const axisY = T + cH + 6;
    ctx.strokeStyle = 'rgba(255,255,255,0.12)'; ctx.lineWidth = 1;
    ctx.beginPath(); ctx.moveTo(L, axisY); ctx.lineTo(L + cW, axisY); ctx.stroke();
    // NOTE(review): numTicks is 0 for clips shorter than 1s, making
    // t / numTicks = 0/0 (NaN) below — guard if sub-second clips are possible.
    const numTicks = Math.min(10, Math.floor(duration));
    for (let t = 0; t <= numTicks; t++) {
      const x = L + (t / numTicks) * cW;
      ctx.fillStyle = 'rgba(255,255,255,0.4)'; ctx.font = '9px monospace';
      ctx.textAlign = 'center'; ctx.fillText(((t / numTicks) * duration).toFixed(0) + 's', x, axisY + 14);
    }
    // Selection duration label in center
    const selSec = ((endFrac - startFrac) * duration).toFixed(1);
    ctx.fillStyle = 'rgba(6,182,212,0.9)'; ctx.font = 'bold 12px sans-serif';
    ctx.textAlign = 'center';
    ctx.fillText(`Selection: ${selSec}s`, L + cW / 2, T + cH / 2 - 14);
  }, [waveform, duration, startFrac, endFrac]);
  // Mouse interaction
  // Convert a client-space x coordinate to a 0..1 fraction of the chart width.
  const fracFromX = (canvas, clientX) => {
    const rect = canvas.getBoundingClientRect();
    const { L, R } = PAD;
    const cW = rect.width - L - R;
    return Math.max(0, Math.min(1, (clientX - rect.left - L) / cW));
  };
  const onMouseDown = (e) => {
    const canvas = canvasRef.current;
    const f = fracFromX(canvas, e.clientX);
    const { startFrac: s, endFrac: en } = stateRef.current;
    // Grab whichever handle is closer to the press position.
    const dS = Math.abs(f - s), dE = Math.abs(f - en);
    dragging.current = dS < dE ? 'start' : 'end';
  };
  const onMouseMove = (e) => {
    if (!dragging.current) return;
    const f = fracFromX(canvasRef.current, e.clientX);
    const { startFrac: s, endFrac: en } = stateRef.current;
    // Keep at least a 1% gap so the selection can never collapse or invert.
    if (dragging.current === 'start') setStartFrac(Math.min(f, en - 0.01));
    else setEndFrac(Math.max(f, s + 0.01));
  };
  const onMouseUp = () => { dragging.current = null; };
  // Touch support (reuse the mouse handlers with the first touch point)
  const onTouchStart = (e) => onMouseDown(e.touches[0]);
  const onTouchMove = (e) => { e.preventDefault(); onMouseMove(e.touches[0]); };
  const onTouchEnd = () => onMouseUp();
  // NOTE(review): the JSX markup for this component (the <canvas> wired to
  // canvasRef and the Analyze/Skip controls calling onAnalyze/onSkip) appears
  // to have been stripped — the bare text below is a syntax error and the
  // original markup must be restored before the file can build.
  return (
  ✂ Drag the handles to select the clean cardiac section
  );
}
const API_URL = (import.meta.env.VITE_API_URL) ?? "http://127.0.0.1:8000/analyze";
function App() {
  // ── All useState hooks first (React rules of hooks) ──────
  // appState drives which view renders:
  // 'upload' | 'recording' | 'trimming' | 'analyzing' | 'dashboard'.
  const [appState, setAppState] = useState('upload');
  const [patientData, setPatientData] = useState({ dogId: '', breed: '', age: '' });
  const [analysisResult, setAnalysisResult] = useState(null); // backend result + derived display fields
  const [isRecording, setIsRecording] = useState(false);
  const [recordingTime, setRecordingTime] = useState(0);      // elapsed seconds while recording
  const [audioBlob, setAudioBlob] = useState(null);           // current WAV blob (full or trimmed)
  const [audioUrl, setAudioUrl] = useState(null);             // object URL for playing audioBlob
  const [trimWaveform, setTrimWaveform] = useState(null);     // downsampled waveform for the trimmer
  const [trimDuration, setTrimDuration] = useState(0);        // full recording length in seconds
  const [theme, setTheme] = useState('dark');
  const [mobileNavOpen, setMobileNavOpen] = useState(false);
  // ── All useRef hooks next ─────────────────────────────────
  const rawAudioBuffer = useRef(null);    // decoded AudioBuffer of the last capture/upload
  const audioContextRef = useRef(null);   // AudioContext powering the live recording scope
  const analyserRef = useRef(null);       // AnalyserNode feeding drawWaveform
  const mediaStreamRef = useRef(null);    // active getUserMedia stream
  const sourceRef = useRef(null);         // MediaStreamSource -> analyser connection
  const animationFrameRef = useRef(null); // rAF id of the live scope loop
  const canvasRef = useRef(null);         // live-recording oscilloscope canvas
  const timerRef = useRef(null);          // setInterval id for the recording timer
  const mediaRecorderRef = useRef(null);
  const audioChunksRef = useRef([]);      // MediaRecorder data chunks
  const waveformCanvasRef = useRef(null); // static result canvas (captured into the PDF)
  const audioRef = useRef(null);          // dashboard <audio> element
  // Apply theme attribute to element
  useEffect(() => {
    document.documentElement.setAttribute('data-theme', theme);
  }, [theme]);
  const toggleTheme = () => setTheme(t => t === 'dark' ? 'light' : 'dark');
  const closeMobileNav = () => setMobileNavOpen(false);
const stopRecording = () => {
if (mediaRecorderRef.current && mediaRecorderRef.current.state !== 'inactive') {
mediaRecorderRef.current.stop();
}
if (mediaStreamRef.current) {
mediaStreamRef.current.getTracks().forEach(track => track.stop());
mediaStreamRef.current = null;
}
if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
audioContextRef.current.close().catch(console.error);
audioContextRef.current = null;
}
if (animationFrameRef.current) cancelAnimationFrame(animationFrameRef.current);
if (timerRef.current) clearInterval(timerRef.current);
setIsRecording(false);
};
const startRecording = async () => {
if (!patientData.dogId) {
alert("Please enter a Dog ID first.");
return;
}
try {
setAppState('recording');
setIsRecording(true);
setRecordingTime(0);
audioChunksRef.current = [];
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
mediaStreamRef.current = stream;
mediaRecorderRef.current = new MediaRecorder(stream);
mediaRecorderRef.current.ondataavailable = (event) => {
if (event.data.size > 0) audioChunksRef.current.push(event.data);
};
mediaRecorderRef.current.onstop = async () => {
const rawBlob = new Blob(audioChunksRef.current);
const audioBuffer = await decodeAudioBlob(rawBlob);
rawAudioBuffer.current = audioBuffer;
const wavBlob = bufferToWave(audioBuffer, 0, audioBuffer.length);
setAudioBlob(wavBlob);
setAudioUrl(URL.createObjectURL(wavBlob));
const wf = downsample(audioBuffer.getChannelData(0));
setTrimWaveform(wf);
setTrimDuration(audioBuffer.duration);
setAppState('trimming');
};
mediaRecorderRef.current.start();
audioContextRef.current = new (window.AudioContext || window.webkitAudioContext)();
analyserRef.current = audioContextRef.current.createAnalyser();
analyserRef.current.fftSize = 2048;
sourceRef.current = audioContextRef.current.createMediaStreamSource(stream);
sourceRef.current.connect(analyserRef.current);
drawWaveform();
timerRef.current = setInterval(() => setRecordingTime((prev) => prev + 1), 1000);
} catch (err) {
console.error("Microphone access error:", err);
alert("Microphone access denied or unavailable.");
setAppState('upload');
setIsRecording(false);
}
};
const drawWaveform = () => {
if (!canvasRef.current || !analyserRef.current) {
animationFrameRef.current = requestAnimationFrame(drawWaveform);
return;
}
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
const analyser = analyserRef.current;
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
analyser.getByteTimeDomainData(dataArray);
ctx.fillStyle = 'rgba(11, 15, 25, 1)';
ctx.fillRect(0, 0, canvas.width, canvas.height);
ctx.lineWidth = 3;
ctx.strokeStyle = '#06b6d4';
ctx.shadowBlur = 10;
ctx.shadowColor = '#06b6d4';
ctx.beginPath();
const sliceWidth = canvas.width / bufferLength;
let x = 0;
for (let i = 0; i < bufferLength; i++) {
const v = dataArray[i] / 128.0;
const y = v * canvas.height / 2;
i === 0 ? ctx.moveTo(x, y) : ctx.lineTo(x, y);
x += sliceWidth;
}
ctx.lineTo(canvas.width, canvas.height / 2);
ctx.stroke();
animationFrameRef.current = requestAnimationFrame(drawWaveform);
};
const handleFinishRecording = () => {
setAppState('analyzing'); // will be overridden to 'trimming' once audio decoded in onstop
stopRecording();
};
const handleManualUpload = () => {
const input = document.createElement('input');
input.type = 'file';
input.accept = 'audio/*';
input.onchange = async (e) => {
const file = e.target.files[0];
if (!file) return;
if (!patientData.dogId) { alert('Please enter a Dog ID first.'); return; }
const audioBuffer = await decodeAudioBlob(file);
rawAudioBuffer.current = audioBuffer;
const wavBlob = bufferToWave(audioBuffer, 0, audioBuffer.length);
setAudioBlob(wavBlob);
setAudioUrl(URL.createObjectURL(wavBlob));
const wf = downsample(audioBuffer.getChannelData(0));
setTrimWaveform(wf);
setTrimDuration(audioBuffer.duration);
setAppState('trimming');
};
input.click();
};
const handleAnalyzeTrimmed = (startFrac, endFrac) => {
const ab = rawAudioBuffer.current;
if (!ab) return;
const startSample = Math.floor(startFrac * ab.length);
const numSamples = Math.floor((endFrac - startFrac) * ab.length);
const trimmedBlob = bufferToWave(ab, startSample, numSamples);
// Update the stored audio blob/url to the trimmed version
if (audioUrl) URL.revokeObjectURL(audioUrl);
const newUrl = URL.createObjectURL(trimmedBlob);
setAudioBlob(trimmedBlob);
setAudioUrl(newUrl);
setAppState('analyzing');
sendToBackend(trimmedBlob);
};
const handleSkipTrim = () => {
setAppState('analyzing');
sendToBackend(audioBlob);
};
const sendToBackend = async (audioBlob) => {
try {
const formData = new FormData();
formData.append('file', audioBlob, 'recording.wav');
const response = await fetch(API_URL, { method: "POST", body: formData });
const result = await response.json();
console.log("Backend response:", result);
if (result.error) throw new Error(result.error);
let bpmStatus = "Normal";
let bpmColor = "var(--success)";
if (result.bpm > 140) { bpmStatus = "High (Tachycardia?)"; bpmColor = "var(--danger)"; }
else if (result.bpm < 60 && result.bpm > 0) { bpmStatus = "Low (Bradycardia?)"; bpmColor = "var(--warning)"; }
else if (result.bpm === 0) { bpmStatus = "Undetected"; bpmColor = "var(--text-secondary)"; }
const waveformData = result.waveform.map((amp, idx) => ({ time: idx, amplitude: amp }));
setAnalysisResult({
...result,
bpmStatus,
bpmColor,
waveformData,
});
setAppState('dashboard');
} catch (error) {
console.error("Analysis error:", error);
alert(`Analysis failed: ${error.message}\n\nCheck if api.py is running.`);
resetApp();
}
};
const downloadAudio = () => {
if (!audioBlob) return;
const url = URL.createObjectURL(audioBlob);
const a = document.createElement('a');
a.href = url;
a.download = `heart_sound_${patientData.dogId}_${Date.now()}.wav`;
a.click();
URL.revokeObjectURL(url);
};
const downloadReport = async () => {
if (!analysisResult) return;
const r = analysisResult;
const ai = r.ai_classification;
const now = new Date().toLocaleString();
const isMurmur = ai.is_disease;
const pdf = new jsPDF('p', 'mm', 'a4');
const W = 210, H = 297;
const marginL = 18, marginR = 18;
const contentW = W - marginL - marginR;
let y = 0;
// === HEADER BAND ===
pdf.setFillColor(15, 23, 42);
pdf.rect(0, 0, W, 38, 'F');
// Accent line
pdf.setFillColor(isMurmur ? 239 : 6, isMurmur ? 68 : 182, isMurmur ? 68 : 212);
pdf.rect(0, 38, W, 2, 'F');
// Title
pdf.setTextColor(255, 255, 255);
pdf.setFont('helvetica', 'bold'); pdf.setFontSize(20);
pdf.text('CardioScreen AI', marginL, 18);
pdf.setFont('helvetica', 'normal'); pdf.setFontSize(10);
pdf.setTextColor(148, 163, 184);
pdf.text('Canine Cardiac Screening Report', marginL, 26);
// Date right-aligned
pdf.setFontSize(9); pdf.setTextColor(148, 163, 184);
pdf.text(now, W - marginR, 18, { align: 'right' });
pdf.text(`Patient: ${patientData.dogId}`, W - marginR, 26, { align: 'right' });
y = 48;
// === RESULT BANNER ===
pdf.setFillColor(isMurmur ? 254 : 240, isMurmur ? 242 : 253, isMurmur ? 242 : 250);
pdf.roundedRect(marginL, y, contentW, 28, 3, 3, 'F');
pdf.setDrawColor(isMurmur ? 239 : 34, isMurmur ? 68 : 197, isMurmur ? 68 : 94);
pdf.roundedRect(marginL, y, contentW, 28, 3, 3, 'S');
pdf.setFontSize(16); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(isMurmur ? 185 : 21, isMurmur ? 28 : 128, isMurmur ? 28 : 61);
pdf.text(isMurmur ? '⚠ MURMUR DETECTED' : '✓ NORMAL HEART SOUND', marginL + 8, y + 12);
pdf.setFontSize(10); pdf.setFont('helvetica', 'normal');
pdf.setTextColor(100, 100, 100);
pdf.text(`Confidence: ${(ai.confidence * 100).toFixed(1)}%`, marginL + 8, y + 22);
// BPM right side
pdf.setFontSize(22); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(isMurmur ? 185 : 21, isMurmur ? 28 : 128, isMurmur ? 28 : 61);
pdf.text(`${r.bpm}`, W - marginR - 30, y + 14, { align: 'right' });
pdf.setFontSize(9); pdf.setFont('helvetica', 'normal');
pdf.setTextColor(100, 100, 100);
pdf.text('BPM', W - marginR - 8, y + 14, { align: 'right' });
pdf.text(`${r.bpmStatus}`, W - marginR - 8, y + 22, { align: 'right' });
y += 36;
// === PATIENT INFO TABLE ===
pdf.setFontSize(11); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(30, 41, 59);
pdf.text('Patient Information', marginL, y);
y += 6;
pdf.setFillColor(248, 250, 252);
pdf.rect(marginL, y, contentW, 22, 'F');
pdf.setDrawColor(226, 232, 240);
pdf.rect(marginL, y, contentW, 22, 'S');
pdf.setFontSize(9); pdf.setFont('helvetica', 'normal');
pdf.setTextColor(71, 85, 105);
const col1 = marginL + 5, col2 = marginL + 50, col3 = marginL + 95, col4 = marginL + 130;
pdf.text('Dog ID:', col1, y + 8); pdf.setFont('helvetica', 'bold'); pdf.setTextColor(30, 41, 59); pdf.text(patientData.dogId, col1, y + 15);
pdf.setFont('helvetica', 'normal'); pdf.setTextColor(71, 85, 105);
pdf.text('Breed:', col2, y + 8); pdf.setFont('helvetica', 'bold'); pdf.setTextColor(30, 41, 59); pdf.text(patientData.breed || 'N/A', col2, y + 15);
pdf.setFont('helvetica', 'normal'); pdf.setTextColor(71, 85, 105);
pdf.text('Age:', col3, y + 8); pdf.setFont('helvetica', 'bold'); pdf.setTextColor(30, 41, 59); pdf.text(patientData.age ? `${patientData.age} years` : 'N/A', col3, y + 15);
pdf.setFont('helvetica', 'normal'); pdf.setTextColor(71, 85, 105);
pdf.text('Duration:', col4, y + 8); pdf.setFont('helvetica', 'bold'); pdf.setTextColor(30, 41, 59); pdf.text(`${r.duration_seconds}s (${r.heartbeat_count} beats)`, col4, y + 15);
y += 30;
// === WAVEFORM IMAGE ===
pdf.setFontSize(11); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(30, 41, 59);
pdf.text('Phonocardiogram', marginL, y);
y += 4;
if (waveformCanvasRef.current) {
try {
const imgData = waveformCanvasRef.current.toDataURL('image/png');
const imgW = contentW;
const imgH = imgW * 0.35;
pdf.addImage(imgData, 'PNG', marginL, y, imgW, imgH);
y += imgH + 4;
} catch (e) { console.warn('Could not capture waveform:', e); y += 4; }
} else { y += 4; }
// === PROBABILITY BREAKDOWN ===
pdf.setFontSize(11); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(30, 41, 59);
pdf.text('AI Classification', marginL, y);
y += 6;
ai.all_classes.forEach(cls => {
const pct = (cls.probability * 100).toFixed(1);
const isM = cls.label.toLowerCase().includes('murmur');
pdf.setFontSize(9); pdf.setFont('helvetica', 'normal');
pdf.setTextColor(71, 85, 105);
pdf.text(`${cls.label}:`, marginL + 5, y + 4);
pdf.setFont('helvetica', 'bold');
pdf.text(`${pct}%`, marginL + 35, y + 4);
// Bar background
pdf.setFillColor(226, 232, 240);
pdf.roundedRect(marginL + 52, y, contentW - 60, 6, 2, 2, 'F');
// Bar fill
const barW = Math.max(2, (cls.probability) * (contentW - 60));
pdf.setFillColor(isM ? 239 : 34, isM ? 68 : 197, isM ? 68 : 94);
pdf.roundedRect(marginL + 52, y, barW, 6, 2, 2, 'F');
y += 12;
});
y += 4;
// === FEATURE DETAILS ===
pdf.setFontSize(11); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(30, 41, 59);
pdf.text('Signal Analysis Features', marginL, y);
y += 5;
pdf.setFillColor(248, 250, 252);
pdf.rect(marginL, y, contentW, 8, 'F');
pdf.setDrawColor(226, 232, 240);
pdf.rect(marginL, y, contentW, 8, 'S');
pdf.setFontSize(8); pdf.setFont('helvetica', 'normal');
pdf.setTextColor(71, 85, 105);
pdf.text(ai.details || '', marginL + 4, y + 5.5);
y += 16;
// === DISCLAIMER ===
pdf.setFillColor(255, 251, 235);
pdf.roundedRect(marginL, y, contentW, 22, 2, 2, 'F');
pdf.setDrawColor(251, 191, 36);
pdf.roundedRect(marginL, y, contentW, 22, 2, 2, 'S');
pdf.setFontSize(8); pdf.setFont('helvetica', 'bold');
pdf.setTextColor(146, 64, 14);
pdf.text('IMPORTANT NOTICE', marginL + 5, y + 6);
pdf.setFont('helvetica', 'normal'); pdf.setFontSize(7.5);
pdf.setTextColor(120, 53, 15);
pdf.text('This is an AI-assisted screening tool for preliminary cardiac assessment. Results are NOT diagnostic.', marginL + 5, y + 12);
pdf.text('All findings should be confirmed by a veterinary cardiologist via echocardiography.', marginL + 5, y + 17);
y += 28;
// === FOOTER ===
pdf.setFontSize(7); pdf.setTextColor(148, 163, 184);
pdf.text('Model: CardioScreen Logistic Regression Classifier · Trained on: VetCPD + Hannover Vet School (21 canine recordings)', marginL, H - 12);
pdf.text(`Generated: ${now}`, W - marginR, H - 12, { align: 'right' });
pdf.save(`screening_${patientData.dogId}_${Date.now()}.pdf`);
};
const resetApp = () => {
stopRecording();
if (audioUrl) URL.revokeObjectURL(audioUrl);
setAppState('upload');
setPatientData({ dogId: '', breed: '', age: '' });
setAnalysisResult(null);
setAudioBlob(null);
setAudioUrl(null);
setTrimWaveform(null);
setTrimDuration(0);
rawAudioBuffer.current = null;
};
  // NOTE(review): the JSX element tags in this return appear to have been
  // stripped during extraction — only text nodes, embedded expressions and
  // JSX comments remain, so the block below is not valid syntax as-is. The
  // original markup must be restored before this file can compile.
  return (
    {/* Sidebar */}
    {/* Mobile Header */}
    {/* Mobile nav overlay */}
    {/* Main Content */}
    {/* VIEW: UPLOAD / RECORD */}
    {(appState === 'upload' || appState === 'recording') && (
    New Patient Scan
    Capture phonocardiogram via stethoscope + microphone
    Patient Details
    setPatientData({ ...patientData, dogId: e.target.value })} placeholder="e.g. DOG-001" disabled={appState === 'recording'}
    style={{ width: '100%', background: 'var(--bg-input)', border: '1px solid var(--border-color)', padding: '10px 12px', borderRadius: '8px', color: 'var(--text-primary)', fontSize: '0.9rem', outline: 'none' }} />
    setPatientData({ ...patientData, breed: e.target.value })} placeholder="e.g. German Shepherd" disabled={appState === 'recording'}
    style={{ width: '100%', background: 'var(--bg-input)', border: '1px solid var(--border-color)', padding: '10px 12px', borderRadius: '8px', color: 'var(--text-primary)', fontSize: '0.9rem', outline: 'none' }} />
    setPatientData({ ...patientData, age: e.target.value })} placeholder="e.g. 7" disabled={appState === 'recording'}
    style={{ width: '100%', background: 'var(--bg-input)', border: '1px solid var(--border-color)', padding: '10px 12px', borderRadius: '8px', color: 'var(--text-primary)', fontSize: '0.9rem', outline: 'none' }} />
    {appState === 'upload' ? (
    Start Live Recording
    Record from stethoscope microphone
    Upload Audio File
    WAV, MP3 or any audio format
    ) : (
    00:{recordingTime.toString().padStart(2, '0')}
    )}
    )}
    {/* VIEW: TRIMMING */}
    {appState === 'trimming' && trimWaveform && (
    Trim Recording
    Remove noise before the stethoscope was placed correctly
    Select Clean Section
    Full recording: {trimDuration.toFixed(1)}s · Drag handles to select just the heart sounds
    )}
    {/* VIEW: ANALYZING */}
    {appState === 'analyzing' && (
    Analyzing Heart Sound...
    Running cardiac screening via pre-trained AI model on your local machine.
    )}
    {/* VIEW: DASHBOARD */}
    {appState === 'dashboard' && analysisResult && (
    Screening Result
    Patient: {patientData.dogId} | Breed: {patientData.breed || 'N/A'} | Age: {patientData.age || 'N/A'}
    {audioBlob && (
    )}
    {/* Main Result Card */}
    {/* Header: Diagnosis + BPM */}
    AI Cardiac Screening
    {analysisResult.ai_classification.is_disease ? (
    ) : (
    )}
    {analysisResult.clinical_summary}
    {/* BPM + Heartbeat Count */}
    {analysisResult.bpm}
    BPM
    {analysisResult.bpmStatus}
    {analysisResult.heartbeat_count}
    Beats
    in {analysisResult.duration_seconds}s
    {/* Audio Player */}
    {audioUrl && (
    )}
    {/* Waveform with time axis + beat markers */}
    Heartbeat peaks ({analysisResult.peak_times_seconds?.length ?? 0} detected)
    Duration: {analysisResult.duration_seconds}s
    {/* AI Probability Breakdown */}
    AI Probability Breakdown
    CardioScreen · Logistic Regression · 21 recordings
    {analysisResult.ai_classification.all_classes.map((cls, idx) => {
    const pct = (cls.probability * 100).toFixed(1);
    const isTop = cls.label === analysisResult.ai_classification.label;
    const isMurmur = cls.label.toLowerCase().includes('murmur') || cls.label.toLowerCase().includes('abnormal');
    const barColor = isMurmur ? 'var(--danger)' : 'var(--success)';
    return (
    {cls.label} {isTop && '★'}
    {pct}%
    );
    })}
    {/* Canine Reference Table */}
    Normal Heart Rate (Canine)
    | Dog Size |
    Normal Range |
    | Large (60lb+) | 60 – 100 BPM |
    | Medium (20–60lb) | 80 – 120 BPM |
    | Small (<20lb) | 100 – 160 BPM |
    | Puppies | Up to 220 BPM |
    Murmur Grading (Levine Scale)
    Grade I: Very faint
    Grade II: Soft, easily heard
    Grade III: Intermediate
    Grade IV: Loud, no thrill
    Grade V: With palpable thrill
    Grade VI: Heard without stethoscope
    )}
  );
}
export default App;