import { useEffect, useState, useRef } from "react"; import Chat from "./components/Chat"; import ArrowRightIcon from "./components/icons/ArrowRightIcon"; import StopIcon from "./components/icons/StopIcon"; import Progress from "./components/Progress"; const IS_WEBGPU_AVAILABLE = !!navigator.gpu; const STICKY_SCROLL_THRESHOLD = 120; const EXAMPLES = [ "Triage: Patient with sudden chest pain and sweating.", "ABCDE assessment for an unconscious patient after a fall.", "Initial steps for a severe allergic reaction (Anaphylaxis).", ]; function App() { const worker = useRef(null); const textareaRef = useRef(null); const chatContainerRef = useRef(null); const [status, setStatus] = useState(null); const [error, setError] = useState(null); const [loadingMessage, setLoadingMessage] = useState(""); const [progressItems, setProgressItems] = useState([]); const [isRunning, setIsRunning] = useState(false); const [input, setInput] = useState(""); const [messages, setMessages] = useState([]); const [tps, setTps] = useState(null); const [numTokens, setNumTokens] = useState(null); function onEnter(message) { setMessages((prev) => [...prev, { role: "user", content: message }]); setTps(null); setIsRunning(true); setInput(""); } function onInterrupt() { worker.current.postMessage({ type: "interrupt" }); } useEffect(() => { resizeInput(); }, [input]); function resizeInput() { if (!textareaRef.current) return; const target = textareaRef.current; target.style.height = "auto"; const newHeight = Math.min(Math.max(target.scrollHeight, 24), 200); target.style.height = `${newHeight}px`; } useEffect(() => { if (!worker.current) { worker.current = new Worker(new URL("./worker.js", import.meta.url), { type: "module", }); worker.current.postMessage({ type: "check" }); } const onMessageReceived = (e) => { switch (e.data.status) { case "loading": setStatus("loading"); setLoadingMessage(e.data.data); break; case "initiate": setProgressItems((prev) => [...prev, e.data]); break; case "progress": 
setProgressItems((prev) => prev.map((item) => (item.file === e.data.file ? { ...item, ...e.data } : item)) ); break; case "done": setProgressItems((prev) => prev.filter((item) => item.file !== e.data.file)); break; case "ready": setStatus("ready"); break; case "start": setMessages((prev) => [...prev, { role: "assistant", content: "" }]); break; case "update": const { output, tps, numTokens } = e.data; setTps(tps); setNumTokens(numTokens); setMessages((prev) => { const cloned = [...prev]; const last = cloned.at(-1); cloned[cloned.length - 1] = { ...last, content: last.content + output }; return cloned; }); break; case "complete": setIsRunning(false); break; case "error": setError(e.data.data); break; } }; worker.current.addEventListener("message", onMessageReceived); return () => worker.current.removeEventListener("message", onMessageReceived); }, []); useEffect(() => { if (messages.filter((x) => x.role === "user").length === 0) return; if (messages.at(-1).role === "assistant") return; worker.current.postMessage({ type: "generate", data: messages }); }, [messages, isRunning]); useEffect(() => { if (!chatContainerRef.current || !isRunning) return; const element = chatContainerRef.current; if (element.scrollHeight - element.scrollTop - element.clientHeight < STICKY_SCROLL_THRESHOLD) { element.scrollTop = element.scrollHeight; } }, [messages, isRunning]); return IS_WEBGPU_AVAILABLE ? (
{status === null && messages.length === 0 && (
{/* Simple medical icon or logo */}
🏥

ER Assistant

AI-Powered Support for Emergency Room Physicians.
Optimized for rapid triage and ABCDE protocols.

This tool runs locally on your device via WebGPU. Patient data never leaves this browser.

)} {status === "loading" && (

Preparing Clinical Database...

{progressItems.map(({ file, progress, total }, i) => ( ))}
)} {status === "ready" && (
{messages.length === 0 && (

Select a scenario to start:

{EXAMPLES.map((msg, i) => (
onEnter(msg)} > {msg}
))}
)} {/* Performance statistics */}
{tps && `Speed: ${tps.toFixed(2)} tokens/sec`}
)}