// 33-Nano-WebGPU / src/hooks/LLMContext.ts
// (Hugging Face file-page header preserved from the original paste:
//  Xenova (HF Staff) — "Upload 88 files", commit f672a5d verified;
//  raw / history / blame / contribute / delete; 980 Bytes)
import { createContext } from "react";
// Module-local monotonic counter backing createMessageId().
let messageIdCounter = 0;

/**
 * Returns a unique, monotonically increasing message id (0, 1, 2, …)
 * for the lifetime of this module. Not persisted across reloads.
 */
export function createMessageId(): number {
  const id = messageIdCounter;
  messageIdCounter += 1;
  return id;
}
/** One message in the chat transcript. */
export interface ChatMessage {
/** Stable identifier, assigned via createMessageId(). */
id: number;
/** Author of the message. */
role: "user" | "assistant" | "system";
/** Visible message text. */
content: string;
/** Optional chain-of-thought text — presumably only present on assistant messages; verify against the producer. */
reasoning?: string;
/** Time spent in the thinking phase — NOTE(review): seconds assumed from the name; confirm where it is set. */
thinkingSeconds?: number;
}
/**
 * Model-loading lifecycle, modeled as a discriminated union on `state`.
 * "loading" may carry a progress value (range not specified here —
 * presumably 0–1 or 0–100; confirm in the provider) and a status message;
 * "error" carries a human-readable error string.
 */
export type LoadingStatus =
| { state: "idle" }
| { state: "loading"; progress?: number; message?: string }
| { state: "ready" }
| { state: "error"; error: string };
/** Toggle for the model's "thinking" (reasoning) phase. */
export type ThinkingMode = "enabled" | "disabled";
/** Chat state and actions exposed to consumers of LLMContext. */
export interface LLMContextValue {
/** Current model-loading state. */
status: LoadingStatus;
/** Full conversation transcript, in display order. */
messages: ChatMessage[];
/** True while a response is being generated. */
isGenerating: boolean;
/** Generation throughput — presumably tokens per second; confirm in the provider. */
tps: number;
/** Whether the thinking phase is enabled. */
thinkingMode: ThinkingMode;
/** Switches thinking on or off. */
setThinkingMode: (mode: ThinkingMode) => void;
/** Submits a user message for generation. */
send: (text: string) => void;
/** Aborts the in-flight generation, if any. */
stop: () => void;
/** Clears the conversation transcript. */
clearChat: () => void;
/** Replaces a message's content — NOTE(review): index presumably into `messages`; confirm in the provider. */
editMessage: (index: number, newContent: string) => void;
/** Re-runs generation for the message at the given index — semantics defined by the provider. */
retryMessage: (index: number) => void;
}
/**
 * React context carrying LLM chat state and actions.
 * Defaults to null when no provider is mounted, so consumers must
 * null-check (or assert) before use.
 */
export const LLMContext = createContext<LLMContextValue | null>(null);