"use client";

// NOTE(review): this file arrived with every angle-bracket span stripped
// (all JSX tags and all generic type arguments were deleted). The hook/stream
// logic below is restored faithfully from the surviving source; the JSX markup
// is a minimal reconstruction — original class names/styling were lost and
// should be restored from version control.

import { useState, useRef, useEffect } from "react";
import {
  Globe,
  Send,
  Loader2,
  Plus,
  ChevronDown,
  ChevronRight,
} from "lucide-react";
import { Markdown } from "@/components/markdown";
import { cn } from "@/lib/utils";

/** One contiguous chunk of model "thinking" output. */
interface ReasoningBlock {
  content: string;
  /** Seconds spent producing this block; unset while it is still streaming. */
  duration?: number;
}

/** A tool call surfaced in the assistant's step list. */
interface ToolInvocation {
  toolName: string;
  // Restored: source had bare `Record` (type arguments stripped in transit).
  args: Record<string, unknown>;
  result?: string;
  status: "pending" | "running" | "complete" | "error";
}

// Each step can be a reasoning block or a tool invocation
type MessageStep =
  | { type: "reasoning"; block: ReasoningBlock }
  | { type: "tool"; invocation: ToolInvocation };

interface Message {
  id: string;
  role: "user" | "assistant";
  content: string;
  steps: MessageStep[];
}

/**
 * Shape of the SSE `data:` payloads emitted by /api/chat.
 * Typing the parsed JSON up front avoids an implicit `any` flowing through
 * the whole streaming loop. TODO confirm against the server implementation.
 */
type StreamEvent =
  | { type: "reasoning_start" }
  | { type: "reasoning"; content: string }
  | { type: "content"; content: string }
  | {
      type: "tool_call";
      name: string;
      args: Record<string, unknown>;
      status: ToolInvocation["status"];
      result?: string;
    };

/**
 * Collapsible "thinking" panel. Auto-opens while streaming; shows the elapsed
 * duration once the reasoning block is finalized.
 */
function ThinkingBlock({
  content,
  duration,
  isStreaming,
}: {
  content: string;
  duration?: number;
  isStreaming?: boolean;
}) {
  const [isOpen, setIsOpen] = useState(isStreaming);

  // Re-open whenever a new streaming phase begins.
  useEffect(() => {
    if (isStreaming) setIsOpen(true);
  }, [isStreaming]);

  // Render " · 3s" style suffixes; sub-second durations collapse to "<1s".
  const formatDuration = (s?: number) => {
    if (!s) return "";
    if (s < 1) return " · <1s";
    return ` · ${Math.round(s)}s`;
  };

  // NOTE(review): reconstructed markup — original tags were stripped.
  return (
    <div className="text-sm">
      <button
        type="button"
        className="flex items-center gap-1 text-muted-foreground"
        onClick={() => setIsOpen((open) => !open)}
      >
        {isOpen ? (
          <ChevronDown className="h-3 w-3" />
        ) : (
          <ChevronRight className="h-3 w-3" />
        )}
        <span>
          {isStreaming ? "Thinking…" : `Thought${formatDuration(duration)}`}
        </span>
      </button>
      {isOpen && (
        <div className="mt-1 border-l pl-3 text-muted-foreground">
          <Markdown>{content}</Markdown>
        </div>
      )}
    </div>
  );
}

/**
 * Collapsible tool-invocation row; spinner while pending/running, expandable
 * result once available.
 */
function ToolBlock({
  toolName,
  args,
  result,
  status,
}: {
  toolName: string;
  args: Record<string, unknown>;
  result?: string;
  status: string;
}) {
  const [isOpen, setIsOpen] = useState(false);
  const isRunning = status === "running" || status === "pending";
  // Prefer the human-readable query argument; fall back to the raw args blob.
  const query = (args.query as string) || JSON.stringify(args);

  // NOTE(review): reconstructed markup — original tags were stripped.
  return (
    <div className="text-sm">
      <button
        type="button"
        className="flex items-center gap-1 text-muted-foreground"
        onClick={() => setIsOpen((open) => !open)}
      >
        {isRunning ? (
          <Loader2 className="h-3 w-3 animate-spin" />
        ) : (
          <Globe className="h-3 w-3" />
        )}
        <span>
          {toolName}: {query}
        </span>
        {isOpen ? (
          <ChevronDown className="h-3 w-3" />
        ) : (
          <ChevronRight className="h-3 w-3" />
        )}
      </button>
      {isOpen && result && (
        <div className="mt-1 border-l pl-3 text-muted-foreground">
          <Markdown>{result}</Markdown>
        </div>
      )}
    </div>
  );
}

/**
 * Chat client: posts the conversation to /api/chat and incrementally renders
 * the SSE stream (reasoning blocks, tool calls, then answer content).
 */
export function Chat() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState("");
  const [isLoading, setIsLoading] = useState(false);
  const [searchEnabled, setSearchEnabled] = useState(false);
  const [streamingMessage, setStreamingMessage] = useState<Message | null>(
    null
  );
  const messagesEndRef = useRef<HTMLDivElement | null>(null);
  const textareaRef = useRef<HTMLTextAreaElement | null>(null);
  // Refs mirror state so the async submit handler reads fresh values instead
  // of stale closure captures.
  const messagesRef = useRef<Message[]>(messages);
  const searchEnabledRef = useRef<boolean>(searchEnabled);

  // crypto.randomUUID is unavailable in some non-secure contexts; fall back
  // to a timestamp+random id.
  const makeId = () => {
    try {
      return (
        globalThis.crypto?.randomUUID?.() ??
        `${Date.now()}-${Math.random().toString(16).slice(2)}`
      );
    } catch {
      return `${Date.now()}-${Math.random().toString(16).slice(2)}`;
    }
  };

  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  };

  useEffect(() => {
    scrollToBottom();
  }, [messages, streamingMessage]);

  useEffect(() => {
    messagesRef.current = messages;
  }, [messages]);

  useEffect(() => {
    searchEnabledRef.current = searchEnabled;
  }, [searchEnabled]);

  // Auto-grow the textarea with its content, capped at 160px.
  useEffect(() => {
    const textarea = textareaRef.current;
    if (textarea) {
      textarea.style.height = "auto";
      textarea.style.height = Math.min(textarea.scrollHeight, 160) + "px";
    }
  }, [input]);

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!input.trim() || isLoading) return;

    const userMessage: Message = {
      id: makeId(),
      role: "user",
      content: input.trim(),
      steps: [],
    };
    setMessages((prev) => [...prev, userMessage]);
    setInput("");
    setIsLoading(true);

    const assistantMessage: Message = {
      id: makeId(),
      role: "assistant",
      content: "",
      steps: [],
    };
    setStreamingMessage(assistantMessage);

    try {
      const messagesSnapshot = messagesRef.current;
      const searchEnabledSnapshot = searchEnabledRef.current;
      const response = await fetch("/api/chat", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          messages: [...messagesSnapshot, userMessage].map((m) => ({
            role: m.role,
            content: m.content,
          })),
          searchEnabled: searchEnabledSnapshot,
        }),
      });
      if (!response.ok) throw new Error("Failed to get response");

      const reader = response.body?.getReader();
      if (!reader) throw new Error("No reader available");

      const decoder = new TextDecoder();
      let buffer = "";
      let currentContent = "";
      let reasoningStart = 0;
      let inReasoning = false;
      let currentReasoningIdx = -1;

      // Stamp the elapsed duration onto the reasoning step we were filling.
      // (Previously copy-pasted in three places; factored into one helper.)
      const finalizeReasoning = () => {
        if (!inReasoning || reasoningStart <= 0) return;
        const duration = (Date.now() - reasoningStart) / 1000;
        setStreamingMessage((prev) => {
          if (!prev || currentReasoningIdx < 0) return prev;
          const steps = [...prev.steps];
          const step = steps[currentReasoningIdx];
          if (step?.type === "reasoning") {
            steps[currentReasoningIdx] = {
              type: "reasoning",
              block: { ...step.block, duration },
            };
          }
          return { ...prev, steps };
        });
      };

      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split("\n");
        buffer = lines.pop() || ""; // keep a trailing partial line for next read

        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const data = line.slice(6);
          if (data === "[DONE]") continue;
          try {
            const parsed = JSON.parse(data) as StreamEvent;
            if (parsed.type === "reasoning_start") {
              // Finalize previous reasoning block if any, reset for a new one.
              finalizeReasoning();
              inReasoning = false;
              currentReasoningIdx = -1;
            } else if (parsed.type === "reasoning") {
              if (!inReasoning) {
                inReasoning = true;
                reasoningStart = Date.now();
                // Add new reasoning step
                setStreamingMessage((prev) => {
                  if (!prev) return prev;
                  const newStep: MessageStep = {
                    type: "reasoning",
                    block: { content: parsed.content },
                  };
                  currentReasoningIdx = prev.steps.length;
                  return { ...prev, steps: [...prev.steps, newStep] };
                });
              } else {
                // Append to current reasoning block
                setStreamingMessage((prev) => {
                  if (!prev || currentReasoningIdx < 0) return prev;
                  const steps = [...prev.steps];
                  const step = steps[currentReasoningIdx];
                  if (step?.type === "reasoning") {
                    steps[currentReasoningIdx] = {
                      type: "reasoning",
                      block: {
                        ...step.block,
                        content: step.block.content + parsed.content,
                      },
                    };
                  }
                  return { ...prev, steps };
                });
              }
            } else if (parsed.type === "content") {
              // Finalize reasoning if we were in one
              finalizeReasoning();
              inReasoning = false;
              currentContent += parsed.content;
              setStreamingMessage((prev) =>
                prev ? { ...prev, content: currentContent } : prev
              );
            } else if (parsed.type === "tool_call") {
              setStreamingMessage((prev) => {
                if (!prev) return prev;
                // Find existing tool step by name and update it in place
                const existingIdx = prev.steps.findIndex(
                  (s) =>
                    s.type === "tool" && s.invocation.toolName === parsed.name
                );
                if (existingIdx >= 0) {
                  const steps = [...prev.steps];
                  const step = steps[existingIdx];
                  if (step.type === "tool") {
                    steps[existingIdx] = {
                      type: "tool",
                      invocation: {
                        ...step.invocation,
                        status: parsed.status,
                        result: parsed.result,
                      },
                    };
                  }
                  return { ...prev, steps };
                }
                // Add new tool step
                const newStep: MessageStep = {
                  type: "tool",
                  invocation: {
                    toolName: parsed.name,
                    args: parsed.args,
                    status: parsed.status,
                    result: parsed.result,
                  },
                };
                return { ...prev, steps: [...prev.steps, newStep] };
              });
            }
          } catch {
            // Ignore parse errors (partial/keep-alive frames)
          }
        }
      }

      // Finalize reasoning duration if stream ended while still in reasoning
      finalizeReasoning();

      // Promote the finished streaming message into the message list.
      setStreamingMessage((prev) => {
        if (prev) {
          // Use setTimeout to avoid nested state updates inside an updater
          setTimeout(() => {
            setMessages((msgs) => [...msgs, prev]);
          }, 0);
        }
        return null;
      });
    } catch (error) {
      console.error("Chat error:", error);
      setStreamingMessage(null);
    } finally {
      setIsLoading(false);
    }
  };

  // Enter sends; Shift+Enter inserts a newline.
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === "Enter" && !e.shiftKey) {
      e.preventDefault();
      handleSubmit(e);
    }
  };

  const clearChat = () => {
    setMessages([]);
    setStreamingMessage(null);
  };

  const hasMessages = messages.length > 0 || streamingMessage;

  // Render an assistant message: its step list (reasoning/tool blocks),
  // then the answer content, with a spinner while nothing has arrived yet.
  const renderMessage = (message: Message, isStreaming = false) => {
    const lastStep = message.steps[message.steps.length - 1];
    const isStreamingReasoning =
      isStreaming && lastStep?.type === "reasoning" && !lastStep.block.duration;
    const hasAnyContent = message.steps.length > 0 || message.content;

    // NOTE(review): reconstructed markup — original tags were stripped.
    return (
      <div className="space-y-2">
        {message.steps.map((step, idx) => {
          if (step.type === "reasoning") {
            const isLastReasoning = idx === message.steps.length - 1;
            return (
              <ThinkingBlock
                key={idx}
                content={step.block.content}
                duration={step.block.duration}
                isStreaming={isStreamingReasoning && isLastReasoning}
              />
            );
          } else {
            return (
              <ToolBlock
                key={idx}
                toolName={step.invocation.toolName}
                args={step.invocation.args}
                result={step.invocation.result}
                status={step.invocation.status}
              />
            );
          }
        })}
        {message.content && <Markdown>{message.content}</Markdown>}
        {isStreaming && !hasAnyContent && (
          <Loader2 className="h-4 w-4 animate-spin" />
        )}
      </div>
    );
  };

  // NOTE(review): the layout below is a reconstruction; the original JSX
  // (including all class names) was stripped from the source.
  return (
    <div className="flex h-screen flex-col">
      {/* Header */}
      <header className="flex items-center justify-between border-b px-4 py-2">
        <div>
          <div className="font-semibold">Teich AI</div>
          <div className="text-xs text-muted-foreground">
            Qwen3-4B-Thinking-2507-Claude-4.5-Opus
          </div>
        </div>
        {hasMessages && (
          <button type="button" onClick={clearChat} aria-label="New chat">
            <Plus className="h-4 w-4" />
          </button>
        )}
      </header>

      {/* Messages */}
      <div className="flex-1 overflow-y-auto px-4 py-3">
        {!hasMessages && (
          <div className="flex h-full flex-col items-center justify-center gap-3 text-center">
            <h1 className="text-2xl font-semibold">How can I help you?</h1>
            <p className="max-w-md text-sm text-muted-foreground">
              A reasoning model that thinks step by step. Enable web search for
              real-time information.
            </p>
            <div className="flex flex-wrap justify-center gap-2">
              {[
                "Explain quantum computing",
                "Write a Python function",
                "What's in the news?",
              ].map((prompt) => (
                <button
                  key={prompt}
                  type="button"
                  className="rounded-full border px-3 py-1 text-sm"
                  onClick={() => setInput(prompt)}
                >
                  {prompt}
                </button>
              ))}
            </div>
          </div>
        )}
        {messages.map((message) => (
          <div key={message.id} className="py-2">
            {message.role === "user" ? (
              <div className="ml-auto max-w-[80%] rounded-lg bg-muted px-3 py-2">
                <Markdown>{message.content}</Markdown>
              </div>
            ) : (
              renderMessage(message)
            )}
          </div>
        ))}
        {streamingMessage && (
          <div className="py-2">{renderMessage(streamingMessage, true)}</div>
        )}
        <div ref={messagesEndRef} />
      </div>

      {/* Input */}
      <form
        onSubmit={handleSubmit}
        className="flex items-end gap-2 border-t p-3"
      >
        <textarea
          ref={textareaRef}
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={handleKeyDown}
          rows={1}
          placeholder="Send a message…"
          className="flex-1 resize-none rounded-md border px-3 py-2"
        />
        <button
          type="button"
          aria-pressed={searchEnabled}
          aria-label="Toggle web search"
          className={cn("rounded-md border p-2", searchEnabled && "text-primary")}
          onClick={() => setSearchEnabled((v) => !v)}
        >
          <Globe className="h-4 w-4" />
        </button>
        <button
          type="submit"
          disabled={isLoading || !input.trim()}
          className="rounded-md border p-2"
          aria-label="Send"
        >
          {isLoading ? (
            <Loader2 className="h-4 w-4 animate-spin" />
          ) : (
            <Send className="h-4 w-4" />
          )}
        </button>
      </form>
    </div>
  );
}