"use client";

// NOTE(review): this file appears to have been flattened by an extraction
// step — original newlines were collapsed onto a few physical lines. The
// line breaks below are reconstructed at statement boundaries; verify
// against the original source file.

import { useState, useRef, useEffect } from 'react';
import { Send, User, Bot, Loader2 } from 'lucide-react';
import { api } from '@/lib/api';
import { useRouter } from 'next/navigation';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';

// Chat transcript + input UI. Streams the assistant's reply from the backend
// chat endpoint (SSE frames over a fetch body stream) into `streamingContent`,
// then finalizes it into `messages`.
//
// Props:
//   conversationId  - id of an existing conversation; falsy means a new
//                     conversation is created lazily on first send
//   initialMessages - messages to seed the transcript with (default [])
export default function ChatInterface({ conversationId, initialMessages = [] }) {
  const [messages, setMessages] = useState(initialMessages);     // finalized transcript
  const [input, setInput] = useState('');                        // controlled input value
  const [isLoading, setIsLoading] = useState(false);             // a send is in flight
  const messagesEndRef = useRef(null);                           // scroll sentinel at list bottom
  const router = useRouter();
  const [streamingContent, setStreamingContent] = useState('');  // partial assistant reply while streaming

  // Keep the newest content in view as messages arrive or the stream grows.
  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  };
  useEffect(() => {
    scrollToBottom();
  }, [messages, streamingContent]);

  // Send the current input: optimistically append the user message, POST to
  // the chat endpoint, incrementally decode/parse the SSE response body, and
  // finalize the accumulated assistant reply.
  const handleSubmit = async (e) => {
    e.preventDefault();
    // Ignore empty submissions and re-entrant submits while streaming.
    if (!input.trim() || isLoading) return;
    const userMessage = input.trim();
    setInput('');
    setMessages(prev => [...prev, { role: 'user', content: userMessage }]);
    setIsLoading(true);
    setStreamingContent('');
    let currentId = conversationId;
    try {
      // If no conversation ID, create one first
      if (!currentId) {
        const newConv = await api.createConversation();
        if (!newConv) throw new Error("Failed to create conversation");
        currentId = newConv.conversation_id;
        // Update URL without reloading
        window.history.pushState({}, '', `/c/${currentId}`);
        // Or use router.replace if prefer Next.js way, but we want to stay mounted
        // We might need to handle this carefully.
        // For now, let's just proceed with the currentId for the request.
      }
      // Start streaming request
      const response = await fetch(api.getChatEndpoint(), {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          conversation_id: currentId,
          user_message: [{ type: 'text', text: userMessage }]
        }),
      });
      if (!response.ok) throw new Error('Network response was not ok');

      // Read the response body as a byte stream and decode incrementally.
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let done = false;
      let fullAssistantMessage = '';
      while (!done) {
        const { value, done: doneReading } = await reader.read();
        done = doneReading;
        // `stream: true` holds back trailing partial multi-byte sequences
        // until the next chunk; final chunk flushes the decoder.
        const chunkValue = decoder.decode(value, { stream: !done });
        // Parse SSE format: "data: {...}\n\n"
        // NOTE(review): splitting per chunk assumes an SSE frame never
        // straddles a read boundary — a "data: {...}" frame split across two
        // reads would be silently dropped by the JSON.parse catch below.
        // Confirm the backend flushes whole frames, or buffer across chunks.
        const lines = chunkValue.split('\n\n');
        for (const line of lines) {
          if (line.startsWith('data: ')) {
            const dataStr = line.slice(6);
            if (dataStr === '[DONE]') {
              done = true;
              break;
            }
            try {
              const data = JSON.parse(dataStr);
              if (data.content) {
                fullAssistantMessage += data.content;
                setStreamingContent(fullAssistantMessage);
              }
              if (data.error) {
                console.error("Stream error:", data.error);
              }
            } catch (e) {
              // ignore partial json
            }
          }
        }
      }

      // Finalize message
      setMessages(prev => [...prev, { role: 'assistant', content: fullAssistantMessage }]);
      setStreamingContent('');
      setIsLoading(false);
      // If we created a new chat, update the sidebar without reloading the chat component
      if (!conversationId && currentId) {
        window.dispatchEvent(new Event('chat-update'));
        // Ensure the router knows about the new path for future navigations,
        // but do it silently if possible or just rely on pushState.
        // We already did pushState.
      } else {
        // Triggers sidebar update for existing chats too (timestamp update)
        window.dispatchEvent(new Event('chat-update'));
      }
    } catch (error) {
      console.error("Error sending message:", error);
      // Surface the failure in the transcript as a system message.
      setMessages(prev => [...prev, { role: 'system', content: "Error sending message. Please try again." }]);
      setIsLoading(false);
    }
  };

  return (
    // NOTE(review): the JSX below was garbled by the extraction step — the
    // element tags were stripped, leaving only text nodes, ReactMarkdown
    // render-prop fragments ({children}), and dangling braces. It cannot be
    // reconstructed from this view; restore the markup (and the component's
    // closing braces) from the original source file.
Ask anything about your data. The AI will analyze and provide insights.
{children} :
{children}
}
}}
>
{msg.content}