import React, { useState, useRef, useEffect } from 'react';
import { MessageCircle, X, Send, Bot, Loader2, AlertCircle } from 'lucide-react';
import { CHAT_SYSTEM_PROMPT } from '../config/geminiConfig';

/** A single transcript entry. `sender: 'error'` renders as an inline error bubble. */
interface Message {
  id: string;
  text: string;
  sender: 'user' | 'bot' | 'error';
  timestamp: Date;
}

/**
 * Floating chat widget that proxies questions to Gemini through the FastAPI
 * backend (`POST /api/chat`). Maintains the transcript locally, auto-scrolls
 * on new messages, and surfaces backend failures as 'error' bubbles.
 */
export function ChatBot() {
  const [isOpen, setIsOpen] = useState(false);
  // Explicit Message[] so `sender` keeps its literal union ('user' | 'bot' | 'error')
  // instead of widening to plain string from the seed literal.
  const [messages, setMessages] = useState<Message[]>([
    {
      id: '1',
      text: "Hello! I'm Pathora AI — your colposcopy expert assistant. Ask me anything about examination techniques, findings interpretation, or management guidelines.",
      sender: 'bot',
      timestamp: new Date(),
    },
  ]);
  const [inputMessage, setInputMessage] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  // Typed ref so `current?.scrollIntoView` checks under strictNullChecks.
  const messagesEndRef = useRef<HTMLDivElement | null>(null);

  // Auto-scroll to bottom when a new message arrives or the typing indicator toggles.
  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
  }, [messages, isLoading]);

  // ── Call Backend Chat API (routes through FastAPI backend to Gemini) ────
  /**
   * Sends `userText` plus prior conversation turns to the backend.
   * @returns the model's reply text.
   * @throws Error with the backend's `detail` (or HTTP status) on failure,
   *         or when the response envelope is not `{ status: 'success', response }`.
   */
  const callGemini = async (history: Message[], userText: string): Promise<string> => {
    // Build conversation history for the backend; error bubbles are excluded.
    // NOTE(review): role is sent as 'bot' (not Gemini's usual 'model') — this
    // matches whatever the FastAPI layer expects; confirm against the backend.
    const chatHistory = history
      .filter(m => m.sender === 'user' || m.sender === 'bot')
      .map(m => ({
        role: m.sender === 'bot' ? 'bot' : 'user',
        text: m.text,
      }));

    const requestBody = {
      message: userText,
      history: chatHistory,
      system_prompt: CHAT_SYSTEM_PROMPT,
    };

    const res = await fetch('/api/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(requestBody),
    });

    if (!res.ok) {
      // Prefer the FastAPI-style `detail` message; tolerate non-JSON error bodies.
      const errData = await res.json().catch(() => ({}));
      throw new Error(errData?.detail || `Backend error: ${res.status}`);
    }

    const data = await res.json();
    if (data.status !== 'success' || !data.response) {
      throw new Error('Invalid response from backend');
    }
    return data.response;
  };

  // ── Send handler ─────────────────────────────────────────────────────────
  const handleSendMessage = async () => {
    const trimmed = inputMessage.trim();
    if (!trimmed || isLoading) return;

    const userMsg: Message = {
      id: Date.now().toString(),
      text: trimmed,
      sender: 'user',
      timestamp: new Date(),
    };
    setMessages(prev => [...prev, userMsg]);
    setInputMessage('');
    setIsLoading(true);

    try {
      // Pass current messages (before adding userMsg) as history for context;
      // the new user turn travels separately as `userText`.
      const reply = await callGemini(messages, trimmed);
      setMessages(prev => [
        ...prev,
        { id: (Date.now() + 1).toString(), text: reply, sender: 'bot', timestamp: new Date() },
      ]);
    } catch (err: unknown) {
      // Narrow instead of `any`; keep the original fallback text verbatim.
      const text =
        err instanceof Error && err.message
          ? err.message
          : 'Something went wrong. Please try again.';
      setMessages(prev => [
        ...prev,
        {
          id: (Date.now() + 1).toString(),
          text,
          sender: 'error',
          timestamp: new Date(),
        },
      ]);
    } finally {
      setIsLoading(false);
    }
  };

  // Enter sends; Shift+Enter is left alone (reserved for newlines).
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      handleSendMessage();
    }
  };

  // ── Render ────────────────────────────────────────────────────────────────
  // NOTE(review): the original JSX markup was garbled in the source this was
  // recovered from (element tags stripped, only text/attributes survived).
  // The structure below is reconstructed from those fragments — all visible
  // strings and the input's className are preserved exactly; verify the
  // container/bubble classes against the design system.
  return (
    <>
      {/* Toggle button */}
      <button
        onClick={() => setIsOpen(prev => !prev)}
        aria-label={isOpen ? 'Close chat' : 'Open chat'}
        className="fixed bottom-6 right-6 z-50 p-4 rounded-full bg-[#05998c] text-white shadow-lg hover:opacity-90 transition"
      >
        {isOpen ? <X className="w-6 h-6" /> : <MessageCircle className="w-6 h-6" />}
      </button>

      {/* Chat window */}
      {isOpen && (
        <div className="fixed bottom-24 right-6 z-50 w-96 max-w-[calc(100vw-3rem)] bg-white rounded-xl shadow-2xl flex flex-col overflow-hidden">
          {/* Header */}
          <div className="flex items-center gap-3 px-4 py-3 bg-[#05998c] text-white">
            <Bot className="w-6 h-6" />
            <div>
              <p className="font-semibold text-sm">Pathora AI</p>
              <p className="text-xs opacity-90">Colposcopy Expert Assistant</p>
            </div>
          </div>

          {/* Messages */}
          <div className="flex-1 h-96 overflow-y-auto p-4 space-y-3">
            {messages.map((message) => (
              <div
                key={message.id}
                className={message.sender === 'user' ? 'flex justify-end' : 'flex justify-start'}
              >
                {message.sender === 'error' ? (
                  <div className="flex items-start gap-2 max-w-[85%] px-3 py-2 rounded-lg bg-red-50 text-red-700 text-sm">
                    <AlertCircle className="w-4 h-4 mt-0.5 shrink-0" />
                    <span>{message.text}</span>
                  </div>
                ) : (
                  <div
                    className={
                      message.sender === 'user'
                        ? 'max-w-[85%] px-3 py-2 rounded-lg bg-[#05998c] text-white text-sm'
                        : 'max-w-[85%] px-3 py-2 rounded-lg bg-gray-100 text-gray-800 text-sm'
                    }
                  >
                    {message.text}
                  </div>
                )}
              </div>
            ))}

            {/* Typing indicator */}
            {isLoading && (
              <div className="flex items-center gap-2 text-sm text-gray-500">
                <Loader2 className="w-4 h-4 animate-spin" />
                Pathora AI is thinking…
              </div>
            )}
            {/* Sentinel for auto-scroll */}
            <div ref={messagesEndRef} />
          </div>

          {/* Input */}
          <div className="flex items-center gap-2 p-3 border-t border-gray-200">
            <input
              type="text"
              value={inputMessage}
              onChange={(e) => setInputMessage(e.target.value)}
              onKeyDown={handleKeyDown}
              disabled={isLoading}
              placeholder="Ask about colposcopy findings…"
              className="flex-1 px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-[#05998c] focus:border-transparent text-sm outline-none disabled:opacity-60"
            />
            <button
              onClick={handleSendMessage}
              disabled={isLoading || !inputMessage.trim()}
              aria-label="Send message"
              className="p-2 rounded-lg bg-[#05998c] text-white disabled:opacity-50"
            >
              <Send className="w-4 h-4" />
            </button>
          </div>
        </div>
      )}
    </>
  );
}