// Route LLM chat through backend API instead of direct Gemini calls (commit 9289ee7, nusaibah0110)
import React, { useState, useRef, useEffect } from 'react';
import { MessageCircle, X, Send, Bot, Loader2, AlertCircle } from 'lucide-react';
import { CHAT_SYSTEM_PROMPT } from '../config/geminiConfig';
/**
 * One entry in the chat transcript.
 * Fields are readonly: messages are created once and never mutated —
 * state updates always append a fresh object.
 */
interface Message {
  /** Unique key for React list rendering. */
  readonly id: string;
  /** Display text (user input, model reply, or error description). */
  readonly text: string;
  /** 'error' renders as a red alert bubble; 'bot' and 'user' as chat bubbles. */
  readonly sender: 'user' | 'bot' | 'error';
  /** Creation time; currently not rendered, kept for future use. */
  readonly timestamp: Date;
}
// Collision-resistant message id. Date.now() alone can repeat when two
// messages are created within the same millisecond (e.g. user message and
// an immediate error), which would break React keying.
const makeMessageId = (): string =>
  `${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;

/**
 * Floating chat widget for the Pathora AI assistant.
 *
 * Renders a fixed toggle button and, when open, a chat panel. All LLM
 * traffic is routed through the FastAPI backend at `/api/chat` (which in
 * turn calls Gemini) — the browser never talks to Gemini directly.
 */
export function ChatBot() {
  const [isOpen, setIsOpen] = useState(false);
  const [messages, setMessages] = useState<Message[]>([
    {
      id: '1',
      text: "Hello! I'm Pathora AI — your colposcopy expert assistant. Ask me anything about examination techniques, findings interpretation, or management guidelines.",
      sender: 'bot',
      timestamp: new Date(),
    },
  ]);
  const [inputMessage, setInputMessage] = useState('');
  const [isLoading, setIsLoading] = useState(false);
  const messagesEndRef = useRef<HTMLDivElement>(null);

  // Keep the newest message (or the typing indicator) scrolled into view.
  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
  }, [messages, isLoading]);

  /**
   * Sends the user's message plus conversation history to the backend chat
   * endpoint and returns the model's reply text.
   *
   * @param history  Transcript to send as context (error bubbles are UI-only
   *                 and are filtered out before sending).
   * @param userText The new user message.
   * @returns The model's reply.
   * @throws Error carrying the backend's `detail` message (or a generic
   *         status message) on a non-2xx response or a malformed payload.
   */
  const callGemini = async (history: Message[], userText: string): Promise<string> => {
    const chatHistory = history
      .filter(m => m.sender === 'user' || m.sender === 'bot')
      .map(m => ({
        role: m.sender === 'bot' ? 'bot' : 'user',
        text: m.text,
      }));

    const res = await fetch('/api/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        message: userText,
        history: chatHistory,
        system_prompt: CHAT_SYSTEM_PROMPT,
      }),
    });

    if (!res.ok) {
      // Error body may not be JSON (e.g. a proxy error page) — fall back to {}.
      const errData = await res.json().catch(() => ({}));
      throw new Error(errData?.detail || `Backend error: ${res.status}`);
    }

    const data = await res.json();
    if (data.status !== 'success' || !data.response) {
      throw new Error('Invalid response from backend');
    }
    return data.response;
  };

  /** Appends the typed message, calls the backend, and appends the reply (or an error bubble). */
  const handleSendMessage = async () => {
    const trimmed = inputMessage.trim();
    if (!trimmed || isLoading) return;

    const userMsg: Message = {
      id: makeMessageId(),
      text: trimmed,
      sender: 'user',
      timestamp: new Date(),
    };
    setMessages(prev => [...prev, userMsg]);
    setInputMessage('');
    setIsLoading(true);

    try {
      // `messages` is this render's state, i.e. the conversation *before*
      // userMsg was appended — exactly the history the backend expects.
      const reply = await callGemini(messages, trimmed);
      setMessages(prev => [
        ...prev,
        { id: makeMessageId(), text: reply, sender: 'bot', timestamp: new Date() },
      ]);
    } catch (err: unknown) {
      // Strict-mode catch variables are `unknown`; narrow before reading .message.
      const text =
        err instanceof Error && err.message
          ? err.message
          : 'Something went wrong. Please try again.';
      setMessages(prev => [
        ...prev,
        { id: makeMessageId(), text, sender: 'error', timestamp: new Date() },
      ]);
    } finally {
      setIsLoading(false);
    }
  };

  // Enter sends; Shift+Enter is left alone (reserved for future multiline input).
  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault();
      handleSendMessage();
    }
  };

  // ── Render ────────────────────────────────────────────────────────────────
  return (
    <>
      {/* Toggle button */}
      <button
        onClick={() => setIsOpen(!isOpen)}
        className="fixed bottom-6 right-6 bg-[#05998c] hover:bg-[#047569] text-white p-4 rounded-full shadow-lg transition-all duration-300 hover:scale-110 z-50"
        aria-label={isOpen ? 'Close AI Chat Assistant' : 'Open AI Chat Assistant'}
      >
        {isOpen ? <X className="w-6 h-6" /> : <MessageCircle className="w-6 h-6" />}
      </button>
      {/* Chat window */}
      {isOpen && (
        <div className="fixed bottom-20 right-6 w-80 h-[480px] bg-white rounded-xl shadow-2xl border border-gray-200 z-40 flex flex-col overflow-hidden">
          {/* Header */}
          <div className="bg-[#05998c] text-white p-4 flex items-center gap-3 shrink-0">
            <Bot className="w-6 h-6" />
            <div>
              <h3 className="font-semibold leading-tight">Pathora AI</h3>
              <p className="text-xs opacity-80">Colposcopy Expert Assistant</p>
            </div>
          </div>
          {/* Messages */}
          <div className="flex-1 overflow-y-auto p-4 space-y-3">
            {messages.map((message) => (
              <div
                key={message.id}
                className={`flex ${message.sender === 'user' ? 'justify-end' : 'justify-start'}`}
              >
                {message.sender === 'error' ? (
                  <div className="flex items-start gap-2 max-w-[85%] bg-red-50 border border-red-200 text-red-700 rounded-lg p-3 text-xs">
                    <AlertCircle className="w-4 h-4 shrink-0 mt-0.5" />
                    <span>{message.text}</span>
                  </div>
                ) : (
                  <div
                    className={`max-w-[85%] p-3 rounded-xl text-sm whitespace-pre-wrap leading-relaxed ${message.sender === 'user'
                      ? 'bg-[#05998c] text-white rounded-br-sm'
                      : 'bg-gray-100 text-gray-800 rounded-bl-sm'
                      }`}
                  >
                    {message.text}
                  </div>
                )}
              </div>
            ))}
            {/* Typing indicator */}
            {isLoading && (
              <div className="flex justify-start">
                <div className="bg-gray-100 rounded-xl rounded-bl-sm p-3 flex items-center gap-2 text-gray-500 text-sm">
                  <Loader2 className="w-4 h-4 animate-spin" />
                  <span>Pathora AI is thinking…</span>
                </div>
              </div>
            )}
            {/* Scroll anchor targeted by the auto-scroll effect above. */}
            <div ref={messagesEndRef} />
          </div>
          {/* Input */}
          <div className="p-3 border-t border-gray-200 shrink-0">
            <div className="flex gap-2">
              <input
                type="text"
                value={inputMessage}
                onChange={(e) => setInputMessage(e.target.value)}
                onKeyDown={handleKeyDown}
                disabled={isLoading}
                placeholder="Ask about colposcopy findings…"
                className="flex-1 px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-[#05998c] focus:border-transparent text-sm outline-none disabled:opacity-60"
              />
              <button
                onClick={handleSendMessage}
                disabled={!inputMessage.trim() || isLoading}
                className="bg-[#05998c] hover:bg-[#047569] disabled:opacity-50 disabled:cursor-not-allowed text-white p-2 rounded-lg transition-colors shrink-0"
                aria-label="Send message"
              >
                {isLoading ? <Loader2 className="w-4 h-4 animate-spin" /> : <Send className="w-4 h-4" />}
              </button>
            </div>
          </div>
        </div>
      )}
    </>
  );
}