// Chat: full-featured chat panel for the "SomAI" assistant.
// Visible responsibilities (from the logic below): multi-session management
// (switch/create/rename/delete via parent callbacks), message sending with an
// optional image attachment (read as a data URL), AI-generated quick-reply
// suggestions, text-to-speech playback (Gemini-generated audio with a browser
// speechSynthesis fallback), voice input via webkitSpeechRecognition,
// scroll-position tracking, and a "summary" view mode.
//
// NOTE(review): this file appears to have been mangled by an extraction step:
// generic type arguments (e.g. useRef<HTMLDivElement>, React.FC<ChatProps>,
// useState<string | null>) and most JSX element tags look stripped. The code
// below is preserved token-for-token; only comments and line breaks were
// added. Restore the original markup from version control before attempting
// functional changes.

import React, { useRef, useEffect, useState } from 'react';
import { MessageSquare, Send, ClipboardList, Activity, Mic, Volume2, Camera, X, ArrowLeft, Sparkles, ScanEye, Zap, Server, Plus, Trash2, Edit2, MessageCircle, Save, Loader2, ArrowDown } from 'lucide-react';
import { ChatMessage, AppMode, ChatSession } from '../types';
import { generateQuickReplies, generateSpeech } from '../services/geminiService';

// Props supplied by the parent, which owns session state and message I/O.
interface ChatProps {
  sessions: ChatSession[];
  currentSessionId: string;
  onSwitchSession: (id: string) => void;
  onCreateSession: () => void;
  onRenameSession: (id: string, newName: string) => void;
  onDeleteSession: (id: string) => void;
  onSendMessage: (input: string, image?: string) => void;
  isProcessing: boolean;
  statusMessage?: string; // New prop for transparency
  mode: AppMode;
  setMode: (mode: AppMode) => void;
  onSummarize: () => void;
  isSummarizing: boolean;
  chatSummary?: string;
}

// NOTE(review): presumably React.FC<ChatProps> before the generic was stripped — confirm.
const Chat: React.FC = ({ sessions, currentSessionId, onSwitchSession, onCreateSession, onRenameSession, onDeleteSession, onSendMessage, isProcessing, statusMessage, mode, setMode, onSummarize, isSummarizing, chatSummary }) => {
  // DOM refs (generic args stripped — presumably HTMLDivElement / HTMLInputElement; confirm).
  const scrollRef = useRef(null);    // bottom-of-list sentinel scrolled into view by scrollToBottom
  const containerRef = useRef(null); // scrollable messages container, read in handleScroll
  const fileInputRef = useRef(null); // hidden file input used for image upload
  const editInputRef = useRef(null); // session-rename input, focused when editing starts
  const [currentInput, setCurrentInput] = useState('');
  const [isListening, setIsListening] = useState(false); // speech-recognition active flag
  // TTS State
  const [speakingId, setSpeakingId] = useState(null);               // id of the message currently being spoken
  const [isGeneratingAudio, setIsGeneratingAudio] = useState(null); // id of the message whose audio is being fetched
  const audioContextRef = useRef(null); // lazily created AudioContext for Gemini TTS playback
  const sourceNodeRef = useRef(null);   // currently playing AudioBufferSourceNode, if any
  const [viewMode, setViewMode] = useState<'chat' | 'summary'>('chat');
  const [selectedImage, setSelectedImage] = useState(null); // data URL of the attached image, if any
  const [quickReplies, setQuickReplies] = useState([]);
  const [showVisionTip, setShowVisionTip] = useState(true);
  const [showScrollDown, setShowScrollDown] = useState(false);
  const [showSidebar, setShowSidebar] = useState(false);
  const [editingSessionId, setEditingSessionId] = useState(null);
  const [editName, setEditName] = useState('');

  // Fall back to the first session when the current id is stale/missing.
  const currentSession = sessions.find(s => s.id === currentSessionId) || sessions[0];
  const chatHistory = currentSession?.messages || [];

  // RESOURCE MANAGEMENT: Stop Audio on Page Hide
  useEffect(() => {
    const handleVisibilityChange = () => {
      if (document.hidden) { stopAudio(); }
    };
    document.addEventListener('visibilitychange', handleVisibilityChange);
    return () => document.removeEventListener('visibilitychange', handleVisibilityChange);
  }, []);

  // SCROLL LOGIC
  const scrollToBottom = (smooth = true) => {
    if (scrollRef.current) {
      scrollRef.current.scrollIntoView({ behavior: smooth ? 'smooth' : 'auto' });
    }
  };

  // Show the "scroll down" affordance once the user is >300px above the bottom.
  const handleScroll = () => {
    if (!containerRef.current) return;
    const { scrollTop, scrollHeight, clientHeight } = containerRef.current;
    const isNotAtBottom = scrollHeight - scrollTop - clientHeight > 300;
    setShowScrollDown(isNotAtBottom);
  };

  // Keep the view pinned to the newest content as messages/replies arrive.
  useEffect(() => { scrollToBottom(); }, [chatHistory, isProcessing, quickReplies]);

  // Refresh AI quick-reply suggestions after each completed turn; clear them while busy.
  useEffect(() => {
    if (chatHistory.length > 0 && !isProcessing) {
      generateQuickReplies(chatHistory).then(setQuickReplies);
    } else if (isProcessing) {
      setQuickReplies([]);
    }
  }, [chatHistory, isProcessing]);

  // Focus the rename input as soon as editing begins.
  useEffect(() => {
    if (editingSessionId && editInputRef.current) { editInputRef.current.focus(); }
  }, [editingSessionId]);

  // Wait for the parent to produce the summary, then switch views.
  const handleSummarizeClick = async () => { await onSummarize(); setViewMode('summary'); };

  // Read the chosen file as a data URL so it can be previewed and sent inline.
  const handleImageSelect = (e: React.ChangeEvent) => {
    const file = e.target.files?.[0];
    if (file) {
      const reader = new FileReader();
      reader.onloadend = () => { setSelectedImage(reader.result as string); };
      reader.readAsDataURL(file);
      setShowVisionTip(false);
    }
  };

  // Sending requires text or an attached image; resets composer state after.
  const handleSend = () => {
    if (!currentInput.trim() && !selectedImage) return;
    onSendMessage(currentInput, selectedImage || undefined);
    setCurrentInput('');
    setSelectedImage(null);
    setQuickReplies([]);
  };

  // --- TTS ENGINE ---
  // Stops both playback paths: the Web Audio source node (Gemini audio) and
  // any in-progress browser speechSynthesis utterance.
  const stopAudio = () => {
    if (sourceNodeRef.current) {
      sourceNodeRef.current.stop();
      sourceNodeRef.current = null;
    }
    if ('speechSynthesis' in window) { window.speechSynthesis.cancel(); }
    setSpeakingId(null);
  };

  // Fallback TTS via the browser's speechSynthesis, preferring a male English
  // voice, then any en-US voice. If the voice list is not loaded yet, defers
  // speaking until onvoiceschanged fires.
  // NOTE(review): assigning onvoiceschanged overwrites any previously
  // registered handler — confirm nothing else in the app relies on it.
  const browserTTS = (text: string, id: string) => {
    if ('speechSynthesis' in window) {
      const speak = () => {
        const utterance = new SpeechSynthesisUtterance(text);
        const voices = window.speechSynthesis.getVoices();
        const maleVoice = voices.find(v =>
          v.name.includes('Google US English Male') ||
          v.name.includes('Microsoft David') ||
          v.name.includes('Daniel') ||
          (v.name.includes('Male') && v.lang.includes('en'))
        );
        const anyEnglish = voices.find(v => v.lang.includes('en-US'));
        if (maleVoice) utterance.voice = maleVoice;
        else if (anyEnglish) utterance.voice = anyEnglish;
        utterance.rate = 1.05;
        utterance.pitch = 0.95;
        utterance.onend = () => setSpeakingId(null);
        utterance.onerror = () => setSpeakingId(null);
        setSpeakingId(id);
        window.speechSynthesis.speak(utterance);
      };
      if (window.speechSynthesis.getVoices().length === 0) {
        window.speechSynthesis.onvoiceschanged = speak;
      } else {
        speak();
      }
    } else {
      alert("TTS not supported.");
    }
  };

  // Primary TTS path: fetch base64 audio from generateSpeech (raced against an
  // 8s timeout), decode it through a 24 kHz AudioContext, and play it. Any
  // failure (timeout, no audio, decode error) falls back to browserTTS.
  // Clicking the same message again toggles playback off.
  // NOTE(review): the timeout's setTimeout is never cleared, and when the
  // timeout wins the race the generateSpeech promise is left unobserved —
  // consider AbortController/clearTimeout if this matters.
  const speakText = async (text: string, id: string) => {
    if (speakingId === id) { stopAudio(); return; }
    stopAudio();
    setIsGeneratingAudio(id);
    try {
      const timeoutPromise = new Promise((_, reject) =>
        setTimeout(() => reject(new Error("Timeout")), 8000)
      );
      const base64Audio = await Promise.race([ generateSpeech(text), timeoutPromise ]);
      if (base64Audio && typeof base64Audio === 'string') {
        // Decode base64 -> raw bytes for the Web Audio decoder.
        const binaryString = window.atob(base64Audio);
        const len = binaryString.length;
        const bytes = new Uint8Array(len);
        for (let i = 0; i < len; i++) { bytes[i] = binaryString.charCodeAt(i); }
        if (!audioContextRef.current) {
          // webkit prefix kept for older Safari.
          audioContextRef.current = new (window.AudioContext || (window as any).webkitAudioContext)({ sampleRate: 24000 });
        }
        const audioBuffer = await audioContextRef.current.decodeAudioData(bytes.buffer);
        const source = audioContextRef.current.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(audioContextRef.current.destination);
        source.onended = () => setSpeakingId(null);
        source.start(0);
        sourceNodeRef.current = source;
        setSpeakingId(id);
      } else {
        throw new Error("No audio returned");
      }
    } catch (e) {
      // Any failure in the Gemini path degrades to browser speechSynthesis.
      console.warn("TTS Fallback used.");
      browserTTS(text, id);
    } finally {
      setIsGeneratingAudio(null);
    }
  };

  // One-shot en-US voice input via the prefixed webkitSpeechRecognition API;
  // the final transcript replaces the composer text.
  // NOTE(review): when already listening this only flips the flag — the
  // active recognition instance is not stop()ped; confirm that is intended.
  const toggleListening = () => {
    if (!('webkitSpeechRecognition' in window)) {
      alert("Voice input is not supported in this browser.");
      return;
    }
    if (isListening) { setIsListening(false); return; }
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const recognition = new (window as any).webkitSpeechRecognition();
    recognition.continuous = false;
    recognition.interimResults = false;
    recognition.lang = 'en-US';
    recognition.onstart = () => setIsListening(true);
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    recognition.onresult = (event: any) => {
      const transcript = event.results[0][0].transcript;
      setCurrentInput(transcript);
      setIsListening(false);
    };
    recognition.onerror = () => setIsListening(false);
    recognition.onend = () => setIsListening(false);
    recognition.start();
  };

  // Enter rename mode for a session, seeding the input with its current name.
  const startEditing = (session: ChatSession) => {
    setEditingSessionId(session.id);
    setEditName(session.name);
  };

  // Commit the rename (non-empty names only) and leave rename mode either way.
  const saveSessionName = () => {
    if (editingSessionId && editName.trim()) {
      onRenameSession(editingSessionId, editName.trim());
    }
    setEditingSessionId(null);
  };

  if (viewMode === 'summary') {
    // NOTE(review): the JSX below is garbled — element tags appear stripped by
    // an extraction step. Preserved byte-for-byte; do not edit without the
    // original markup.
    return (

 Session Brief

{isSummarizing ? (

ANALYZING CONVERSATION...

) : (
{chatSummary || "No summary available."}
)}
);
  }

  // --- RESPONSIVE LAYOUT ---
  // NOTE(review): the JSX below is garbled — element tags appear stripped by
  // an extraction step. Preserved byte-for-byte; do not edit without the
  // original markup.
  return (
{/* SESSIONS SIDEBAR */}
{showSidebar && }
{sessions.map(s => (
{ onSwitchSession(s.id); setShowSidebar(false); }}> {editingSessionId === s.id ? (
e.stopPropagation()}> setEditName(e.target.value)} onKeyDown={e => e.key === 'Enter' && saveSessionName()} />
) : ( {s.name} )} {s.id === currentSessionId && !editingSessionId && (
)}
))}
{/* CHAT AREA */}
{/* FIXED HEADER FOR MOBILE */}
{/* MESSAGES */}
{chatHistory.length === 0 && (

Start a session with SomAI.

Vision EnabledSecurePrivate

)} {chatHistory.map((msg) => (
{msg.image && (
Upload
)}

{msg.text}

{msg.role === 'model' && msg.modelUsed && (
{msg.modelUsed.includes('Gemini') ? <> {msg.modelUsed} : <> {msg.modelUsed}}
)} {msg.role === 'model' && !isProcessing && ( )}
))} {/* LOADING INDICATOR */} {isProcessing && (
{statusMessage && {statusMessage}}
)}
{showScrollDown && ( )} {/* Quick Replies */} {!isProcessing && quickReplies.length > 0 && (
{quickReplies.map((reply, i) => ( ))}
)}
{showVisionTip && !selectedImage && (
SomAI Vision: Upload nutrition labels, skin symptoms, or reports for analysis.
)} {selectedImage && (
Selected
Image attached
)}
setCurrentInput(e.target.value)} onKeyDown={(e) => e.key === 'Enter' && handleSend()} placeholder={isListening ? "Listening..." : "Message SomAI..."} className="w-full bg-black/40 border border-white/10 rounded-xl py-3 pl-4 pr-10 text-white placeholder-gray-600 focus:border-neon-blue outline-none transition-all" />
);
};

export default Chat;