import React, { useState, useRef, useEffect } from 'react';
import { AIChatMessage, User } from '../../types';
import { Bot, Mic, Square, Volume2, Send, Sparkles, Loader2, Image as ImageIcon, Trash2, X, StopCircle, Globe, Brain, Search, Copy, ChevronDown, ChevronRight, Camera } from 'lucide-react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { blobToBase64, base64ToUint8Array, decodePCM, cleanTextForTTS, compressImage } from '../../utils/mediaHelpers';
import { Toast, ToastState } from '../Toast';

interface ChatPanelProps {
  currentUser: User | null;
}

/**
 * AI chat panel: streams model responses over SSE from `/api/ai/chat`,
 * supports text / voice-recording / image attachments, optional "thinking"
 * and web-search modes, PCM audio playback with browser-TTS fallback, and
 * persists the last 50 messages to localStorage.
 */
export const ChatPanel: React.FC<ChatPanelProps> = ({ currentUser }) => {
  // Chat transcript, seeded from localStorage; falls back to a fresh
  // welcome message if storage is empty or unparsable.
  const [messages, setMessages] = useState<AIChatMessage[]>(() => {
    try {
      const saved = localStorage.getItem('ai_chat_history');
      return saved
        ? JSON.parse(saved)
        : [{ id: 'welcome', role: 'model', text: '你好!我是你的 AI 智能助教。', timestamp: Date.now() }];
    } catch (e) {
      return [{ id: 'welcome', role: 'model', text: '你好!', timestamp: Date.now() }];
    }
  });

  // Input States
  const [textInput, setTextInput] = useState('');
  const [isRecording, setIsRecording] = useState(false);
  const [isMobile, setIsMobile] = useState(false);

  // Config States
  const [enableThinking, setEnableThinking] = useState(false);
  const [enableSearch, setEnableSearch] = useState(false);

  // Attachments
  const [selectedImages, setSelectedImages] = useState<File[]>([]);
  const [audioAttachment, setAudioAttachment] = useState<string | null>(null); // Base64

  const [isChatProcessing, setIsChatProcessing] = useState(false);
  const [playingMessageId, setPlayingMessageId] = useState<string | null>(null);
  const [toast, setToast] = useState<ToastState>({ show: false, message: '', type: 'success' });
  // Per-message expanded/collapsed state of the "thinking" disclosure, keyed by message id.
  const [isThinkingExpanded, setIsThinkingExpanded] = useState<Record<string, boolean>>({});

  const mediaRecorderRef = useRef<MediaRecorder | null>(null);
  const audioChunksRef = useRef<Blob[]>([]);
  const audioContextRef = useRef<AudioContext | null>(null);
  const currentSourceRef = useRef<AudioBufferSourceNode | null>(null);
  const messagesEndRef = useRef<HTMLDivElement | null>(null);
  const scrollContainerRef = useRef<HTMLDivElement | null>(null);
  const fileInputRef = useRef<HTMLInputElement | null>(null);
  const cameraInputRef = useRef<HTMLInputElement | null>(null);
  const abortControllerRef = useRef<AbortController | null>(null);

  // Initialize AudioContext & Check Mobile
  useEffect(() => {
    // @ts-ignore — webkitAudioContext fallback for older Safari.
    const AudioCtor = window.AudioContext || window.webkitAudioContext;
    audioContextRef.current = new AudioCtor();
    const checkMobile = () => {
      const userAgent = navigator.userAgent || navigator.vendor || (window as any).opera;
      const mobile = /android|webos|iphone|ipad|ipod|blackberry|iemobile|opera mini/i.test(userAgent.toLowerCase());
      setIsMobile(mobile);
    };
    checkMobile();
    // Stop any in-flight audio (PCM source and speech synthesis) on unmount.
    return () => {
      stopPlayback();
      window.speechSynthesis.cancel();
    };
  }, []);

  // Persist messages: always keep the welcome message, plus at most the
  // 50 most recent others.
  useEffect(() => {
    try {
      const MAX_COUNT = 50;
      const welcome = messages.find(m => m.id === 'welcome');
      const others = messages.filter(m => m.id !== 'welcome');
      const recent = others.slice(-MAX_COUNT);
      const messagesToSave = (welcome ? [welcome] : []).concat(recent);
      localStorage.setItem('ai_chat_history', JSON.stringify(messagesToSave));
    } catch (e) {
      // Best-effort persistence: ignore quota/serialization failures.
    }
  }, [messages]);

  // SMART SCROLL LOGIC: auto-scroll only when the user is already near the
  // bottom, or right after the user sends a message.
  useEffect(() => {
    if (!scrollContainerRef.current || !messagesEndRef.current) return;
    const container = scrollContainerRef.current;
    const { scrollTop, scrollHeight, clientHeight } = container;
    const isNearBottom = scrollHeight - scrollTop - clientHeight < 150;
    const lastMsg = messages[messages.length - 1];
    const isUserMsg = lastMsg?.role === 'user';
    if (isNearBottom || (isUserMsg && !isChatProcessing)) {
      messagesEndRef.current.scrollIntoView({ behavior: 'smooth', block: 'end' });
    }
  }, [messages, isChatProcessing, isThinkingExpanded]);

  /** Stop PCM playback and any browser TTS, and clear the playing indicator. */
  const stopPlayback = () => {
    if (currentSourceRef.current) {
      try { currentSourceRef.current.stop(); } catch (e) { /* already stopped */ }
      currentSourceRef.current = null;
    }
    window.speechSynthesis.cancel();
    setPlayingMessageId(null);
  };

  /** Abort the in-flight streaming request, if any. */
  const handleStopGeneration = () => {
    if (abortControllerRef.current) {
      abortControllerRef.current.abort();
      abortControllerRef.current = null;
      setIsChatProcessing(false);
      setToast({ show: true, message: '已停止生成', type: 'error' });
    }
  };

  /**
   * Play a message's audio. Prefers the server-provided base64 PCM payload
   * (decoded via decodePCM through the Web Audio API); falls back to browser
   * SpeechSynthesis over the cleaned message text if PCM playback fails or
   * no audio is attached.
   */
  const playAudio = async (msg: AIChatMessage) => {
    stopPlayback();
    if (msg.audio) {
      try {
        if (!audioContextRef.current) {
          // @ts-ignore — webkitAudioContext fallback for older Safari.
          const AudioCtor = window.AudioContext || window.webkitAudioContext;
          audioContextRef.current = new AudioCtor();
        }
        // Autoplay policies suspend fresh contexts; resume before playing.
        if (audioContextRef.current?.state === 'suspended') {
          await audioContextRef.current.resume();
        }
        const bytes = base64ToUint8Array(msg.audio);
        const audioBuffer = decodePCM(bytes, audioContextRef.current!);
        const source = audioContextRef.current!.createBufferSource();
        source.buffer = audioBuffer;
        source.connect(audioContextRef.current!.destination);
        source.onended = () => setPlayingMessageId(null);
        source.start(0);
        currentSourceRef.current = source;
        setPlayingMessageId(msg.id);
        return;
      } catch (e) {
        console.warn("PCM Playback failed, falling back to Browser TTS", e);
      }
    }
    if (!msg.text) return;
    const cleanText = cleanTextForTTS(msg.text);
    const utterance = new SpeechSynthesisUtterance(cleanText);
    utterance.lang = 'zh-CN';
    utterance.onstart = () => setPlayingMessageId(msg.id);
    utterance.onend = () => setPlayingMessageId(null);
    utterance.onerror = () => setPlayingMessageId(null);
    window.speechSynthesis.speak(utterance);
  };

  /** Begin microphone capture; no-op if a voice attachment already exists. */
  const startRecording = async () => {
    if (audioAttachment) return;
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      const mediaRecorder = new MediaRecorder(stream);
      mediaRecorderRef.current = mediaRecorder;
      audioChunksRef.current = [];
      mediaRecorder.ondataavailable = (event) => {
        if (event.data.size > 0) audioChunksRef.current.push(event.data);
      };
      mediaRecorder.start();
      setIsRecording(true);
    } catch (e) {
      setToast({ show: true, message: '无法访问麦克风', type: 'error' });
    }
  };

  /**
   * Stop recording, assemble the chunks into a webm blob, store it as a
   * base64 attachment, and release the microphone tracks.
   */
  const stopRecording = () => {
    if (mediaRecorderRef.current && isRecording) {
      // Attach the onstop handler BEFORE calling stop(). (The stop event
      // fires asynchronously, so assigning it afterwards also happened to
      // work, but this ordering does not rely on that timing.)
      mediaRecorderRef.current.onstop = async () => {
        const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/webm' });
        const base64 = await blobToBase64(audioBlob);
        setAudioAttachment(base64);
        mediaRecorderRef.current?.stream.getTracks().forEach(track => track.stop());
      };
      mediaRecorderRef.current.stop();
      setIsRecording(false);
    }
  };

  /** Append newly picked files to the image attachment list. */
  const handleImageSelect = (e: React.ChangeEvent<HTMLInputElement>) => {
    if (e.target.files) {
      setSelectedImages(prev => [...prev, ...Array.from(e.target.files!)]);
    }
  };

  /** Copy message text to the clipboard and confirm via toast. */
  const handleCopy = (text: string) => {
    navigator.clipboard.writeText(text);
    setToast({ show: true, message: '已复制', type: 'success' });
  };

  /**
   * Send the current input (text, recorded audio, and/or images) to the
   * streaming chat endpoint and incrementally apply SSE events
   * (text / thinking / search / status / audio / error) to the placeholder
   * AI message.
   */
  const handleSubmit = async () => {
    if ((!textInput.trim() && !audioAttachment && selectedImages.length === 0) || isChatProcessing) return;
    stopPlayback();

    // Snapshot and clear the composer immediately so the UI feels responsive.
    const currentText = textInput;
    const currentAudio = audioAttachment;
    const currentImages = [...selectedImages];
    setTextInput('');
    setAudioAttachment(null);
    setSelectedImages([]);

    // Resume the AudioContext inside this user gesture so later TTS playback
    // is not blocked by autoplay policy.
    if (audioContextRef.current?.state === 'suspended') {
      try { await audioContextRef.current.resume(); } catch (e) { /* ignore */ }
    }

    setIsChatProcessing(true);
    abortControllerRef.current = new AbortController();
    const newAiMsgId = crypto.randomUUID();
    const newUserMsgId = crypto.randomUUID();
    try {
      const base64Images = await Promise.all(currentImages.map(f => compressImage(f)));
      const newUserMsg: AIChatMessage = {
        id: newUserMsgId,
        role: 'user',
        // NOTE(review): both ternary branches below are '' in the source as
        // recovered — the image-only label may have been lost; confirm intent.
        text: currentAudio ? '(语音消息)' : (currentText || (currentImages.length ? '' : '')),
        isAudioMessage: !!currentAudio,
        images: base64Images,
        timestamp: Date.now(),
      };
      // Placeholder AI message that streaming events will fill in.
      const newAiMsg: AIChatMessage = {
        id: newAiMsgId,
        role: 'model',
        text: '',
        thought: '',
        timestamp: Date.now(),
        isSearching: enableSearch,
      };
      setMessages(prev => [...prev, newUserMsg, newAiMsg]);
      setTimeout(() => {
        messagesEndRef.current?.scrollIntoView({ behavior: 'smooth', block: 'end' });
      }, 100);
      if (enableThinking) setIsThinkingExpanded(prev => ({ ...prev, [newAiMsgId]: true }));

      const response = await fetch('/api/ai/chat', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'x-user-username': currentUser?.username || '',
          'x-user-role': currentUser?.role || '',
          'x-school-id': currentUser?.schoolId || '',
        },
        body: JSON.stringify({
          text: currentText,
          audio: currentAudio,
          images: base64Images,
          // History from state as of this render — it excludes the message
          // being sent (that content travels in text/audio/images above).
          history: messages.filter(m => m.id !== 'welcome').map(m => ({ role: m.role, text: m.text })),
          enableThinking,
          enableSearch,
        }),
        signal: abortControllerRef.current.signal,
      });
      if (!response.ok) throw new Error(response.statusText);
      if (!response.body) throw new Error('No response body');

      // Parse the SSE stream: events are separated by blank lines ("\n\n");
      // the trailing partial chunk is carried over in `buffer`.
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let aiTextAccumulated = '';
      let aiThoughtAccumulated = '';
      let buffer = '';
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const parts = buffer.split('\n\n');
        buffer = parts.pop() || '';
        for (const line of parts) {
          if (line.startsWith('data: ')) {
            const jsonStr = line.replace('data: ', '').trim();
            try {
              const data = JSON.parse(jsonStr);
              if (data.type === 'text') {
                // First answer token after a thinking phase: collapse the
                // thinking disclosure automatically.
                if (aiTextAccumulated === '' && aiThoughtAccumulated !== '') {
                  setIsThinkingExpanded(prev => ({ ...prev, [newAiMsgId]: false }));
                }
                aiTextAccumulated += data.content;
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, text: aiTextAccumulated, isSearching: false } : m));
              } else if (data.type === 'thinking') {
                aiThoughtAccumulated += data.content;
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, thought: aiThoughtAccumulated } : m));
              } else if (data.type === 'search') {
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, isSearching: true } : m));
              } else if (data.type === 'status' && data.status === 'tts') {
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, isGeneratingAudio: true } : m));
              } else if (data.type === 'audio') {
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, audio: data.audio, isGeneratingAudio: false } : m));
                // Auto-play the synthesized audio as soon as it arrives.
                const tempMsg = { ...newAiMsg, text: aiTextAccumulated, audio: data.audio };
                playAudio(tempMsg);
              } else if (data.type === 'status' && data.ttsSkipped) {
                // Server skipped TTS; fall back to browser speech synthesis.
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, isGeneratingAudio: false } : m));
                if (aiTextAccumulated) {
                  const tempMsg = { ...newAiMsg, text: aiTextAccumulated };
                  playAudio(tempMsg);
                }
              } else if (data.type === 'error') {
                setMessages(prev => prev.map(m => m.id === newAiMsgId
                  ? { ...m, text: `⚠️ 错误: ${data.message}`, isGeneratingAudio: false, isSearching: false } : m));
              }
            } catch (e) {
              // Malformed SSE payload: skip this event and keep streaming.
            }
          }
        }
      }
    } catch (error: unknown) {
      // A user-initiated abort is not an error; anything else surfaces a
      // generic failure message in the AI bubble.
      if (!(error instanceof Error) || error.name !== 'AbortError') {
        setMessages(prev => prev.map(m => m.id === newAiMsgId
          ? { ...m, text: '抱歉,连接断开或发生错误。', isSearching: false } : m));
      }
    } finally {
      setMessages(prev => prev.map(m => m.id === newAiMsgId
        ? { ...m, isSearching: false, isGeneratingAudio: false } : m));
      setIsChatProcessing(false);
      abortControllerRef.current = null;
    }
  };

  /** Reset the transcript to just the welcome message. */
  const clearHistory = () => {
    setMessages([{ id: 'welcome', role: 'model', text: '你好!我是你的 AI 智能助教。', timestamp: Date.now() }]);
  };

  return (
{toast.show && setToast({...toast, show: false})}/>}
{messages.map(msg => (
{msg.role === 'model' ? : }
{msg.role === 'model' && msg.thought && (
{isThinkingExpanded[msg.id] && (
{msg.thought}
)}
)} {msg.role === 'model' && msg.isSearching && (
正在联网搜索相关信息...
)}
{msg.images && msg.images.length > 0 && (
{msg.images.map((img, i) => ( sent ))}
)}
{msg.text || ''}
{msg.role === 'model' && !msg.text && !msg.isSearching && isChatProcessing && (
思考中...
)} {msg.isGeneratingAudio && (
正在合成语音...
)} {(msg.role === 'model' && (msg.text || msg.audio) && !isChatProcessing && !msg.isGeneratingAudio) && (
)}
))}
{(selectedImages.length > 0 || audioAttachment) && (
{selectedImages.map((file, idx) => (
))} {audioAttachment && (
语音已录制
)}
)}
{isMobile && ( )}
(e.currentTarget.value = '')} /> {isMobile && ( (e.currentTarget.value = '')} /> )}
{audioAttachment ? (
语音消息已就绪 (文字输入已禁用)
) : (