import React, { useState, useRef, useEffect } from 'react'; import { Plus, ArrowUp, Settings2, Mic, X, Check, Loader2, Search, Sparkles, Play, Pause } from 'lucide-react'; import { useMCPTools } from '../../hooks/useMCPTools.ts'; import type { MCPTool } from '../../types/index.ts'; type ChatInputProps = { onSubmit?: (message: string, selectedTool?: string | null, stance?: 'positive' | 'negative') => void; onAudioSubmit?: (audioBlob: Blob, selectedTool?: string | null, stance?: 'positive' | 'negative') => void; placeholder?: string; }; // Type guard to ensure tool type safety (used for runtime validation if needed) const isMCPTool = (value: any): value is MCPTool => { return value && typeof value === 'object' && typeof value.name === 'string'; }; // Allowed tools - only these 3 will be shown const ALLOWED_TOOLS = ['detect stance', 'generate argument', 'extract topic']; // Helper function to check if a tool name matches one of the allowed tools const isAllowedTool = (toolName: string): boolean => { const toolLower = toolName.toLowerCase(); return ALLOWED_TOOLS.some(allowed => toolLower.includes(allowed.split(' ')[0]) && toolLower.includes(allowed.split(' ')[1]) ); }; // Helper function to normalize tool name to standard format const normalizeToolName = (toolName: string): string => { const toolLower = toolName.toLowerCase(); if (toolLower.includes('detect') && toolLower.includes('stance')) { return 'detect stance'; } if (toolLower.includes('generate') && toolLower.includes('argument')) { return 'generate argument'; } if (toolLower.includes('extract') && toolLower.includes('topic')) { return 'extract topic'; } return toolName; }; // Helper function to get dynamic placeholder based on selected tool const getPlaceholder = (selectedTool: string | null, defaultPlaceholder: string): string => { if (!selectedTool) { return defaultPlaceholder; } const normalizedTool = normalizeToolName(selectedTool); switch (normalizedTool) { case 'detect stance': return ''; // Will use 
separate fields, no placeholder needed case 'generate argument': return 'Enter a debate topic to generate an argument (e.g., "cannabis legalization")...'; case 'extract topic': return 'Enter text to extract the topic (e.g., "Should we legalize assisted suicide?")...'; default: return defaultPlaceholder; } }; const ChatInput = ({ onSubmit, onAudioSubmit, placeholder = 'Ask a follow-up...' }: ChatInputProps) => { const [input, setInput] = useState(''); const [isRecording, setIsRecording] = useState(false); const [showToolsDropdown, setShowToolsDropdown] = useState(false); const [selectedTool, setSelectedTool] = useState(null); const [selectedStance, setSelectedStance] = useState<'positive' | 'negative' | null>(null); const [searchQuery, setSearchQuery] = useState(''); // Separate inputs for detect stance tool const [detectStanceTopic, setDetectStanceTopic] = useState(''); const [detectStanceArgument, setDetectStanceArgument] = useState(''); const [focusedIndex, setFocusedIndex] = useState(-1); const [dropdownPosition, setDropdownPosition] = useState<'below' | 'above'>('below'); const [dropdownMaxHeight, setDropdownMaxHeight] = useState(320); const [audioBlob, setAudioBlob] = useState(null); const [audioUrl, setAudioUrl] = useState(null); const [isPlaying, setIsPlaying] = useState(false); const [recordingTime, setRecordingTime] = useState(0); const dropdownRef = useRef(null); const dropdownContentRef = useRef(null); const searchInputRef = useRef(null); const mediaRecorderRef = useRef(null); const audioChunksRef = useRef([]); const audioRef = useRef(null); const recordingTimerRef = useRef(null); const { tools, loading, error, refetch } = useMCPTools(); const handleSubmit = (e: React.FormEvent) => { e.preventDefault(); e.stopPropagation(); const normalizedTool = selectedTool ? 
normalizeToolName(selectedTool) : null; // Handle detect stance tool with two fields if (normalizedTool === 'detect stance') { if (detectStanceTopic.trim() && detectStanceArgument.trim()) { // Format as JSON string for detect stance: topic and argument const detectStanceInput = JSON.stringify({ topic: detectStanceTopic.trim(), argument: detectStanceArgument.trim(), }); if (onSubmit) { onSubmit(detectStanceInput, selectedTool); } setDetectStanceTopic(''); setDetectStanceArgument(''); } } else if (input.trim()) { if (onSubmit) { onSubmit(input, selectedTool, selectedStance || undefined); } console.log('Submitted:', input); setInput(''); // Reset stance after submit if generate argument tool if (normalizedTool === 'generate argument') { setSelectedStance(null); } } return false; }; const handleMicClick = async () => { try { const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); // Try to use a supported mime type let options: MediaRecorderOptions = {}; if (MediaRecorder.isTypeSupported('audio/webm;codecs=opus')) { options = { mimeType: 'audio/webm;codecs=opus' }; } else if (MediaRecorder.isTypeSupported('audio/webm')) { options = { mimeType: 'audio/webm' }; } else if (MediaRecorder.isTypeSupported('audio/mp4')) { options = { mimeType: 'audio/mp4' }; } const mediaRecorder = new MediaRecorder(stream, options); mediaRecorderRef.current = mediaRecorder; audioChunksRef.current = []; mediaRecorder.ondataavailable = (event) => { if (event.data.size > 0) { audioChunksRef.current.push(event.data); } }; mediaRecorder.onstop = () => { const mimeType = mediaRecorder.mimeType || 'audio/webm'; const blob = new Blob(audioChunksRef.current, { type: mimeType }); setAudioBlob(blob); const url = URL.createObjectURL(blob); setAudioUrl(url); // Clean up old audio element if (audioRef.current) { audioRef.current.pause(); audioRef.current = null; } // Stop all tracks to release microphone stream.getTracks().forEach(track => track.stop()); }; mediaRecorder.start(); 
setIsRecording(true);
      setRecordingTime(0);
      // Start recording timer (1-second ticks for the mm:ss display)
      recordingTimerRef.current = setInterval(() => {
        setRecordingTime(prev => prev + 1);
      }, 1000);
    } catch (error) {
      console.error('Error accessing microphone:', error);
      alert('Could not access microphone. Please check your permissions.');
    }
  };

  // Discards the current recording: stops the recorder and timer, releases the
  // object URL, and resets all recording-related state back to text-chat mode.
  const handleCancelRecording = () => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state !== 'inactive') {
      mediaRecorderRef.current.stop();
    }
    if (recordingTimerRef.current) {
      clearInterval(recordingTimerRef.current);
      recordingTimerRef.current = null;
    }
    if (audioRef.current) {
      audioRef.current.pause();
      audioRef.current = null;
    }
    setIsRecording(false);
    setIsPlaying(false);
    setRecordingTime(0);
    setAudioBlob(null);
    if (audioUrl) {
      URL.revokeObjectURL(audioUrl);
      setAudioUrl(null);
    }
    audioChunksRef.current = [];
  };

  // Stops the recorder and submits the audio immediately, without a review step.
  // Wraps the recorder's existing onstop so the normal blob/URL state updates
  // still happen before the submit fires.
  const handleConfirmRecording = () => {
    if (mediaRecorderRef.current && mediaRecorderRef.current.state !== 'inactive') {
      // Set up a temporary onstop handler specifically for submission
      const originalOnStop = mediaRecorderRef.current.onstop;
      mediaRecorderRef.current.onstop = () => {
        // Call the original onStop to ensure all state is properly set
        if (originalOnStop) {
          originalOnStop.call(mediaRecorderRef.current, new Event('stop'));
        }
        // Create the blob directly from audioChunksRef to ensure we have the data
        // (the setAudioBlob state update above is async and not yet visible here).
        const mimeType = mediaRecorderRef.current?.mimeType || 'audio/webm';
        const blob = new Blob(audioChunksRef.current, { type: mimeType });
        // Submit the audio immediately
        // NOTE(review): unlike handleSendRecording below, this path does not
        // pass the selected stance — confirm whether that is intentional.
        if (blob && onAudioSubmit) {
          onAudioSubmit(blob, selectedTool);
        } else {
          console.error('Audio blob not available for submission', { blob: blob.size > 0, onAudioSubmit });
        }
      };
      mediaRecorderRef.current.stop();
    }
    if (recordingTimerRef.current) {
      clearInterval(recordingTimerRef.current);
      recordingTimerRef.current = null;
    }
    setIsRecording(false);
  };

  // Toggles playback of the recorded audio preview.
  const handlePlayPause = async () => {
    if (!audioUrl) {
      console.error('Audio URL not available');
      return;
    }
    // Ensure audio element exists (created by the audioUrl effect below)
    if (!audioRef.current) {
      console.error('Audio element not initialized');
      return;
    }
    try {
      if (isPlaying) {
        audioRef.current.pause();
        setIsPlaying(false);
      } else {
        // Reset to beginning if needed
        if (audioRef.current.ended) {
          audioRef.current.currentTime = 0;
        }
        // Play the audio; play() may return a promise that rejects on autoplay policy
        const playPromise = audioRef.current.play();
        if (playPromise !== undefined) {
          await playPromise;
          setIsPlaying(true);
        } else {
          setIsPlaying(true);
        }
      }
    } catch (error) {
      console.error('Error playing audio:', error);
      setIsPlaying(false);
      // Check if it's an autoplay policy issue
      if (error instanceof Error && error.name === 'NotAllowedError') {
        alert('Please interact with the page first, then try playing again.');
      } else {
        alert('Could not play audio. Please try again.');
      }
    }
  };

  // Submits the finished recording (review mode), then resets back to text chat.
  const handleSendRecording = () => {
    // Stop audio playback if playing
    if (audioRef.current && isPlaying) {
      audioRef.current.pause();
      setIsPlaying(false);
    }
    // Submit the recorded audio if available
    if (audioBlob && onAudioSubmit) {
      onAudioSubmit(audioBlob, selectedTool, selectedStance || undefined);
      // Reset stance after submit if generate argument tool
      if (selectedTool && normalizeToolName(selectedTool) === 'generate argument') {
        setSelectedStance(null);
      }
    }
    // Clean up and return to normal chat mode
    handleCancelRecording();
    // Focus back to text input
    setTimeout(() => {
      const textarea = document.querySelector('textarea');
      if (textarea) textarea.focus();
    }, 100);
  };

  // Initialize audio element when audioUrl is available
  useEffect(() => {
    if (audioUrl) {
      // Clean up old audio element if URL changed
      if (audioRef.current && audioRef.current.src !== audioUrl) {
        audioRef.current.pause();
        audioRef.current = null;
      }
      // Create new audio element if it doesn't exist
      if (!audioRef.current) {
        const audio = new Audio(audioUrl);
        audioRef.current = audio;
        audio.volume = 1.0;
        audio.onended = () => {
          setIsPlaying(false);
        };
        audio.onerror = (error) => {
          console.error('Audio initialization error:', error);
          setIsPlaying(false);
        };
audio.onloadeddata = () => {
          console.log('Audio loaded and ready');
        };
        audio.oncanplay = () => {
          console.log('Audio can play');
        };
      }
    }
    return () => {
      // Don't clean up audio element here - let it persist for playback
    };
  }, [audioUrl]);

  // Cleanup on unmount
  // NOTE(review): with [audioUrl] as a dependency this cleanup also runs on
  // every URL change (revoking the previous URL), not just on unmount — verify
  // that is the intended lifecycle.
  useEffect(() => {
    return () => {
      if (recordingTimerRef.current) {
        clearInterval(recordingTimerRef.current);
      }
      if (audioRef.current) {
        audioRef.current.pause();
        audioRef.current = null;
      }
      if (audioUrl) {
        URL.revokeObjectURL(audioUrl);
      }
    };
  }, [audioUrl]);

  // Decorative waveform shown while recording: 50 random-height bars,
  // re-randomized every 100ms via the animationKey tick.
  const WaveAnimation = () => {
    const [animationKey, setAnimationKey] = useState(0);
    useEffect(() => {
      const interval = setInterval(() => {
        setAnimationKey((prev) => prev + 1);
      }, 100);
      return () => clearInterval(interval);
    }, []);
    const bars = Array.from({ length: 50 }, (_, i) => {
      // Random height (4-24) and animation delay (0-2s) per bar
      const height = Math.random() * 20 + 4;
      const delay = Math.random() * 2;
      return (
); }); return (
{bars}
); }; // Close dropdown when clicking outside useEffect(() => { const handleClickOutside = (event: MouseEvent) => { if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) { setShowToolsDropdown(false); setFocusedIndex(-1); } }; document.addEventListener('mousedown', handleClickOutside); return () => { document.removeEventListener('mousedown', handleClickOutside); }; }, []); // Handle keyboard navigation useEffect(() => { const handleKeyDown = (event: KeyboardEvent) => { if (!showToolsDropdown) return; const filteredTools = tools.filter(isMCPTool).filter(tool => tool.name.toLowerCase().includes(searchQuery.toLowerCase()) || (tool.description && tool.description.toLowerCase().includes(searchQuery.toLowerCase())) ); switch (event.key) { case 'ArrowDown': event.preventDefault(); setFocusedIndex(prev => (prev + 1) % filteredTools.length); break; case 'ArrowUp': event.preventDefault(); setFocusedIndex(prev => prev <= 0 ? filteredTools.length - 1 : prev - 1); break; case 'Enter': event.preventDefault(); if (focusedIndex >= 0 && filteredTools[focusedIndex]) { setSelectedTool(filteredTools[focusedIndex].name); setShowToolsDropdown(false); setFocusedIndex(-1); } break; case 'Escape': event.preventDefault(); setShowToolsDropdown(false); setFocusedIndex(-1); break; } }; document.addEventListener('keydown', handleKeyDown); return () => { document.removeEventListener('keydown', handleKeyDown); }; }, [showToolsDropdown, focusedIndex, tools, searchQuery]); // Calculate dropdown position and max height based on viewport useEffect(() => { if (showToolsDropdown && dropdownRef.current) { const calculatePosition = () => { const buttonElement = dropdownRef.current?.querySelector('button'); if (!buttonElement) return; const buttonRect = buttonElement.getBoundingClientRect(); const viewportHeight = window.innerHeight; const viewportWidth = window.innerWidth; const spaceBelow = viewportHeight - buttonRect.bottom; const spaceAbove = buttonRect.top; const 
dropdownHeight = 500; // Approximate max height (increased from 400)
        const minSpace = 20; // Minimum space from viewport edge
        // Determine if dropdown should be above or below: flip above only when
        // below doesn't fit AND there is more room above.
        if (spaceBelow < dropdownHeight + minSpace && spaceAbove > spaceBelow) {
          setDropdownPosition('above');
          // Calculate max height based on available space above
          const maxHeight = Math.min(450, spaceAbove - minSpace - 60); // Increased from 320 to 450
          setDropdownMaxHeight(Math.max(250, maxHeight)); // Increased minimum from 200 to 250
        } else {
          setDropdownPosition('below');
          // Calculate max height based on available space below
          const maxHeight = Math.min(450, spaceBelow - minSpace - 60); // Increased from 320 to 450
          setDropdownMaxHeight(Math.max(250, maxHeight)); // Increased minimum from 200 to 250
        }
        // Adjust horizontal position if dropdown would overflow the viewport.
        // Styles are mutated directly on the DOM node rather than via state.
        const dropdownWidth = 384; // w-96 = 384px (increased from w-80)
        const dropdownElement = dropdownRef.current?.querySelector('[data-dropdown-content]') as HTMLElement;
        if (dropdownElement) {
          if (buttonRect.left + dropdownWidth > viewportWidth - minSpace) {
            // Would overflow on the right, align to right edge
            dropdownElement.style.right = '0';
            dropdownElement.style.left = 'auto';
          } else {
            // Reset to left alignment
            dropdownElement.style.right = 'auto';
            dropdownElement.style.left = '0';
          }
        }
      };
      calculatePosition();
      // Recalculate on window resize or scroll (capture phase catches scrolls
      // in nested containers, not just the window).
      window.addEventListener('resize', calculatePosition);
      window.addEventListener('scroll', calculatePosition, true);
      return () => {
        window.removeEventListener('resize', calculatePosition);
        window.removeEventListener('scroll', calculatePosition, true);
      };
    }
  }, [showToolsDropdown]);

  // Focus search input when dropdown opens (delayed so the element has mounted)
  useEffect(() => {
    if (showToolsDropdown && searchInputRef.current) {
      setTimeout(() => searchInputRef.current?.focus(), 100);
    }
  }, [showToolsDropdown]);

  // Opens/closes the tools dropdown; refetches the tool list on open.
  const toggleToolsDropdown = () => {
    const nextState = !showToolsDropdown;
    setShowToolsDropdown(nextState);
    setSearchQuery('');
setFocusedIndex(-1); if (nextState) { refetch(); } }; return (
{isRecording ? (
{Math.floor(recordingTime / 60)}:{(recordingTime % 60).toString().padStart(2, '0')}
) : audioBlob && audioUrl ? (
{isPlaying ? 'Playing...' : 'Tap to replay'}
) : (
{/* Two input fields for detect stance tool */} {selectedTool && normalizeToolName(selectedTool) === 'detect stance' ? (
setDetectStanceTopic(e.target.value)} placeholder="Enter the debate topic (e.g., Climate change is real)" className="w-full bg-transparent text-zinc-800 dark:text-gray-300 placeholder-zinc-400 dark:placeholder-gray-500 border border-zinc-300 dark:border-zinc-600 rounded-lg px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-teal-500 dark:focus:ring-teal-400" />