Yassine Mhirsi committed on
Commit ·
3beb9d7
1
Parent(s): 28269c5
auto play audio
Browse files
src/app/components/chat/AudioPlayer.tsx
CHANGED
|
@@ -4,15 +4,18 @@ import { Play, Pause } from 'lucide-react';
|
|
| 4 |
type AudioPlayerProps = {
|
| 5 |
src: string;
|
| 6 |
className?: string;
|
|
|
|
| 7 |
};
|
| 8 |
|
| 9 |
-
const AudioPlayer: React.FC<AudioPlayerProps> = ({ src, className = '' }) => {
|
| 10 |
const [isPlaying, setIsPlaying] = useState(false);
|
| 11 |
const [duration, setDuration] = useState(0);
|
| 12 |
const [currentTime, setCurrentTime] = useState(0);
|
| 13 |
const audioRef = useRef<HTMLAudioElement>(null);
|
| 14 |
const progressRef = useRef<HTMLDivElement>(null);
|
| 15 |
|
|
|
|
|
|
|
| 16 |
useEffect(() => {
|
| 17 |
const audio = audioRef.current;
|
| 18 |
if (!audio) return;
|
|
@@ -32,11 +35,27 @@ const AudioPlayer: React.FC<AudioPlayerProps> = ({ src, className = '' }) => {
|
|
| 32 |
audio.addEventListener('loadeddata', setAudioData);
|
| 33 |
audio.addEventListener('timeupdate', setAudioTime);
|
| 34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
return () => {
|
| 36 |
audio.removeEventListener('loadeddata', setAudioData);
|
| 37 |
audio.removeEventListener('timeupdate', setAudioTime);
|
| 38 |
};
|
| 39 |
-
}, [src]);
|
| 40 |
|
| 41 |
const togglePlayPause = () => {
|
| 42 |
const audio = audioRef.current;
|
|
|
|
| 4 |
type AudioPlayerProps = {
|
| 5 |
src: string;
|
| 6 |
className?: string;
|
| 7 |
+
autoPlay?: boolean; // Whether to auto-play the audio when rendered
|
| 8 |
};
|
| 9 |
|
| 10 |
+
const AudioPlayer: React.FC<AudioPlayerProps> = ({ src, className = '', autoPlay = false }) => {
|
| 11 |
const [isPlaying, setIsPlaying] = useState(false);
|
| 12 |
const [duration, setDuration] = useState(0);
|
| 13 |
const [currentTime, setCurrentTime] = useState(0);
|
| 14 |
const audioRef = useRef<HTMLAudioElement>(null);
|
| 15 |
const progressRef = useRef<HTMLDivElement>(null);
|
| 16 |
|
| 17 |
+
const hasAutoPlayedRef = useRef(false); // To track if auto-play has already happened
|
| 18 |
+
|
| 19 |
useEffect(() => {
|
| 20 |
const audio = audioRef.current;
|
| 21 |
if (!audio) return;
|
|
|
|
| 35 |
audio.addEventListener('loadeddata', setAudioData);
|
| 36 |
audio.addEventListener('timeupdate', setAudioTime);
|
| 37 |
|
| 38 |
+
// Auto-play when the component mounts if autoPlay is true and hasn't played yet
|
| 39 |
+
if (autoPlay && !hasAutoPlayedRef.current) {
|
| 40 |
+
const playAudio = async () => {
|
| 41 |
+
try {
|
| 42 |
+
await audio.play();
|
| 43 |
+
setIsPlaying(true);
|
| 44 |
+
hasAutoPlayedRef.current = true; // Mark as auto-played to prevent re-triggering
|
| 45 |
+
} catch (error) {
|
| 46 |
+
// Handle browser auto-play restrictions gracefully
|
| 47 |
+
console.warn('Auto-play failed:', error);
|
| 48 |
+
}
|
| 49 |
+
};
|
| 50 |
+
|
| 51 |
+
playAudio();
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
return () => {
|
| 55 |
audio.removeEventListener('loadeddata', setAudioData);
|
| 56 |
audio.removeEventListener('timeupdate', setAudioTime);
|
| 57 |
};
|
| 58 |
+
}, [src, autoPlay]); // Only re-run when src or autoPlay changes
|
| 59 |
|
| 60 |
const togglePlayPause = () => {
|
| 61 |
const audio = audioRef.current;
|
src/app/components/chat/MessageList.tsx
CHANGED
|
@@ -49,7 +49,7 @@ const MessageList = ({ messages, isLoading = false, error = null, onRetry }: Mes
|
|
| 49 |
</div>
|
| 50 |
)}
|
| 51 |
|
| 52 |
-
{messages.map((message) => (
|
| 53 |
<div
|
| 54 |
key={message.id}
|
| 55 |
className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}
|
|
@@ -66,8 +66,9 @@ const MessageList = ({ messages, isLoading = false, error = null, onRetry }: Mes
|
|
| 66 |
<div>
|
| 67 |
<AudioPlayer
|
| 68 |
src={message.audioUrl}
|
|
|
|
| 69 |
/>
|
| 70 |
-
|
| 71 |
</div>
|
| 72 |
) : (
|
| 73 |
// Regular text message
|
|
|
|
| 49 |
</div>
|
| 50 |
)}
|
| 51 |
|
| 52 |
+
{messages.map((message, index) => (
|
| 53 |
<div
|
| 54 |
key={message.id}
|
| 55 |
className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}
|
|
|
|
| 66 |
<div>
|
| 67 |
<AudioPlayer
|
| 68 |
src={message.audioUrl}
|
| 69 |
+
autoPlay={message.role === 'assistant' && index === messages.length - 1} // Auto-play only the most recent AI response
|
| 70 |
/>
|
| 71 |
+
<TranscriptToggle content={message.content} />
|
| 72 |
</div>
|
| 73 |
) : (
|
| 74 |
// Regular text message
|
src/app/hooks/useChat.ts
CHANGED
|
@@ -57,7 +57,7 @@ Example output:
|
|
| 57 |
// Add user audio message
|
| 58 |
const userMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 59 |
role: 'user',
|
| 60 |
-
content: '
|
| 61 |
audioUrl: URL.createObjectURL(audioBlob), // Store the audio URL for playback in UI
|
| 62 |
};
|
| 63 |
addMessage(userMessage);
|
|
|
|
| 57 |
// Add user audio message
|
| 58 |
const userMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 59 |
role: 'user',
|
| 60 |
+
content: 'extracted argument', // Placeholder text for audio message
|
| 61 |
audioUrl: URL.createObjectURL(audioBlob), // Store the audio URL for playback in UI
|
| 62 |
};
|
| 63 |
addMessage(userMessage);
|
src/app/types/chat.types.ts
CHANGED
|
@@ -5,6 +5,7 @@ export type ChatMessage = {
|
|
| 5 |
role: MessageRole;
|
| 6 |
content: string;
|
| 7 |
audioUrl?: string; // URL for audio messages
|
|
|
|
| 8 |
timestamp: Date;
|
| 9 |
};
|
| 10 |
|
|
|
|
| 5 |
role: MessageRole;
|
| 6 |
content: string;
|
| 7 |
audioUrl?: string; // URL for audio messages
|
| 8 |
+
extractedArgument?: string; // Extracted transcription for user audio messages
|
| 9 |
timestamp: Date;
|
| 10 |
};
|
| 11 |
|