Yassine Mhirsi committed on
Commit ·
15bddd3
1
Parent(s): 78b3b2b
audio response
Browse files
src/app/components/chat/MessageList.tsx
CHANGED
|
@@ -61,11 +61,15 @@ const MessageList = ({ messages, isLoading = false, error = null, onRetry }: Mes
|
|
| 61 |
}`}
|
| 62 |
>
|
| 63 |
{message.audioUrl ? (
|
| 64 |
-
// Audio message display -
|
| 65 |
<div>
|
| 66 |
<AudioPlayer
|
| 67 |
src={message.audioUrl}
|
| 68 |
/>
|
|
|
|
|
|
|
|
|
|
|
|
|
| 69 |
</div>
|
| 70 |
) : (
|
| 71 |
// Regular text message
|
|
|
|
| 61 |
}`}
|
| 62 |
>
|
| 63 |
{message.audioUrl ? (
|
| 64 |
+
// Audio message display - show both audio player and text
|
| 65 |
<div>
|
| 66 |
<AudioPlayer
|
| 67 |
src={message.audioUrl}
|
| 68 |
/>
|
| 69 |
+
{/* Display the text content under the audio player */}
|
| 70 |
+
<div className="mt-2 whitespace-pre-wrap break-words">
|
| 71 |
+
{message.content}
|
| 72 |
+
</div>
|
| 73 |
</div>
|
| 74 |
) : (
|
| 75 |
// Regular text message
|
src/app/hooks/useChat.ts
CHANGED
|
@@ -57,7 +57,7 @@ Example output:
|
|
| 57 |
// Add user audio message
|
| 58 |
const userMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 59 |
role: 'user',
|
| 60 |
-
content: '
|
| 61 |
audioUrl: URL.createObjectURL(audioBlob), // Store the audio URL for playback in UI
|
| 62 |
};
|
| 63 |
addMessage(userMessage);
|
|
@@ -106,11 +106,27 @@ Example output:
|
|
| 106 |
// Extract tts_text from response (based on your curl example)
|
| 107 |
const ttsText = data?.tts_text || data?.final_argument || data?.result?.result?.[0]?.text || 'No response received';
|
| 108 |
|
| 109 |
-
//
|
|
|
|
|
|
|
|
|
|
| 110 |
const assistantMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 111 |
role: 'assistant',
|
| 112 |
content: ttsText,
|
| 113 |
};
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 114 |
addMessage(assistantMessage);
|
| 115 |
|
| 116 |
setState(prev => ({ ...prev, isLoading: false }));
|
|
@@ -192,6 +208,9 @@ Example output:
|
|
| 192 |
// Extract final_argument from response
|
| 193 |
const finalArgument = data?.final_argument || data?.result?.result?.[0]?.text || 'No response received';
|
| 194 |
|
|
|
|
|
|
|
|
|
|
| 195 |
// Parse if final_argument is a JSON string
|
| 196 |
let finalContent = finalArgument;
|
| 197 |
try {
|
|
@@ -203,11 +222,24 @@ Example output:
|
|
| 203 |
// Not JSON, use as is
|
| 204 |
}
|
| 205 |
|
| 206 |
-
// Add assistant response with
|
| 207 |
const assistantMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 208 |
role: 'assistant',
|
| 209 |
content: finalContent,
|
| 210 |
};
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 211 |
addMessage(assistantMessage);
|
| 212 |
}
|
| 213 |
|
|
|
|
| 57 |
// Add user audio message
|
| 58 |
const userMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 59 |
role: 'user',
|
| 60 |
+
content: 'sent', // Placeholder text for audio message
|
| 61 |
audioUrl: URL.createObjectURL(audioBlob), // Store the audio URL for playback in UI
|
| 62 |
};
|
| 63 |
addMessage(userMessage);
|
|
|
|
| 106 |
// Extract tts_text from response (based on your curl example)
|
| 107 |
const ttsText = data?.tts_text || data?.final_argument || data?.result?.result?.[0]?.text || 'No response received';
|
| 108 |
|
| 109 |
+
// Extract audio_base64 from response if available
|
| 110 |
+
const audioBase64 = data?.audio_base64;
|
| 111 |
+
|
| 112 |
+
// Add assistant response with both text and audio if available
|
| 113 |
const assistantMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 114 |
role: 'assistant',
|
| 115 |
content: ttsText,
|
| 116 |
};
|
| 117 |
+
|
| 118 |
+
// Add audio URL to message if audio_base64 is present
|
| 119 |
+
if (audioBase64) {
|
| 120 |
+
// Convert base64 to audio blob and create object URL
|
| 121 |
+
const binaryString = atob(audioBase64);
|
| 122 |
+
const bytes = new Uint8Array(binaryString.length);
|
| 123 |
+
for (let i = 0; i < binaryString.length; i++) {
|
| 124 |
+
bytes[i] = binaryString.charCodeAt(i);
|
| 125 |
+
}
|
| 126 |
+
const audioBlob = new Blob([bytes], { type: 'audio/wav' });
|
| 127 |
+
assistantMessage.audioUrl = URL.createObjectURL(audioBlob);
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
addMessage(assistantMessage);
|
| 131 |
|
| 132 |
setState(prev => ({ ...prev, isLoading: false }));
|
|
|
|
| 208 |
// Extract final_argument from response
|
| 209 |
const finalArgument = data?.final_argument || data?.result?.result?.[0]?.text || 'No response received';
|
| 210 |
|
| 211 |
+
// Extract audio_base64 from response if available
|
| 212 |
+
const audioBase64 = data?.audio_base64;
|
| 213 |
+
|
| 214 |
// Parse if final_argument is a JSON string
|
| 215 |
let finalContent = finalArgument;
|
| 216 |
try {
|
|
|
|
| 222 |
// Not JSON, use as is
|
| 223 |
}
|
| 224 |
|
| 225 |
+
// Add assistant response with both text and audio if available
|
| 226 |
const assistantMessage: Omit<ChatMessage, 'id' | 'timestamp'> = {
|
| 227 |
role: 'assistant',
|
| 228 |
content: finalContent,
|
| 229 |
};
|
| 230 |
+
|
| 231 |
+
// Add audio URL to message if audio_base64 is present
|
| 232 |
+
if (audioBase64) {
|
| 233 |
+
// Convert base64 to audio blob and create object URL
|
| 234 |
+
const binaryString = atob(audioBase64);
|
| 235 |
+
const bytes = new Uint8Array(binaryString.length);
|
| 236 |
+
for (let i = 0; i < binaryString.length; i++) {
|
| 237 |
+
bytes[i] = binaryString.charCodeAt(i);
|
| 238 |
+
}
|
| 239 |
+
const audioBlob = new Blob([bytes], { type: 'audio/wav' });
|
| 240 |
+
assistantMessage.audioUrl = URL.createObjectURL(audioBlob);
|
| 241 |
+
}
|
| 242 |
+
|
| 243 |
addMessage(assistantMessage);
|
| 244 |
}
|
| 245 |
|