Krish-05 committed on
Commit
bc08895
·
verified ·
1 Parent(s): 33e3679

Update frontend/src/components/chat/ChatInterface.jsx

Browse files
frontend/src/components/chat/ChatInterface.jsx CHANGED
@@ -31,7 +31,7 @@ const ChatInterface = () => {
31
  if (!userPrompt.trim() || isSending) return; // Prevent empty messages or sending when busy
32
 
33
  const userMessage = { role: 'user', message: userPrompt.trim(), timestamp: new Date() };
34
-
35
  // Add user message and a placeholder for assistant's response
36
  setChatHistory(prev => [
37
  ...prev,
@@ -59,6 +59,7 @@ const ChatInterface = () => {
59
  const response = await fetch(FASTAPI_LLM_URL, {
60
  method: 'POST',
61
  headers: { 'Content-Type': 'application/json' },
 
62
  body: JSON.stringify({ text: userMessage.message }),
63
  });
64
 
@@ -76,9 +77,10 @@ const ChatInterface = () => {
76
  if (done) {
77
  console.log("Stream finished.");
78
  // Process any remaining buffer content
79
- processPartialBuffer(buffer);
80
  break;
81
  }
 
82
  const decodedChunk = decoder.decode(value, { stream: true });
83
  buffer += decodedChunk;
84
 
@@ -124,7 +126,7 @@ const ChatInterface = () => {
124
  } catch (err) {
125
  console.error('General Fetch/Stream Error:', err);
126
  currentFullResponse += `\nSorry, something went wrong. Please try again. [Error: ${err.message}]`;
127
- // Update the message immediately on error
128
  setChatHistory(prev => {
129
  const updated = [...prev];
130
  const targetMessage = updated[assistantMessageIndex];
@@ -138,6 +140,7 @@ const ChatInterface = () => {
138
  }
139
  return updated;
140
  });
 
141
  } finally {
142
  setIsSending(false);
143
  // Final update to remove blinking cursor and ensure timestamp is set
@@ -173,14 +176,17 @@ const ChatInterface = () => {
173
  }
174
  };
175
 
 
 
 
 
176
  const handleSendVoiceMessage = async (audioBlob) => {
177
  if (isSending) return;
178
 
179
- // Display a placeholder message while transcribing
180
  setChatHistory(prev => [
181
  ...prev,
182
- { role: 'user', message: '(Recording audio...)', timestamp: new Date() },
183
- { role: 'assistant', message: 'Transcribing...', streaming: true, timestamp: new Date() } // Assistant thinking message
184
  ]);
185
  setIsSending(true);
186
 
@@ -201,35 +207,43 @@ const ChatInterface = () => {
201
  if (transcriptionResult) {
202
  // Update the user's "Recording audio..." message with the actual transcription
203
  setChatHistory(prev => {
204
- // Filter out the "Recording audio..." placeholder message
205
- const updated = prev.filter(msg => !(msg.role === 'user' && msg.message === '(Recording audio...)'));
 
 
 
206
  // Add the actual transcribed user message
207
  const userMsg = { role: 'user', message: transcriptionResult, timestamp: new Date() };
208
- // Filter out the "Transcribing..." assistant message
209
- const filteredAssistantMsgs = updated.filter(msg => !(msg.role === 'assistant' && msg.message === 'Transcribing...'));
210
- return [...filteredAssistantMsgs, userMsg];
211
  });
212
-
213
- // Now, send the transcribed text to your LLM
214
  await handleSendMessage(transcriptionResult);
215
  } else {
216
  alert('Could not transcribe audio. Please try speaking clearer.');
217
  // Remove transcription messages if empty
218
- setChatHistory(prev => prev.filter(msg => !(msg.role === 'user' && msg.message === '(Recording audio...)') && !(msg.role === 'assistant' && msg.message === 'Transcribing...')));
 
 
 
219
  }
220
  } catch (error) {
221
  console.error('Error during voice transcription:', error);
222
  alert(`Failed to transcribe audio. Error: ${error.message || 'Unknown error'}`);
223
  // Remove transcription messages if error
224
- setChatHistory(prev => prev.filter(msg => !(msg.role === 'user' && msg.message === '(Recording audio...)') && !(msg.role === 'assistant' && msg.message === 'Transcribing...')));
 
 
 
225
  } finally {
226
  setIsSending(false);
227
  // Ensure the "Transcribing..." message is always removed when done
228
- setChatHistory(prev => prev.filter(msg => !(msg.role === 'assistant' && msg.message === 'Transcribing...')));
 
 
229
  }
230
  };
231
 
232
-
233
  return (
234
  <div className="chat-layout">
235
  <Sidebar onNewChat={handleNewChat} />
@@ -262,9 +276,10 @@ const ChatInterface = () => {
262
  )}
263
  <div ref={chatEndRef} /> {/* Ref for scrolling to the bottom */}
264
  </div>
 
265
  <ChatInputArea
266
  onSendMessage={handleSendMessage}
267
- onSendVoiceMessage={handleSendVoiceMessage}
268
  isLoading={isSending}
269
  />
270
  </div>
 
31
  if (!userPrompt.trim() || isSending) return; // Prevent empty messages or sending when busy
32
 
33
  const userMessage = { role: 'user', message: userPrompt.trim(), timestamp: new Date() };
34
+
35
  // Add user message and a placeholder for assistant's response
36
  setChatHistory(prev => [
37
  ...prev,
 
59
  const response = await fetch(FASTAPI_LLM_URL, {
60
  method: 'POST',
61
  headers: { 'Content-Type': 'application/json' },
62
+ // Ensure that userPrompt (which can now be the transcript) is sent as 'text'
63
  body: JSON.stringify({ text: userMessage.message }),
64
  });
65
 
 
77
  if (done) {
78
  console.log("Stream finished.");
79
  // Process any remaining buffer content
80
+ processPartialBuffer(buffer);
81
  break;
82
  }
83
+
84
  const decodedChunk = decoder.decode(value, { stream: true });
85
  buffer += decodedChunk;
86
 
 
126
  } catch (err) {
127
  console.error('General Fetch/Stream Error:', err);
128
  currentFullResponse += `\nSorry, something went wrong. Please try again. [Error: ${err.message}]`;
129
+
130
  setChatHistory(prev => {
131
  const updated = [...prev];
132
  const targetMessage = updated[assistantMessageIndex];
 
140
  }
141
  return updated;
142
  });
143
+
144
  } finally {
145
  setIsSending(false);
146
  // Final update to remove blinking cursor and ensure timestamp is set
 
176
  }
177
  };
178
 
179
+ // This function is for handling actual audio file uploads for transcription.
180
+ // If your primary voice input is now Speech-to-Text via react-speech-recognition,
181
+ // this function might be used for a separate "upload audio file" feature,
182
+ // or it might become less critical for the main voice input flow.
183
  const handleSendVoiceMessage = async (audioBlob) => {
184
  if (isSending) return;
185
 
 
186
  setChatHistory(prev => [
187
  ...prev,
188
+ { role: 'user', message: '(Recording audio for transcription...)', timestamp: new Date() }, // Changed placeholder
189
+ { role: 'assistant', message: 'Transcribing audio...', streaming: true, timestamp: new Date() } // Assistant thinking message
190
  ]);
191
  setIsSending(true);
192
 
 
207
  if (transcriptionResult) {
208
  // Update the user's "Recording audio..." message with the actual transcription
209
  setChatHistory(prev => {
210
+ // Remove placeholder user message and assistant's "Transcribing..." message
211
+ const updated = prev.filter(msg =>
212
+ !(msg.role === 'user' && msg.message === '(Recording audio for transcription...)') &&
213
+ !(msg.role === 'assistant' && msg.message === 'Transcribing audio...')
214
+ );
215
  // Add the actual transcribed user message
216
  const userMsg = { role: 'user', message: transcriptionResult, timestamp: new Date() };
217
+ return [...updated, userMsg];
 
 
218
  });
219
+
220
+ // Now, send the transcribed text to your LLM using the existing handleSendMessage
221
  await handleSendMessage(transcriptionResult);
222
  } else {
223
  alert('Could not transcribe audio. Please try speaking clearer.');
224
  // Remove transcription messages if empty
225
+ setChatHistory(prev => prev.filter(msg =>
226
+ !(msg.role === 'user' && msg.message === '(Recording audio for transcription...)') &&
227
+ !(msg.role === 'assistant' && msg.message === 'Transcribing audio...')
228
+ ));
229
  }
230
  } catch (error) {
231
  console.error('Error during voice transcription:', error);
232
  alert(`Failed to transcribe audio. Error: ${error.message || 'Unknown error'}`);
233
  // Remove transcription messages if error
234
+ setChatHistory(prev => prev.filter(msg =>
235
+ !(msg.role === 'user' && msg.message === '(Recording audio for transcription...)') &&
236
+ !(msg.role === 'assistant' && msg.message === 'Transcribing audio...')
237
+ ));
238
  } finally {
239
  setIsSending(false);
240
  // Ensure the "Transcribing..." message is always removed when done
241
+ setChatHistory(prev => prev.filter(msg =>
242
+ !(msg.role === 'assistant' && msg.message === 'Transcribing audio...')
243
+ ));
244
  }
245
  };
246
 
 
247
  return (
248
  <div className="chat-layout">
249
  <Sidebar onNewChat={handleNewChat} />
 
276
  )}
277
  <div ref={chatEndRef} /> {/* Ref for scrolling to the bottom */}
278
  </div>
279
+ {/* Pass both handlers. ChatInputArea will decide which one to use based on its internal logic */}
280
  <ChatInputArea
281
  onSendMessage={handleSendMessage}
282
+ onSendVoiceMessage={handleSendVoiceMessage} // Keep this if you also use react-voice-visualizer for blob
283
  isLoading={isSending}
284
  />
285
  </div>