Alleinzellgaenger committed on
Commit
63428e7
·
1 Parent(s): 49058ae

Add multiple chats

Browse files
backend/app.py CHANGED
@@ -272,7 +272,7 @@ async def chat_endpoint(request: ChatRequest):
272
 
273
  print("🤖 Calling Claude for chat response...")
274
  response = client.messages.create(
275
- model="claude-sonnet-4-20250514",
276
  max_tokens=10000,
277
  system=system_prompt, # system prompt here
278
  messages=anthropic_messages,
 
272
 
273
  print("🤖 Calling Claude for chat response...")
274
  response = client.messages.create(
275
+ model="claude-3-5-haiku-latest",
276
  max_tokens=10000,
277
  system=system_prompt, # system prompt here
278
  messages=anthropic_messages,
frontend/src/components/ChunkPanel.jsx CHANGED
@@ -5,37 +5,35 @@ import rehypeRaw from 'rehype-raw';
5
  import { getChatMarkdownComponents, getTitleMarkdownComponents } from '../utils/markdownComponents.jsx';
6
  import SimpleChat from './SimpleChat.jsx';
7
  import ChunkLoadingTips from './ChunkLoadingTips.jsx';
8
- import React, { useState, useEffect } from 'react';
 
9
 
10
  const ChunkPanel = ({
11
  documentData,
12
  currentChunkIndex,
13
  showChat,
14
- isTransitioning,
15
  updateGlobalChatHistory,
16
  getGlobalChatHistory,
17
  addMessageToChunk,
18
  getCurrentChunkMessages,
19
  hasChunkMessages,
20
- isChunkCompleted,
21
- canEditChunk,
22
- setWaitingForFirstResponse,
23
  markChunkUnderstood,
24
  skipChunk,
25
- goToPrevChunk
 
 
26
  }) => {
27
 
28
  const chatMarkdownComponents = getChatMarkdownComponents();
29
  const titleMarkdownComponents = getTitleMarkdownComponents();
30
- const [isLoading, setIsLoading] = useState(false);
31
 
32
  // Generate greeting for chunks that don't have messages yet
33
  // Only for initial chunk (0) and when not transitioning
34
  useEffect(() => {
35
- if (documentData && showChat && !hasChunkMessages(currentChunkIndex) && currentChunkIndex === 0 && !isTransitioning) {
36
  generateGreetingStreaming();
37
  }
38
- }, [currentChunkIndex, documentData, showChat, isTransitioning]);
39
 
40
  const updateLastAssistantMessage = (delta) => {
41
  const allMessages = getGlobalChatHistory();
@@ -60,306 +58,32 @@ const ChunkPanel = ({
60
  };
61
 
62
  const generateGreetingStreaming = async () => {
63
- setIsLoading(true);
64
- try {
65
- const response = await fetch('/api/chat/stream', {
66
- method: 'POST',
67
- headers: { 'Content-Type': 'application/json' },
68
- body: JSON.stringify({
69
- messages: [],
70
- currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
71
- document: documentData ? JSON.stringify(documentData) : ''
72
- })
73
- });
74
-
75
- const reader = response.body.getReader();
76
- let shouldStop = false;
77
-
78
- // Local snapshot to avoid stale reads
79
- let localMessages = getGlobalChatHistory();
80
- const createTempId = () => `assistant_${Date.now()}_${Math.random().toString(36).slice(2)}`;
81
- let assistantId = null;
82
-
83
- // SSE read buffer
84
- let sseBuffer = '';
85
-
86
- // Streaming smoothness buffer
87
- let textBuffer = '';
88
- let frameScheduled = false;
89
-
90
- const flushBuffer = (isFinal = false) => {
91
- if (!assistantId) return;
92
-
93
- const lastMsg = localMessages[localMessages.length - 1];
94
- if (lastMsg.id === assistantId) {
95
- // Append buffered text
96
- lastMsg.content += textBuffer;
97
- textBuffer = '';
98
- }
99
- updateGlobalChatHistory([...localMessages]);
100
- };
101
-
102
- const scheduleFlush = () => {
103
- if (!frameScheduled) {
104
- frameScheduled = true;
105
- requestAnimationFrame(() => {
106
- flushBuffer();
107
- frameScheduled = false;
108
- });
109
- }
110
- };
111
-
112
- while (!shouldStop) {
113
- const { done, value } = await reader.read();
114
- if (done) break;
115
-
116
- sseBuffer += new TextDecoder().decode(value);
117
- const parts = sseBuffer.split('\n\n');
118
- sseBuffer = parts.pop(); // keep last partial
119
-
120
- for (const part of parts) {
121
- if (!part.startsWith('data:')) continue;
122
- const jsonStr = part.slice(5).trim();
123
- if (!jsonStr) continue;
124
-
125
- let parsed;
126
- try {
127
- parsed = JSON.parse(jsonStr);
128
- } catch (err) {
129
- console.warn('Could not JSON.parse stream chunk', jsonStr);
130
- continue;
131
- }
132
-
133
- if (parsed.error) {
134
- console.error('streaming error', parsed.error);
135
- shouldStop = true;
136
- break;
137
- }
138
- if (parsed.done) {
139
- shouldStop = true;
140
- flushBuffer(true); // final flush, remove cursor
141
- break;
142
- }
143
-
144
- const delta = typeof parsed === 'string' ? parsed : parsed?.content ?? '';
145
-
146
- if (!assistantId) {
147
- assistantId = createTempId();
148
- localMessages.push({
149
- id: assistantId,
150
- role: 'assistant',
151
- content: delta,
152
- chunkIndex: currentChunkIndex
153
- });
154
- } else {
155
- textBuffer += delta;
156
- }
157
 
158
- // Schedule smooth UI update
159
- scheduleFlush();
160
- }
161
- }
162
- } catch (error) {
163
- console.error(error);
164
- addMessageToChunk(
165
- { role: 'assistant', content: 'Sorry, something went wrong. Please try again.' },
166
- currentChunkIndex
167
- );
168
- } finally {
169
- setIsLoading(false);
170
- }
171
  };
172
 
173
- const generateGreeting = async () => {
174
- setIsLoading(true);
175
- if (setWaitingForFirstResponse) {
176
- setWaitingForFirstResponse(true);
177
- }
178
- try {
179
- const response = await fetch('/api/chat', {
180
- method: 'POST',
181
- headers: { 'Content-Type': 'application/json' },
182
- body: JSON.stringify({
183
- messages: [],
184
- currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
185
- document: documentData ? JSON.stringify(documentData) : ''
186
- })
187
- });
188
-
189
- const data = await response.json();
190
-
191
- addMessageToChunk(
192
- {
193
- role: 'assistant',
194
- content: data.content || 'Hi! Welcome to your learning session. Let\'s explore this document together!'
195
- },
196
- currentChunkIndex
197
- );
198
- } catch (error) {
199
- console.error('Error generating greeting:', error);
200
- addMessageToChunk(
201
- {
202
- role: 'assistant',
203
- content: 'Hi! Welcome to your learning session. Let\'s explore this document together!'
204
- },
205
- currentChunkIndex
206
- );
207
- } finally {
208
- setIsLoading(false);
209
- if (setWaitingForFirstResponse) {
210
- setWaitingForFirstResponse(false);
211
- }
212
- }
213
- };
214
 
215
  const handleSendStreaming = async (text) => {
216
  const userMessage = { role: 'user', content: text, chunkIndex: currentChunkIndex };
217
  addMessageToChunk(userMessage, currentChunkIndex);
218
- setIsLoading(true);
219
 
220
- try {
221
- // Get the updated messages after adding the user message
222
- const updatedMessages = [...getGlobalChatHistory(), userMessage];
223
-
224
- const response = await fetch('/api/chat/stream', {
225
- method: 'POST',
226
- headers: { 'Content-Type': 'application/json' },
227
- body: JSON.stringify({
228
- messages: updatedMessages,
229
- currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
230
- document: documentData ? JSON.stringify(documentData) : ''
231
- })
232
- });
233
 
234
- const reader = await response.body.getReader();
235
-
236
- let shouldStop = false;
237
-
238
- // Local snapshot to avoid stale reads - include the user message we just added
239
- let localMessages = updatedMessages;
240
- const createTempId = () => `assistant_${Date.now()}_${Math.random().toString(36).slice(2)}`;
241
- let assistantId = null;
242
-
243
- // SSE read buffer
244
- let sseBuffer = '';
245
-
246
- // Streaming smoothness buffer
247
- let textBuffer = '';
248
- let frameScheduled = false;
249
-
250
- const flushBuffer = (isFinal = false) => {
251
- if (!assistantId) return;
252
-
253
- const lastMsg = localMessages[localMessages.length - 1];
254
- if (lastMsg.id === assistantId) {
255
- // Append buffered text
256
- lastMsg.content += textBuffer;
257
- textBuffer = '';
258
- }
259
- updateGlobalChatHistory([...localMessages]);
260
- };
261
-
262
- const scheduleFlush = () => {
263
- if (!frameScheduled) {
264
- frameScheduled = true;
265
- requestAnimationFrame(() => {
266
- flushBuffer();
267
- frameScheduled = false;
268
- });
269
- }
270
- };
271
- while (!shouldStop) {
272
- const { done, value } = await reader.read();
273
- if (done) break;
274
-
275
- sseBuffer += new TextDecoder().decode(value);
276
- const parts = sseBuffer.split('\n\n');
277
- sseBuffer = parts.pop(); // keep last partial
278
-
279
- for (const part of parts) {
280
- if (!part.startsWith('data:')) continue;
281
- const jsonStr = part.slice(5).trim();
282
- if (!jsonStr) continue;
283
-
284
- let parsed;
285
- try {
286
- parsed = JSON.parse(jsonStr);
287
- } catch (err) {
288
- console.warn('Could not JSON.parse stream chunk', jsonStr);
289
- continue;
290
- }
291
-
292
- if (parsed.error) {
293
- console.error('streaming error', parsed.error);
294
- shouldStop = true;
295
- break;
296
- }
297
- if (parsed.done) {
298
- shouldStop = true;
299
- flushBuffer(true); // final flush, remove cursor
300
- break;
301
- }
302
-
303
- const delta = typeof parsed === 'string' ? parsed : parsed?.content ?? '';
304
-
305
- if (!assistantId) {
306
- assistantId = createTempId();
307
- localMessages.push({
308
- id: assistantId,
309
- role: 'assistant',
310
- content: delta,
311
- chunkIndex: currentChunkIndex
312
- });
313
- } else {
314
- textBuffer += delta;
315
- }
316
 
317
- // Schedule smooth UI update
318
- scheduleFlush();
319
- }
320
- }
321
- } catch (error) {
322
- console.error(error);
323
- addMessageToChunk(
324
- { role: 'assistant', content: 'Sorry, something went wrong. Please try again.' },
325
- currentChunkIndex
326
- );
327
- } finally {
328
- setIsLoading(false);
329
- }
330
  };
331
 
332
- const handleSend = async (text) => {
333
- const userMessage = { role: 'user', content: text, chunkIndex: currentChunkIndex };
334
- addMessageToChunk(userMessage, currentChunkIndex);
335
- setIsLoading(true);
336
-
337
- try {
338
- const response = await fetch('/api/chat', {
339
- method: 'POST',
340
- headers: { 'Content-Type': 'application/json' },
341
- body: JSON.stringify({
342
- messages: getGlobalChatHistory(),
343
- currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
344
- document: documentData ? JSON.stringify(documentData) : ''
345
- })
346
- });
347
-
348
- const data = await response.json();
349
- addMessageToChunk(
350
- { role: 'assistant', content: data.content || 'Sorry, no response received.' },
351
- currentChunkIndex
352
- );
353
- } catch (error) {
354
- console.error('Error:', error);
355
- addMessageToChunk(
356
- { role: 'assistant', content: 'Sorry, something went wrong. Please try again.' },
357
- currentChunkIndex
358
- );
359
- } finally {
360
- setIsLoading(false);
361
- }
362
- };
363
 
364
  return (
365
  <>
@@ -449,11 +173,10 @@ const ChunkPanel = ({
449
  {showChat && (
450
  <div className="relative flex-1 overflow-hidden">
451
  <SimpleChat
452
- messages={getGlobalChatHistory()}
453
  currentChunkIndex={currentChunkIndex}
454
- canEdit={canEditChunk(currentChunkIndex)}
455
  onSend={handleSendStreaming}
456
- isLoading={isLoading || isTransitioning}
457
  />
458
  </div>
459
  )}
 
5
  import { getChatMarkdownComponents, getTitleMarkdownComponents } from '../utils/markdownComponents.jsx';
6
  import SimpleChat from './SimpleChat.jsx';
7
  import ChunkLoadingTips from './ChunkLoadingTips.jsx';
8
+ import React, { useEffect } from 'react';
9
+ import { createTextStreamResponse } from 'ai';
10
 
11
  const ChunkPanel = ({
12
  documentData,
13
  currentChunkIndex,
14
  showChat,
 
15
  updateGlobalChatHistory,
16
  getGlobalChatHistory,
17
  addMessageToChunk,
18
  getCurrentChunkMessages,
19
  hasChunkMessages,
 
 
 
20
  markChunkUnderstood,
21
  skipChunk,
22
+ goToPrevChunk,
23
+ streamResponse,
24
+ isChunkLoading
25
  }) => {
26
 
27
  const chatMarkdownComponents = getChatMarkdownComponents();
28
  const titleMarkdownComponents = getTitleMarkdownComponents();
 
29
 
30
  // Generate greeting for chunks that don't have messages yet
31
  // Only for initial chunk (0) and when not transitioning
32
  useEffect(() => {
33
+ if (documentData && showChat && !hasChunkMessages(currentChunkIndex) && currentChunkIndex === 0) {
34
  generateGreetingStreaming();
35
  }
36
+ }, [currentChunkIndex, documentData, showChat]);
37
 
38
  const updateLastAssistantMessage = (delta) => {
39
  const allMessages = getGlobalChatHistory();
 
58
  };
59
 
60
  const generateGreetingStreaming = async () => {
61
+ const requestBody = JSON.stringify({
62
+ messages: [],
63
+ currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
64
+ document: documentData ? JSON.stringify(documentData) : ''
65
+ });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
 
67
+ streamResponse(requestBody, false);
 
 
 
 
 
 
 
 
 
 
 
 
68
  };
69
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
 
71
  const handleSendStreaming = async (text) => {
72
  const userMessage = { role: 'user', content: text, chunkIndex: currentChunkIndex };
73
  addMessageToChunk(userMessage, currentChunkIndex);
 
74
 
75
+ // Build the messages array manually to include the user message immediately
76
+ const messagesWithUserMessage = [...getGlobalChatHistory(), userMessage];
 
 
 
 
 
 
 
 
 
 
 
77
 
78
+ const requestBody = JSON.stringify({
79
+ messages: messagesWithUserMessage,
80
+ currentChunk: documentData?.chunks?.[currentChunkIndex]?.text || '',
81
+ document: documentData ? JSON.stringify(documentData) : ''
82
+ });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
83
 
84
+ streamResponse(requestBody, false);
 
 
 
 
 
 
 
 
 
 
 
 
85
  };
86
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
  return (
89
  <>
 
173
  {showChat && (
174
  <div className="relative flex-1 overflow-hidden">
175
  <SimpleChat
176
+ messages={getCurrentChunkMessages()}
177
  currentChunkIndex={currentChunkIndex}
 
178
  onSend={handleSendStreaming}
179
+ isLoading={isChunkLoading(currentChunkIndex)}
180
  />
181
  </div>
182
  )}
frontend/src/components/DocumentProcessor.jsx CHANGED
@@ -17,7 +17,6 @@ function DocumentProcessor() {
17
  const [pdfNavigation, setPdfNavigation] = useState(null);
18
  // State for first LLM response loading
19
  const [waitingForFirstResponse, setWaitingForFirstResponse] = useState(false);
20
-
21
  // Custom hooks
22
  const {
23
  fileInputRef,
@@ -35,7 +34,6 @@ function DocumentProcessor() {
35
  currentChunkIndex,
36
  chunkExpanded,
37
  showChat,
38
- isTransitioning,
39
  goToNextChunk,
40
  goToPrevChunk,
41
  skipChunk,
@@ -49,9 +47,9 @@ function DocumentProcessor() {
49
  addMessageToChunk,
50
  getCurrentChunkMessages,
51
  hasChunkMessages,
52
- isChunkCompleted,
53
- canEditChunk
54
- } = useChunkNavigation(documentData, null);
55
 
56
  const {
57
  leftPanelWidth,
@@ -138,7 +136,7 @@ function DocumentProcessor() {
138
  </div>
139
  </div>
140
 
141
- {/* Resizable Divider */}
142
  <div
143
  className="flex items-center justify-center cursor-col-resize group transition-all duration-200"
144
  style={{ width: '8px' }}
@@ -172,18 +170,17 @@ function DocumentProcessor() {
172
  documentData={documentData}
173
  currentChunkIndex={currentChunkIndex}
174
  showChat={showChat}
175
- isTransitioning={isTransitioning}
176
  updateGlobalChatHistory={updateGlobalChatHistory}
177
  getGlobalChatHistory={getGlobalChatHistory}
178
  addMessageToChunk={addMessageToChunk}
179
  getCurrentChunkMessages={getCurrentChunkMessages}
180
  hasChunkMessages={hasChunkMessages}
181
- isChunkCompleted={isChunkCompleted}
182
- canEditChunk={canEditChunk}
183
  setWaitingForFirstResponse={setWaitingForFirstResponse}
 
184
  markChunkUnderstood={markChunkUnderstood}
185
  skipChunk={skipChunk}
186
  goToPrevChunk={goToPrevChunk}
 
187
  />
188
  </div>
189
  </div>
 
17
  const [pdfNavigation, setPdfNavigation] = useState(null);
18
  // State for first LLM response loading
19
  const [waitingForFirstResponse, setWaitingForFirstResponse] = useState(false);
 
20
  // Custom hooks
21
  const {
22
  fileInputRef,
 
34
  currentChunkIndex,
35
  chunkExpanded,
36
  showChat,
 
37
  goToNextChunk,
38
  goToPrevChunk,
39
  skipChunk,
 
47
  addMessageToChunk,
48
  getCurrentChunkMessages,
49
  hasChunkMessages,
50
+ isChunkLoading,
51
+ streamResponse
52
+ } = useChunkNavigation(documentData);
53
 
54
  const {
55
  leftPanelWidth,
 
136
  </div>
137
  </div>
138
 
139
+ {/* Resizable Divider */}
140
  <div
141
  className="flex items-center justify-center cursor-col-resize group transition-all duration-200"
142
  style={{ width: '8px' }}
 
170
  documentData={documentData}
171
  currentChunkIndex={currentChunkIndex}
172
  showChat={showChat}
 
173
  updateGlobalChatHistory={updateGlobalChatHistory}
174
  getGlobalChatHistory={getGlobalChatHistory}
175
  addMessageToChunk={addMessageToChunk}
176
  getCurrentChunkMessages={getCurrentChunkMessages}
177
  hasChunkMessages={hasChunkMessages}
 
 
178
  setWaitingForFirstResponse={setWaitingForFirstResponse}
179
+ isChunkLoading={isChunkLoading}
180
  markChunkUnderstood={markChunkUnderstood}
181
  skipChunk={skipChunk}
182
  goToPrevChunk={goToPrevChunk}
183
+ streamResponse={streamResponse}
184
  />
185
  </div>
186
  </div>
frontend/src/components/SimpleChat.jsx CHANGED
@@ -5,30 +5,23 @@ import rehypeKatex from 'rehype-katex';
5
  import rehypeRaw from 'rehype-raw';
6
  import { getChatMarkdownComponents } from '../utils/markdownComponents.jsx';
7
 
8
- const SimpleChat = ({ messages, currentChunkIndex, canEdit, onSend, isLoading }) => {
9
  const [input, setInput] = useState('');
10
  const containerRef = useRef(null);
11
  const anchorRef = useRef(null); // <- will be a tiny zero-height anchor BEFORE the bubble
12
-
13
  const handleSubmit = (e) => {
14
  e.preventDefault();
15
- if (!input.trim() || isLoading || !canEdit) return;
16
  onSend(input.trim());
17
  setInput('');
18
  };
19
 
20
- // Determine the latest message index for this chunk (same as you had)
21
  const { anchorIndex, firstInChunkIndex } = useMemo(() => {
22
- let first = -1;
23
- let last = -1;
24
- for (let i = 0; i < messages.length; i++) {
25
- if (messages[i].chunkIndex === currentChunkIndex) {
26
- if (first === -1) first = i;
27
- last = i;
28
- }
29
- }
30
- return { anchorIndex: last !== -1 ? last : first, firstInChunkIndex: first };
31
- }, [messages, currentChunkIndex]);
32
 
33
  // Scroll by scrolling the ZERO-HEIGHT anchor into view AFTER layout commits.
34
  const scrollAfterLayout = () => {
@@ -68,7 +61,6 @@ const SimpleChat = ({ messages, currentChunkIndex, canEdit, onSend, isLoading })
68
  className="flex-1 min-h-0 overflow-y-auto p-4 flex flex-col space-y-3"
69
  >
70
  {messages.map((message, idx) => {
71
- const isCurrentChunk = message.chunkIndex === currentChunkIndex;
72
  const isAnchor = idx === anchorIndex;
73
 
74
  // Render a zero-height anchor just BEFORE the bubble for the anchor index.
@@ -80,10 +72,10 @@ const SimpleChat = ({ messages, currentChunkIndex, canEdit, onSend, isLoading })
80
 
81
  <div className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}>
82
  <div
83
- className={`max-w-[90%] p-3 rounded-lg transition-opacity ${
84
  message.role === 'user'
85
- ? `bg-gray-100 text-white ${isCurrentChunk ? 'opacity-100' : 'opacity-40'}`
86
- : `bg-white text-gray-900 ${isCurrentChunk ? 'opacity-100' : 'opacity-40'}`
87
  }`}
88
  >
89
  <ReactMarkdown
@@ -121,11 +113,11 @@ const SimpleChat = ({ messages, currentChunkIndex, canEdit, onSend, isLoading })
121
  className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}
122
  >
123
  <div
124
- className={`max-w-[90%] p-3 rounded-lg transition-opacity ${
125
- message.role === 'user'
126
- ? `bg-gray-100 text-white ${isCurrentChunk ? 'opacity-100' : 'opacity-40'}`
127
- : `bg-white text-gray-900 ${isCurrentChunk ? 'opacity-100' : 'opacity-40'}`
128
- }`}
129
  >
130
  <ReactMarkdown
131
  remarkPlugins={[remarkMath]}
@@ -165,13 +157,13 @@ const SimpleChat = ({ messages, currentChunkIndex, canEdit, onSend, isLoading })
165
  type="text"
166
  value={input}
167
  onChange={(e) => setInput(e.target.value)}
168
- placeholder={canEdit ? 'Type your message...' : 'This chunk is completed - navigation only'}
169
- disabled={isLoading || !canEdit}
170
  className="flex-1 px-3 py-2 border rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 disabled:bg-gray-100 disabled:text-gray-500"
171
  />
172
  <button
173
  type="submit"
174
- disabled={!input.trim() || isLoading || !canEdit}
175
  className="px-4 py-2 bg-blue-500 text-white rounded-lg hover:bg-blue-600 disabled:bg-gray-300 disabled:cursor-not-allowed"
176
  >
177
  {isLoading ? '...' : 'Send'}
 
5
  import rehypeRaw from 'rehype-raw';
6
  import { getChatMarkdownComponents } from '../utils/markdownComponents.jsx';
7
 
8
+ const SimpleChat = ({ messages, currentChunkIndex, onSend, isLoading }) => {
9
  const [input, setInput] = useState('');
10
  const containerRef = useRef(null);
11
  const anchorRef = useRef(null); // <- will be a tiny zero-height anchor BEFORE the bubble
 
12
  const handleSubmit = (e) => {
13
  e.preventDefault();
14
+ if (!input.trim() || isLoading ) return;
15
  onSend(input.trim());
16
  setInput('');
17
  };
18
 
19
+ // Since messages are now filtered to current chunk only, use the last message for anchoring
20
  const { anchorIndex, firstInChunkIndex } = useMemo(() => {
21
+ const lastIndex = messages.length > 0 ? messages.length - 1 : -1;
22
+ const firstIndex = messages.length > 0 ? 0 : -1;
23
+ return { anchorIndex: lastIndex, firstInChunkIndex: firstIndex };
24
+ }, [messages]);
 
 
 
 
 
 
25
 
26
  // Scroll by scrolling the ZERO-HEIGHT anchor into view AFTER layout commits.
27
  const scrollAfterLayout = () => {
 
61
  className="flex-1 min-h-0 overflow-y-auto p-4 flex flex-col space-y-3"
62
  >
63
  {messages.map((message, idx) => {
 
64
  const isAnchor = idx === anchorIndex;
65
 
66
  // Render a zero-height anchor just BEFORE the bubble for the anchor index.
 
72
 
73
  <div className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}>
74
  <div
75
+ className={`max-w-[90%] p-3 rounded-lg ${
76
  message.role === 'user'
77
+ ? 'bg-gray-100 text-white'
78
+ : 'bg-white text-gray-900'
79
  }`}
80
  >
81
  <ReactMarkdown
 
113
  className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}
114
  >
115
  <div
116
+ className={`max-w-[90%] p-3 rounded-lg ${
117
+ message.role === 'user'
118
+ ? 'bg-gray-100 text-white'
119
+ : 'bg-white text-gray-900'
120
+ }`}
121
  >
122
  <ReactMarkdown
123
  remarkPlugins={[remarkMath]}
 
157
  type="text"
158
  value={input}
159
  onChange={(e) => setInput(e.target.value)}
160
+ placeholder={'Type your message...'}
161
+ disabled={isLoading }
162
  className="flex-1 px-3 py-2 border rounded-lg focus:outline-none focus:ring-2 focus:ring-blue-500 disabled:bg-gray-100 disabled:text-gray-500"
163
  />
164
  <button
165
  type="submit"
166
+ disabled={!input.trim() || isLoading}
167
  className="px-4 py-2 bg-blue-500 text-white rounded-lg hover:bg-blue-600 disabled:bg-gray-300 disabled:cursor-not-allowed"
168
  >
169
  {isLoading ? '...' : 'Send'}
frontend/src/hooks/useChunkNavigation.js CHANGED
@@ -1,19 +1,124 @@
1
  import { useState } from 'react';
2
 
3
- export const useChunkNavigation = (documentData, clearTypingAnimation) => {
4
  const [chunkStates, setChunkStates] = useState({});
5
  const [currentChunkIndex, setCurrentChunkIndex] = useState(0);
6
  const [chunkExpanded, setChunkExpanded] = useState(true);
7
  const [globalChatHistory, setGlobalChatHistory] = useState([]);
8
  const [showChat, setShowChat] = useState(true);
9
- const [isTransitioning, setIsTransitioning] = useState(false);
10
- const [completedChunks, setCompletedChunks] = useState(new Set());
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  const goToNextChunk = () => {
13
  if (documentData && currentChunkIndex < documentData.chunks.length - 1) {
14
- if (clearTypingAnimation) {
15
- clearTypingAnimation();
16
- }
17
  setCurrentChunkIndex(currentChunkIndex + 1);
18
  setChunkExpanded(true);
19
  }
@@ -21,9 +126,6 @@ export const useChunkNavigation = (documentData, clearTypingAnimation) => {
21
 
22
  const goToPrevChunk = () => {
23
  if (currentChunkIndex > 0) {
24
- if (clearTypingAnimation) {
25
- clearTypingAnimation();
26
- }
27
  setCurrentChunkIndex(currentChunkIndex - 1);
28
  setChunkExpanded(true);
29
  }
@@ -31,57 +133,29 @@ export const useChunkNavigation = (documentData, clearTypingAnimation) => {
31
 
32
  const sendAutomatedMessage = async (action) => {
33
  if (!documentData || currentChunkIndex >= documentData.chunks.length - 1) return;
34
-
35
- setIsTransitioning(true);
36
  const nextChunkIndex = currentChunkIndex + 1;
 
37
  const nextChunk = documentData.chunks[nextChunkIndex];
38
-
39
- // Mark current chunk as completed
40
- setCompletedChunks(prev => new Set(prev).add(currentChunkIndex));
41
-
42
  // Update chunk index immediately for UI feedback
43
  setCurrentChunkIndex(nextChunkIndex);
44
- setChunkExpanded(true);
45
 
46
  // Check if we already have messages for this chunk
47
  if (hasChunkMessages(nextChunkIndex)) {
48
  // Don't generate new response, just navigate
49
- setIsTransitioning(false);
50
  return;
51
  }
52
 
53
- try {
54
- const response = await fetch('/api/chat', {
55
- method: 'POST',
56
- headers: { 'Content-Type': 'application/json' },
57
- body: JSON.stringify({
58
- messages: globalChatHistory,
59
- currentChunk: documentData.chunks[currentChunkIndex]?.text || '',
60
- nextChunk: nextChunk.text,
61
- action: action,
62
- document: documentData ? JSON.stringify(documentData) : ''
63
- })
64
- });
65
 
66
- const data = await response.json();
67
- addMessageToChunk(
68
- { role: 'assistant', content: data.content || 'Let\'s continue to the next section.' },
69
- nextChunkIndex
70
- );
71
-
72
- // Clear any animations after successful response
73
- if (clearTypingAnimation) {
74
- clearTypingAnimation();
75
- }
76
- } catch (error) {
77
- console.error('Error in automated transition:', error);
78
- // Clear animations on error too
79
- if (clearTypingAnimation) {
80
- clearTypingAnimation();
81
- }
82
- } finally {
83
- setIsTransitioning(false);
84
- }
85
  };
86
 
87
  const skipChunk = () => {
@@ -125,12 +199,8 @@ export const useChunkNavigation = (documentData, clearTypingAnimation) => {
125
  return globalChatHistory.some(msg => msg.chunkIndex === chunkIndex);
126
  };
127
 
128
- const isChunkCompleted = (chunkIndex) => {
129
- return completedChunks.has(chunkIndex);
130
- };
131
-
132
- const canEditChunk = (chunkIndex) => {
133
- return chunkIndex === currentChunkIndex && !isChunkCompleted(chunkIndex);
134
  };
135
 
136
  return {
@@ -138,7 +208,6 @@ export const useChunkNavigation = (documentData, clearTypingAnimation) => {
138
  currentChunkIndex,
139
  chunkExpanded,
140
  showChat,
141
- isTransitioning,
142
  goToNextChunk,
143
  goToPrevChunk,
144
  skipChunk,
@@ -152,7 +221,7 @@ export const useChunkNavigation = (documentData, clearTypingAnimation) => {
152
  addMessageToChunk,
153
  getCurrentChunkMessages,
154
  hasChunkMessages,
155
- isChunkCompleted,
156
- canEditChunk
157
  };
158
  };
 
1
  import { useState } from 'react';
2
 
3
+ export const useChunkNavigation = (documentData) => {
4
  const [chunkStates, setChunkStates] = useState({});
5
  const [currentChunkIndex, setCurrentChunkIndex] = useState(0);
6
  const [chunkExpanded, setChunkExpanded] = useState(true);
7
  const [globalChatHistory, setGlobalChatHistory] = useState([]);
8
  const [showChat, setShowChat] = useState(true);
9
+ const [loadingChunkIndex, setLoadingChunkIndex] = useState(null);
10
+
11
+ const streamResponse = async (requestBody, isAutomated, nextChunkIndex) => {
12
+ const targetChunkIndex = nextChunkIndex || currentChunkIndex;
13
+ setLoadingChunkIndex(targetChunkIndex);
14
+
15
+ try {
16
+ const response = await fetch('/api/chat/stream', {
17
+ method: 'POST',
18
+ headers: { 'Content-Type': 'application/json' },
19
+ body: requestBody
20
+ });
21
+
22
+ const reader = await response.body.getReader()
23
+
24
+ let shouldStop = false;
25
+ const parsedBody = JSON.parse(requestBody);
26
+ let localMessages = [...parsedBody.messages];
27
+
28
+ const createTempId = () => `assistant_${Date.now()}_${Math.random().toString(36).slice(2)}`;
29
+ let assistantId = null;
30
+
31
+ // SSE read buffer
32
+ let sseBuffer = '';
33
+
34
+ // Streaming smoothness buffer
35
+ let textBuffer = '';
36
+ let frameScheduled = false;
37
+
38
+ const flushBuffer = (isFinal = false) => {
39
+ if (!assistantId) return;
40
+
41
+ const lastMsg = localMessages[localMessages.length - 1];
42
+ if (lastMsg.id === assistantId) {
43
+ // Append buffered text
44
+ lastMsg.content += textBuffer;
45
+ textBuffer = '';
46
+ }
47
+ updateGlobalChatHistory([...localMessages]);
48
+ };
49
+
50
+ const scheduleFlush = () => {
51
+ if (!frameScheduled) {
52
+ frameScheduled = true;
53
+ requestAnimationFrame(() => {
54
+ flushBuffer();
55
+ frameScheduled = false;
56
+ });
57
+ }
58
+ };
59
+ while (!shouldStop) {
60
+ const { done, value } = await reader.read();
61
+ if (done) break;
62
+
63
+ sseBuffer += new TextDecoder().decode(value);
64
+ const parts = sseBuffer.split('\n\n');
65
+ sseBuffer = parts.pop(); // keep last partial
66
+
67
+ for (const part of parts) {
68
+ if (!part.startsWith('data:')) continue;
69
+ const jsonStr = part.slice(5).trim();
70
+ if (!jsonStr) continue;
71
+
72
+ let parsed;
73
+ try {
74
+ parsed = JSON.parse(jsonStr);
75
+ } catch (err) {
76
+ console.warn('Could not JSON.parse stream chunk', jsonStr);
77
+ continue;
78
+ }
79
+
80
+ if (parsed.error) {
81
+ console.error('streaming error', parsed.error);
82
+ shouldStop = true;
83
+ break;
84
+ }
85
+ if (parsed.done) {
86
+ shouldStop = true;
87
+ flushBuffer(true); // final flush, remove cursor
88
+ break;
89
+ }
90
+
91
+ const delta = typeof parsed === 'string' ? parsed : parsed?.content ?? '';
92
+
93
+ if (!assistantId) {
94
+ assistantId = createTempId();
95
+ localMessages.push({
96
+ id: assistantId,
97
+ role: 'assistant',
98
+ content: delta,
99
+ chunkIndex: nextChunkIndex ? nextChunkIndex : currentChunkIndex
100
+ });
101
+ } else {
102
+ textBuffer += delta;
103
+ }
104
+
105
+ // Schedule smooth UI update
106
+ scheduleFlush();
107
+ }
108
+ }
109
+ } catch (error) {
110
+ console.error(error);
111
+ addMessageToChunk(
112
+ { role: 'assistant', content: 'Sorry, something went wrong. Please try again.' },
113
+ currentChunkIndex
114
+ );
115
+ } finally {
116
+ setLoadingChunkIndex(null);
117
+ }
118
+ };
119
 
120
  const goToNextChunk = () => {
121
  if (documentData && currentChunkIndex < documentData.chunks.length - 1) {
 
 
 
122
  setCurrentChunkIndex(currentChunkIndex + 1);
123
  setChunkExpanded(true);
124
  }
 
126
 
127
  const goToPrevChunk = () => {
128
  if (currentChunkIndex > 0) {
 
 
 
129
  setCurrentChunkIndex(currentChunkIndex - 1);
130
  setChunkExpanded(true);
131
  }
 
133
 
134
  const sendAutomatedMessage = async (action) => {
135
  if (!documentData || currentChunkIndex >= documentData.chunks.length - 1) return;
 
 
136
  const nextChunkIndex = currentChunkIndex + 1;
137
+ setLoadingChunkIndex(nextChunkIndex);
138
  const nextChunk = documentData.chunks[nextChunkIndex];
139
+
 
 
 
140
  // Update chunk index immediately for UI feedback
141
  setCurrentChunkIndex(nextChunkIndex);
 
142
 
143
  // Check if we already have messages for this chunk
144
  if (hasChunkMessages(nextChunkIndex)) {
145
  // Don't generate new response, just navigate
146
+ setLoadingChunkIndex(null);
147
  return;
148
  }
149
 
150
+ const requestBody = JSON.stringify({
151
+ messages: globalChatHistory,
152
+ currentChunk: documentData.chunks[currentChunkIndex]?.text || '',
153
+ nextChunk: nextChunk.text,
154
+ action: action,
155
+ document: documentData ? JSON.stringify(documentData) : ''
156
+ })
 
 
 
 
 
157
 
158
+ streamResponse(requestBody, true, nextChunkIndex);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
159
  };
160
 
161
  const skipChunk = () => {
 
199
  return globalChatHistory.some(msg => msg.chunkIndex === chunkIndex);
200
  };
201
 
202
+ const isChunkLoading = (chunkIndex) => {
203
+ return loadingChunkIndex === chunkIndex;
 
 
 
 
204
  };
205
 
206
  return {
 
208
  currentChunkIndex,
209
  chunkExpanded,
210
  showChat,
 
211
  goToNextChunk,
212
  goToPrevChunk,
213
  skipChunk,
 
221
  addMessageToChunk,
222
  getCurrentChunkMessages,
223
  hasChunkMessages,
224
+ isChunkLoading,
225
+ streamResponse
226
  };
227
  };