// SokratesAI — frontend/src/hooks/useChunkNavigation.js
// (commit 63428e7, "Add multiple chats", by Alleinzellgaenger)
import { useState } from 'react';
export const useChunkNavigation = (documentData) => {
const [chunkStates, setChunkStates] = useState({});
const [currentChunkIndex, setCurrentChunkIndex] = useState(0);
const [chunkExpanded, setChunkExpanded] = useState(true);
const [globalChatHistory, setGlobalChatHistory] = useState([]);
const [showChat, setShowChat] = useState(true);
const [loadingChunkIndex, setLoadingChunkIndex] = useState(null);
/**
 * POST `requestBody` to the streaming chat endpoint and fold the server-sent
 * events into the global chat history as they arrive.
 *
 * @param {string} requestBody - JSON string; its `messages` array seeds the
 *   local message list that the streamed assistant reply is appended to.
 * @param {boolean} isAutomated - true when triggered by skip/understood
 *   (currently unused here; kept for the callers' contract).
 * @param {number} [nextChunkIndex] - chunk the reply belongs to; defaults to
 *   the current chunk.
 */
const streamResponse = async (requestBody, isAutomated, nextChunkIndex) => {
  // `??` (not `||`) so an explicit chunk index of 0 is respected.
  const targetChunkIndex = nextChunkIndex ?? currentChunkIndex;
  setLoadingChunkIndex(targetChunkIndex);
  try {
    const response = await fetch('/api/chat/stream', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: requestBody
    });
    if (!response.ok || !response.body) {
      throw new Error(`Stream request failed with status ${response.status}`);
    }
    // getReader() is synchronous — no await needed.
    const reader = response.body.getReader();
    // One stateful decoder for the whole stream so multi-byte UTF-8
    // sequences split across network chunks decode correctly.
    const decoder = new TextDecoder();
    let shouldStop = false;
    const parsedBody = JSON.parse(requestBody);
    let localMessages = [...parsedBody.messages];
    const createTempId = () => `assistant_${Date.now()}_${Math.random().toString(36).slice(2)}`;
    let assistantId = null;
    // SSE read buffer; may end mid-event between reads.
    let sseBuffer = '';
    // Text accumulated between animation frames for smooth rendering.
    let textBuffer = '';
    let frameScheduled = false;
    const flushBuffer = () => {
      if (!assistantId) return;
      const lastIdx = localMessages.length - 1;
      const lastMsg = localMessages[lastIdx];
      if (lastMsg.id === assistantId) {
        // Replace rather than mutate: the old object may already be in
        // React state, and memoized children compare by reference.
        localMessages[lastIdx] = { ...lastMsg, content: lastMsg.content + textBuffer };
        textBuffer = '';
      }
      updateGlobalChatHistory([...localMessages]);
    };
    // Coalesce UI updates to at most one per animation frame.
    const scheduleFlush = () => {
      if (frameScheduled) return;
      frameScheduled = true;
      requestAnimationFrame(() => {
        flushBuffer();
        frameScheduled = false;
      });
    };
    while (!shouldStop) {
      const { done, value } = await reader.read();
      if (done) break;
      sseBuffer += decoder.decode(value, { stream: true });
      const parts = sseBuffer.split('\n\n');
      sseBuffer = parts.pop(); // keep trailing partial event
      for (const part of parts) {
        if (!part.startsWith('data:')) continue;
        const jsonStr = part.slice(5).trim();
        if (!jsonStr) continue;
        let parsed;
        try {
          parsed = JSON.parse(jsonStr);
        } catch (err) {
          console.warn('Could not JSON.parse stream chunk', jsonStr);
          continue;
        }
        if (parsed.error) {
          console.error('streaming error', parsed.error);
          shouldStop = true;
          break;
        }
        if (parsed.done) {
          shouldStop = true;
          flushBuffer(); // final flush for this stream
          break;
        }
        const delta = typeof parsed === 'string' ? parsed : parsed?.content ?? '';
        if (!assistantId) {
          // First token: create the assistant message with the delta inline.
          assistantId = createTempId();
          localMessages.push({
            id: assistantId,
            role: 'assistant',
            content: delta,
            // `??` so a next-chunk index of 0 tags messages correctly.
            chunkIndex: nextChunkIndex ?? currentChunkIndex
          });
        } else {
          textBuffer += delta;
        }
        // Schedule smooth UI update.
        scheduleFlush();
      }
    }
    // The reader can end (done) with text still buffered; flush the remainder.
    flushBuffer();
  } catch (error) {
    console.error(error);
    addMessageToChunk(
      { role: 'assistant', content: 'Sorry, something went wrong. Please try again.' },
      currentChunkIndex
    );
  } finally {
    setLoadingChunkIndex(null);
  }
};
// Advance to the next chunk (if one exists) and expand its text panel.
const goToNextChunk = () => {
  if (!documentData) return;
  if (currentChunkIndex >= documentData.chunks.length - 1) return;
  setCurrentChunkIndex(currentChunkIndex + 1);
  setChunkExpanded(true);
};
// Step back to the previous chunk and expand its text panel.
const goToPrevChunk = () => {
  if (currentChunkIndex <= 0) return;
  setCurrentChunkIndex(currentChunkIndex - 1);
  setChunkExpanded(true);
};
/**
 * Advance to the next chunk and, unless it already has messages, ask the
 * backend to generate a response for it.
 *
 * @param {'skip' | 'understood'} action - how the user dismissed the current chunk.
 * @returns {Promise<void> | undefined} resolves when streaming completes;
 *   undefined when there is no next chunk or it was already answered.
 */
const sendAutomatedMessage = async (action) => {
  // Nothing to do before a document loads or on the last chunk.
  if (!documentData || currentChunkIndex >= documentData.chunks.length - 1) return;
  const nextChunkIndex = currentChunkIndex + 1;
  setLoadingChunkIndex(nextChunkIndex);
  const nextChunk = documentData.chunks[nextChunkIndex];
  // Update chunk index immediately for UI feedback.
  setCurrentChunkIndex(nextChunkIndex);
  // If the next chunk was already answered, just navigate — no new request.
  if (hasChunkMessages(nextChunkIndex)) {
    setLoadingChunkIndex(null);
    return;
  }
  const requestBody = JSON.stringify({
    messages: globalChatHistory,
    // State hasn't re-rendered yet, so currentChunkIndex still names the
    // chunk the user is leaving — which is what the backend needs here.
    currentChunk: documentData.chunks[currentChunkIndex]?.text || '',
    nextChunk: nextChunk.text,
    action: action,
    // documentData is non-null here (guarded above).
    document: JSON.stringify(documentData)
  });
  // Return the promise so callers can await stream completion instead of
  // leaving it floating.
  return streamResponse(requestBody, true, nextChunkIndex);
};
// Skip the current chunk: advance and let the backend handle the 'skip' action.
const skipChunk = () => sendAutomatedMessage('skip');
// Mark the current chunk as understood and move on to the next one.
const markChunkUnderstood = () => sendAutomatedMessage('understood');
// Flag the current chunk as 'interactive' and hand off to the caller-supplied
// lesson starter with that chunk's index.
const startInteractiveLesson = (startChunkLessonFn) => {
  setChunkStates((prev) => ({ ...prev, [currentChunkIndex]: 'interactive' }));
  startChunkLessonFn(currentChunkIndex);
};
// Deliberate no-op retained so existing callers don't break: per-chunk
// status is no longer tracked by this hook.
const setChunkAsInteractive = () => {
// No longer tracking status - this is just for compatibility
};
// Replace the entire shared chat history in one shot.
const updateGlobalChatHistory = (messages) => setGlobalChatHistory(messages);
// Read accessor for the shared chat history.
const getGlobalChatHistory = () => globalChatHistory;
// Append one message to the history, tagging it with the chunk it belongs to.
const addMessageToChunk = (message, chunkIndex) =>
  setGlobalChatHistory((prev) => [...prev, { ...message, chunkIndex }]);
// Messages belonging to the chunk the user is currently viewing.
const getCurrentChunkMessages = () =>
  globalChatHistory.filter((msg) => msg.chunkIndex === currentChunkIndex);
// True when at least one message is tagged with the given chunk index.
const hasChunkMessages = (chunkIndex) =>
  globalChatHistory.some((msg) => msg.chunkIndex === chunkIndex);
// True while a streaming response for the given chunk is in flight.
const isChunkLoading = (chunkIndex) => loadingChunkIndex === chunkIndex;
// Public surface of the hook, consumed by the document viewer components.
return {
// --- state ---
chunkStates,
currentChunkIndex,
chunkExpanded,
showChat,
// --- navigation ---
goToNextChunk,
goToPrevChunk,
// --- chunk actions ---
skipChunk,
markChunkUnderstood,
startInteractiveLesson,
setChunkExpanded,
setShowChat,
setChunkAsInteractive,
// --- chat history accessors ---
updateGlobalChatHistory,
getGlobalChatHistory,
addMessageToChunk,
getCurrentChunkMessages,
hasChunkMessages,
isChunkLoading,
streamResponse
};
};