legends810 committed on
Commit
6188173
·
verified ·
1 Parent(s): 7eb2fdf

Update hooks/useCallAi.ts

Browse files
Files changed (1) hide show
  1. hooks/useCallAi.ts +318 -98
hooks/useCallAi.ts CHANGED
@@ -1,10 +1,7 @@
1
  import { useState, useRef } from "react";
2
  import { toast } from "sonner";
 
3
  import { Page } from "@/types";
4
- import {
5
- TITLE_PAGE_START,
6
- TITLE_PAGE_END,
7
- } from "@/lib/prompts";
8
 
9
  interface UseCallAiProps {
10
  onNewPrompt: (prompt: string) => void;
@@ -16,7 +13,6 @@ interface UseCallAiProps {
16
  pages: Page[];
17
  isAiWorking: boolean;
18
  setisAiWorking: React.Dispatch<React.SetStateAction<boolean>>;
19
- setAiThinking: React.Dispatch<React.SetStateAction<string | null>>;
20
  }
21
 
22
  export const useCallAi = ({
@@ -25,31 +21,25 @@ export const useCallAi = ({
25
  onScrollToBottom,
26
  setPages,
27
  setCurrentPage,
28
- setAiThinking,
29
  isAiWorking,
30
  setisAiWorking,
31
  }: UseCallAiProps) => {
32
  const audio = useRef<HTMLAudioElement | null>(null);
33
  const [controller, setController] = useState<AbortController | null>(null);
34
 
35
- const callAiNewProject = async (
36
- prompt: string,
37
- model: string | undefined,
38
- provider: string | undefined,
39
- redesignMarkdown?: string
40
- ) => {
41
  if (isAiWorking) return;
42
  if (!redesignMarkdown && !prompt.trim()) return;
43
-
44
  setisAiWorking(true);
45
- setAiThinking(""); // Clear previous thinking text
46
-
47
  const abortController = new AbortController();
48
  setController(abortController);
49
-
50
  try {
51
  onNewPrompt(prompt);
52
-
53
  const request = await fetch("/api/ask-ai", {
54
  method: "POST",
55
  body: JSON.stringify({
@@ -65,58 +55,184 @@ export const useCallAi = ({
65
  signal: abortController.signal,
66
  });
67
 
68
- if (!request.body) {
69
- throw new Error("The response body is empty.");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
 
72
- const reader = request.body.getReader();
73
- const decoder = new TextDecoder("utf-8");
74
- let accumulatedResponse = "";
75
- let inCodeBlock = false;
76
-
77
- const read = async () => {
78
- const { done, value } = await reader.read();
79
- if (done) {
80
- toast.success("AI has finished generating.");
81
- setisAiWorking(false);
82
- if (audio.current) audio.current.play();
83
- onSuccess(formatPages(accumulatedResponse), prompt);
84
- return { success: true };
85
- }
 
86
 
87
- const chunk = decoder.decode(value, { stream: true });
88
- accumulatedResponse += chunk;
89
-
90
- // Handle thinking and code separation
91
- if (!inCodeBlock) {
92
- const codeStartIndex = accumulatedResponse.indexOf(TITLE_PAGE_START);
93
- if (codeStartIndex !== -1) {
94
- const thinkingText = accumulatedResponse.substring(0, codeStartIndex);
95
- setAiThinking(thinkingText);
 
 
 
 
 
 
96
 
97
- const codePart = accumulatedResponse.substring(codeStartIndex);
98
- formatPages(codePart); // Start rendering code
99
- inCodeBlock = true;
100
- } else {
101
- setAiThinking(accumulatedResponse); // It's still thinking
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
102
  }
103
- } else {
104
- formatPages(accumulatedResponse); // Continue rendering code
105
- }
106
 
107
- return read();
108
- };
109
-
110
- return await read();
 
 
 
 
 
 
 
 
 
 
111
 
 
 
 
112
  } catch (error: any) {
113
  setisAiWorking(false);
114
- setAiThinking(null); // Clear thinking text on error
115
- if (error.name === 'AbortError') {
116
- toast.info("AI generation stopped.");
117
- return { error: "aborted" };
118
  }
119
- toast.error(error.message || "An unexpected error occurred.");
120
  return { error: "network_error", message: error.message };
121
  }
122
  };
@@ -126,7 +242,6 @@ export const useCallAi = ({
126
  if (!prompt.trim()) return;
127
 
128
  setisAiWorking(true);
129
- setAiThinking(""); // Clear previous thinking text
130
 
131
  const abortController = new AbortController();
132
  setController(abortController);
@@ -152,39 +267,48 @@ export const useCallAi = ({
152
  signal: abortController.signal,
153
  });
154
 
155
- if (request.body) {
156
- // Since PUT is not streaming in the original code, we will handle it similarly but add thinking text.
157
- // We will assume the non-streaming response for PUT contains the thinking part first.
158
  const res = await request.json();
159
 
160
  if (!request.ok) {
161
- setisAiWorking(false);
162
- setAiThinking(null);
163
- // Handle specific errors like login, pro, etc.
164
- toast.error(res.message || "An error occurred during follow-up.");
165
- return { error: "api_error", message: res.message };
 
 
 
 
 
 
 
 
 
166
  }
167
-
168
- // Let's assume the response for PUT is not streamed and contains the full text.
169
- // We'll have to parse it. For now, we just update pages.
170
  toast.success("AI responded successfully");
171
  setisAiWorking(false);
 
172
  setPages(res.pages);
173
  onSuccess(res.pages, prompt, res.updatedLines);
174
 
175
  if (audio.current) audio.current.play();
176
 
177
- return { success: true };
178
  }
 
179
  } catch (error: any) {
180
  setisAiWorking(false);
181
- setAiThinking(null);
182
  toast.error(error.message);
 
 
 
183
  return { error: "network_error", message: error.message };
184
  }
185
  };
186
 
187
-
188
  const stopController = () => {
189
  if (controller) {
190
  controller.abort();
@@ -192,48 +316,144 @@ export const useCallAi = ({
192
  setisAiWorking(false);
193
  }
194
  };
195
-
196
  const formatPages = (content: string) => {
197
  const pages: Page[] = [];
198
- const pageRegex = new RegExp(`${TITLE_PAGE_START.replace(/[.*+?^${}()|[]\\]/g, '\\$&')}(.*?)${TITLE_PAGE_END.replace(/[.*+?^${}()|[]\\]/g, '\\$&')}`, "g");
199
-
200
- let lastIndex = 0;
201
- const thinkingText = content.split(pageRegex)[0] || "";
202
- if (thinkingText.trim()) {
203
- setAiThinking(thinkingText);
204
  }
205
-
206
- const matches = Array.from(content.matchAll(pageRegex));
207
- matches.forEach((match, i) => {
208
- const pagePath = match[1].trim();
209
- const startIndex = match.index! + match[0].length;
210
- const endIndex = i + 1 < matches.length ? matches[i+1].index! : content.length;
211
- const chunk = content.substring(startIndex, endIndex);
212
-
213
- const htmlContent = extractHtmlContent(chunk);
214
- if (htmlContent) {
215
- pages.push({ path: pagePath, html: htmlContent });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
216
  }
 
 
 
 
217
  });
218
-
219
  if (pages.length > 0) {
220
  setPages(pages);
221
  const lastPagePath = pages[pages.length - 1]?.path;
222
  setCurrentPage(lastPagePath || "index.html");
223
- onScrollToBottom();
224
  }
 
225
  return pages;
226
  };
227
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
228
  const extractHtmlContent = (chunk: string): string => {
229
- const htmlMatch = chunk.match(/``````/);
230
- return htmlMatch ? htmlMatch[1].trim() : chunk.trim();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
  };
232
 
233
  return {
234
  callAiNewProject,
235
  callAiFollowUp,
236
- callAiNewPage: callAiNewProject, // Rerouting for simplicity, can be expanded
237
  stopController,
238
  controller,
239
  audio,
 
1
  import { useState, useRef } from "react";
2
  import { toast } from "sonner";
3
+ import { MODELS } from "@/lib/providers";
4
  import { Page } from "@/types";
 
 
 
 
5
 
6
  interface UseCallAiProps {
7
  onNewPrompt: (prompt: string) => void;
 
13
  pages: Page[];
14
  isAiWorking: boolean;
15
  setisAiWorking: React.Dispatch<React.SetStateAction<boolean>>;
 
16
  }
17
 
18
  export const useCallAi = ({
 
21
  onScrollToBottom,
22
  setPages,
23
  setCurrentPage,
24
+ pages,
25
  isAiWorking,
26
  setisAiWorking,
27
  }: UseCallAiProps) => {
28
  const audio = useRef<HTMLAudioElement | null>(null);
29
  const [controller, setController] = useState<AbortController | null>(null);
30
 
31
+ const callAiNewProject = async (prompt: string, model: string | undefined, provider: string | undefined, redesignMarkdown?: string, handleThink?: (think: string) => void, onFinishThink?: () => void) => {
 
 
 
 
 
32
  if (isAiWorking) return;
33
  if (!redesignMarkdown && !prompt.trim()) return;
34
+
35
  setisAiWorking(true);
36
+
 
37
  const abortController = new AbortController();
38
  setController(abortController);
39
+
40
  try {
41
  onNewPrompt(prompt);
42
+
43
  const request = await fetch("/api/ask-ai", {
44
  method: "POST",
45
  body: JSON.stringify({
 
55
  signal: abortController.signal,
56
  });
57
 
58
+ if (request && request.body) {
59
+ const reader = request.body.getReader();
60
+ const decoder = new TextDecoder("utf-8");
61
+ const selectedModel = MODELS.find(
62
+ (m: { value: string }) => m.value === model
63
+ );
64
+ let contentResponse = "";
65
+
66
+ const read = async () => {
67
+ const { done, value } = await reader.read();
68
+ if (done) {
69
+ const isJson =
70
+ contentResponse.trim().startsWith("{") &&
71
+ contentResponse.trim().endsWith("}");
72
+ const jsonResponse = isJson ? JSON.parse(contentResponse) : null;
73
+
74
+ if (jsonResponse && !jsonResponse.ok) {
75
+ if (jsonResponse.openLogin) {
76
+ // Handle login required
77
+ return { error: "login_required" };
78
+ } else if (jsonResponse.openSelectProvider) {
79
+ // Handle provider selection required
80
+ return { error: "provider_required", message: jsonResponse.message };
81
+ } else if (jsonResponse.openProModal) {
82
+ // Handle pro modal required
83
+ return { error: "pro_required" };
84
+ } else {
85
+ toast.error(jsonResponse.message);
86
+ setisAiWorking(false);
87
+ return { error: "api_error", message: jsonResponse.message };
88
+ }
89
+ }
90
+
91
+ toast.success("AI responded successfully");
92
+ setisAiWorking(false);
93
+
94
+ if (audio.current) audio.current.play();
95
+
96
+ const newPages = formatPages(contentResponse);
97
+ onSuccess(newPages, prompt);
98
+
99
+ return { success: true, pages: newPages };
100
+ }
101
+
102
+ const chunk = decoder.decode(value, { stream: true });
103
+ contentResponse += chunk;
104
+
105
+ if (selectedModel?.isThinker) {
106
+ const thinkMatch = contentResponse.match(/<think>[\s\S]*/)?.[0];
107
+ if (thinkMatch && !contentResponse?.includes("</think>")) {
108
+ handleThink?.(thinkMatch.replace("<think>", "").trim());
109
+ return read();
110
+ }
111
+ }
112
+
113
+ if (contentResponse.includes("</think>")) {
114
+ onFinishThink?.();
115
+ }
116
+
117
+ formatPages(contentResponse);
118
+ return read();
119
+ };
120
+
121
+ return await read();
122
  }
123
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
124
+ } catch (error: any) {
125
+ setisAiWorking(false);
126
+ toast.error(error.message);
127
+ if (error.openLogin) {
128
+ return { error: "login_required" };
129
+ }
130
+ return { error: "network_error", message: error.message };
131
+ }
132
+ };
133
+
134
+ const callAiNewPage = async (prompt: string, model: string | undefined, provider: string | undefined, currentPagePath: string, previousPrompts?: string[]) => {
135
+ if (isAiWorking) return;
136
+ if (!prompt.trim()) return;
137
+
138
+ setisAiWorking(true);
139
+
140
+ const abortController = new AbortController();
141
+ setController(abortController);
142
+
143
+ try {
144
+ onNewPrompt(prompt);
145
 
146
+ const request = await fetch("/api/ask-ai", {
147
+ method: "POST",
148
+ body: JSON.stringify({
149
+ prompt,
150
+ provider,
151
+ model,
152
+ pages,
153
+ previousPrompts,
154
+ }),
155
+ headers: {
156
+ "Content-Type": "application/json",
157
+ "x-forwarded-for": window.location.hostname,
158
+ },
159
+ signal: abortController.signal,
160
+ });
161
 
162
+ if (request && request.body) {
163
+ const reader = request.body.getReader();
164
+ const decoder = new TextDecoder("utf-8");
165
+ const selectedModel = MODELS.find(
166
+ (m: { value: string }) => m.value === model
167
+ );
168
+ let contentResponse = "";
169
+
170
+ const read = async () => {
171
+ const { done, value } = await reader.read();
172
+ if (done) {
173
+ const isJson =
174
+ contentResponse.trim().startsWith("{") &&
175
+ contentResponse.trim().endsWith("}");
176
+ const jsonResponse = isJson ? JSON.parse(contentResponse) : null;
177
 
178
+ if (jsonResponse && !jsonResponse.ok) {
179
+ if (jsonResponse.openLogin) {
180
+ // Handle login required
181
+ return { error: "login_required" };
182
+ } else if (jsonResponse.openSelectProvider) {
183
+ // Handle provider selection required
184
+ return { error: "provider_required", message: jsonResponse.message };
185
+ } else if (jsonResponse.openProModal) {
186
+ // Handle pro modal required
187
+ return { error: "pro_required" };
188
+ } else {
189
+ toast.error(jsonResponse.message);
190
+ setisAiWorking(false);
191
+ return { error: "api_error", message: jsonResponse.message };
192
+ }
193
+ }
194
+
195
+ toast.success("AI responded successfully");
196
+ setisAiWorking(false);
197
+
198
+ if (selectedModel?.isThinker) {
199
+ // Reset to default model if using thinker model
200
+ // Note: You might want to add a callback for this
201
+ }
202
+
203
+ if (audio.current) audio.current.play();
204
+
205
+ const newPage = formatPage(contentResponse, currentPagePath);
206
+ if (!newPage) { return { error: "api_error", message: "Failed to format page" } }
207
+ onSuccess([...pages, newPage], prompt);
208
+
209
+ return { success: true, pages: [...pages, newPage] };
210
  }
 
 
 
211
 
212
+ const chunk = decoder.decode(value, { stream: true });
213
+ contentResponse += chunk;
214
+
215
+ if (selectedModel?.isThinker) {
216
+ const thinkMatch = contentResponse.match(/<think>[\s\S]*/)?.[0];
217
+ if (thinkMatch && !contentResponse?.includes("</think>")) {
218
+ // contentThink += chunk;
219
+ return read();
220
+ }
221
+ }
222
+
223
+ formatPage(contentResponse, currentPagePath);
224
+ return read();
225
+ };
226
 
227
+ return await read();
228
+ }
229
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
230
  } catch (error: any) {
231
  setisAiWorking(false);
232
+ toast.error(error.message);
233
+ if (error.openLogin) {
234
+ return { error: "login_required" };
 
235
  }
 
236
  return { error: "network_error", message: error.message };
237
  }
238
  };
 
242
  if (!prompt.trim()) return;
243
 
244
  setisAiWorking(true);
 
245
 
246
  const abortController = new AbortController();
247
  setController(abortController);
 
267
  signal: abortController.signal,
268
  });
269
 
270
+ if (request && request.body) {
 
 
271
  const res = await request.json();
272
 
273
  if (!request.ok) {
274
+ if (res.openLogin) {
275
+ setisAiWorking(false);
276
+ return { error: "login_required" };
277
+ } else if (res.openSelectProvider) {
278
+ setisAiWorking(false);
279
+ return { error: "provider_required", message: res.message };
280
+ } else if (res.openProModal) {
281
+ setisAiWorking(false);
282
+ return { error: "pro_required" };
283
+ } else {
284
+ toast.error(res.message);
285
+ setisAiWorking(false);
286
+ return { error: "api_error", message: res.message };
287
+ }
288
  }
289
+
 
 
290
  toast.success("AI responded successfully");
291
  setisAiWorking(false);
292
+
293
  setPages(res.pages);
294
  onSuccess(res.pages, prompt, res.updatedLines);
295
 
296
  if (audio.current) audio.current.play();
297
 
298
+ return { success: true, html: res.html, updatedLines: res.updatedLines };
299
  }
300
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
301
  } catch (error: any) {
302
  setisAiWorking(false);
 
303
  toast.error(error.message);
304
+ if (error.openLogin) {
305
+ return { error: "login_required" };
306
+ }
307
  return { error: "network_error", message: error.message };
308
  }
309
  };
310
 
311
+ // Stop the current AI generation
312
  const stopController = () => {
313
  if (controller) {
314
  controller.abort();
 
316
  setisAiWorking(false);
317
  }
318
  };
319
+
320
  // Parses a (possibly partial) streamed response into Page objects and pushes
  // them into state. Safe to call repeatedly as chunks arrive: it re-parses the
  // whole accumulated content each time. Returns the parsed pages (empty array
  // when no complete title marker is present yet).
  const formatPages = (content: string) => {
    const pages: Page[] = [];
    // No title marker yet — nothing renderable in this chunk.
    if (!content.match(/<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/)) {
      return pages;
    }

    // Strip any preamble before the first title marker (e.g. model chatter or
    // <think> output), keeping the marker itself via the $1 back-reference.
    const cleanedContent = content.replace(
      /[\s\S]*?<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/,
      "<<<<<<< START_TITLE $1 >>>>>>> END_TITLE"
    );
    // Splitting on a capture-group regex interleaves captures into the result:
    // ["", path1, html1, path2, html2, ...] — index 0 is the (empty) prefix.
    const htmlChunks = cleanedContent.split(
      /<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/
    );
    // Tracks indices already consumed as a path/html pair so the html half of
    // a pair is not re-interpreted as a page path on the next iteration.
    const processedChunks = new Set<number>();

    htmlChunks.forEach((chunk, index) => {
      if (processedChunks.has(index) || !chunk?.trim()) {
        return;
      }
      // chunk is a page path; the following element is its HTML body.
      // NOTE(review): htmlChunks[index + 1] may be undefined for a trailing
      // path with no body yet — extractHtmlContent returns "" in that case.
      const htmlContent = extractHtmlContent(htmlChunks[index + 1]);

      if (htmlContent) {
        const page: Page = {
          path: chunk.trim(),
          html: htmlContent,
        };
        pages.push(page);

        // Only auto-scroll once there is a meaningful amount of content.
        if (htmlContent.length > 200) {
          onScrollToBottom();
        }

        // Mark both halves of the pair as consumed.
        processedChunks.add(index);
        processedChunks.add(index + 1);
      }
    });
    if (pages.length > 0) {
      setPages(pages);
      // Focus the most recently parsed page; fall back to index.html.
      const lastPagePath = pages[pages.length - 1]?.path;
      setCurrentPage(lastPagePath || "index.html");
    }

    return pages;
  };
364
+
365
+ const formatPage = (content: string, currentPagePath: string) => {
366
+ if (!content.match(/<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/)) {
367
+ return null;
368
+ }
369
+
370
+ const cleanedContent = content.replace(
371
+ /[\s\S]*?<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/,
372
+ "<<<<<<< START_TITLE $1 >>>>>>> END_TITLE"
373
+ );
374
+
375
+ const htmlChunks = cleanedContent.split(
376
+ /<<<<<<< START_TITLE (.*?) >>>>>>> END_TITLE/
377
+ )?.filter(Boolean);
378
+
379
+ const pagePath = htmlChunks[0]?.trim() || "";
380
+ const htmlContent = extractHtmlContent(htmlChunks[1]);
381
+
382
+ if (!pagePath || !htmlContent) {
383
+ return null;
384
+ }
385
+
386
+ const page: Page = {
387
+ path: pagePath,
388
+ html: htmlContent,
389
+ };
390
+
391
+ setPages(prevPages => {
392
+ const existingPageIndex = prevPages.findIndex(p => p.path === currentPagePath || p.path === pagePath);
393
+
394
+ if (existingPageIndex !== -1) {
395
+ const updatedPages = [...prevPages];
396
+ updatedPages[existingPageIndex] = page;
397
+ return updatedPages;
398
+ } else {
399
+ return [...prevPages, page];
400
+ }
401
+ });
402
+
403
+ setCurrentPage(pagePath);
404
+
405
+ if (htmlContent.length > 200) {
406
+ onScrollToBottom();
407
+ }
408
+
409
+ return page;
410
+ };
411
+
412
+ // Helper function to extract and clean HTML content
413
  const extractHtmlContent = (chunk: string): string => {
414
+ if (!chunk) return "";
415
+
416
+ // Extract HTML content
417
+ const htmlMatch = chunk.trim().match(/<!DOCTYPE html>[\s\S]*/);
418
+ if (!htmlMatch) return "";
419
+
420
+ let htmlContent = htmlMatch[0];
421
+
422
+ // Ensure proper HTML structure
423
+ htmlContent = ensureCompleteHtml(htmlContent);
424
+
425
+ // Remove markdown code blocks if present
426
+ htmlContent = htmlContent.replace(/```/g, "");
427
+
428
+ return htmlContent;
429
+ };
430
+
431
+ // Helper function to ensure HTML has complete structure
432
+ const ensureCompleteHtml = (html: string): string => {
433
+ let completeHtml = html;
434
+
435
+ // Add missing head closing tag
436
+ if (completeHtml.includes("<head>") && !completeHtml.includes("</head>")) {
437
+ completeHtml += "\n</head>";
438
+ }
439
+
440
+ // Add missing body closing tag
441
+ if (completeHtml.includes("<body") && !completeHtml.includes("</body>")) {
442
+ completeHtml += "\n</body>";
443
+ }
444
+
445
+ // Add missing html closing tag
446
+ if (!completeHtml.includes("</html>")) {
447
+ completeHtml += "\n</html>";
448
+ }
449
+
450
+ return completeHtml;
451
  };
452
 
453
  return {
454
  callAiNewProject,
455
  callAiFollowUp,
456
+ callAiNewPage,
457
  stopController,
458
  controller,
459
  audio,