// app/api/ask-ai/route.ts
/* eslint-disable @typescript-eslint/no-explicit-any */
import type { NextRequest } from "next/server";
import { NextResponse } from "next/server";
import { GoogleGenerativeAI } from "@google/generative-ai";
import {
DIVIDER,
FOLLOW_UP_SYSTEM_PROMPT,
INITIAL_SYSTEM_PROMPT,
NEW_PAGE_END,
NEW_PAGE_START,
REPLACE_END,
SEARCH_START,
UPDATE_PAGE_START,
UPDATE_PAGE_END,
} from "@/lib/prompts";
import { Page } from "@/types";
// Gemini Models Configuration
// Gemini model aliases accepted from the client, mapped to the model id sent
// to the Google Generative AI SDK. `as const` keeps the keys/values as
// readonly literal types so `keyof typeof GEMINI_MODELS` stays a precise union.
const GEMINI_MODELS = {
  "gemini-2.5-flash": "gemini-2.5-flash",
  "gemini-2.5-pro": "gemini-2.5-pro",
  "gemini-1.5-pro": "gemini-1.5-pro",
  "gemini-1.5-flash": "gemini-1.5-flash",
} as const;
/**
 * POST /api/ask-ai — generate a brand-new design.
 *
 * Streams the Gemini model's raw text output back to the client as
 * `text/plain`. Accepts either a free-form `prompt` or a `redesignMarkdown`
 * snapshot of an existing design; optionally threads in previously generated
 * `pages` and `previousPrompts` for context.
 *
 * Responses:
 *  - 400 when `model` or both `prompt`/`redesignMarkdown` are missing
 *  - 500 when GEMINI_API_KEY is not configured or setup fails
 *  - 200 streaming body otherwise; mid-stream errors are written into the
 *    stream as a JSON object (the HTTP status is already committed by then)
 */
export async function POST(request: NextRequest) {
  const body = await request.json();
  const { prompt, model, redesignMarkdown, previousPrompts, pages } = body;

  if (!model || (!prompt && !redesignMarkdown)) {
    return NextResponse.json(
      { ok: false, error: "Missing required fields" },
      { status: 400 }
    );
  }

  // Get Gemini API Key
  const geminiApiKey = process.env.GEMINI_API_KEY;
  if (!geminiApiKey) {
    return NextResponse.json(
      { ok: false, error: "Gemini API key not configured" },
      { status: 500 }
    );
  }

  // Validate model, falling back to the cheapest default for unknown ids.
  const selectedModel =
    GEMINI_MODELS[model as keyof typeof GEMINI_MODELS] ?? "gemini-2.5-flash";

  // Rate limiting and login check has been removed.
  try {
    const encoder = new TextEncoder();
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();

    // Hand the readable side to the client immediately; the async IIFE below
    // keeps writing into the writable side after this function returns.
    const response = new NextResponse(stream.readable, {
      headers: {
        "Content-Type": "text/plain; charset=utf-8",
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
      },
    });

    // Fire-and-forget producer; `void` marks the promise as intentionally
    // unawaited. All failure paths are handled inside.
    void (async () => {
      try {
        const genAI = new GoogleGenerativeAI(geminiApiKey);
        const geminiModel = genAI.getGenerativeModel({ model: selectedModel });

        // Prepare the conversation context
        let conversationContext = INITIAL_SYSTEM_PROMPT;
        if (pages?.length > 1) {
          // Guard previousPrompts: callers may omit it even when pages exist
          // (the original code crashed here with a TypeError).
          const pagesList = pages
            .map((p: Page) => `- ${p.path} \n${p.html}`)
            .join("\n");
          const promptsList = (previousPrompts ?? [])
            .map((p: string) => `- ${p}`)
            .join("\n");
          conversationContext += `\n\nHere are the current pages:\n\n${pagesList}\n\nNow, please create a new page based on this code. Also here are the previous prompts:\n\n${promptsList}`;
        }

        const userPrompt = redesignMarkdown
          ? `Here is my current design as a markdown:\n\n${redesignMarkdown}\n\nNow, please create a new design based on this markdown.`
          : prompt;

        const fullPrompt = `${conversationContext}\n\nUser Request: ${userPrompt}`;

        // Generate content with streaming
        const result = await geminiModel.generateContentStream(fullPrompt);
        for await (const chunk of result.stream) {
          const chunkText = chunk.text();
          if (chunkText) {
            await writer.write(encoder.encode(chunkText));
          }
        }
      } catch (error: any) {
        console.error("Gemini API Error:", error);
        // The HTTP status is already 200 at this point, so errors are
        // reported in-band as a JSON payload appended to the stream.
        if (error.message?.includes("quota")) {
          await writer.write(
            encoder.encode(
              JSON.stringify({
                ok: false,
                openProModal: true,
                message: "API quota exceeded. Please try again later.",
              })
            )
          );
        } else {
          await writer.write(
            encoder.encode(
              JSON.stringify({
                ok: false,
                message:
                  error.message ||
                  "An error occurred while processing your request.",
              })
            )
          );
        }
      } finally {
        // close() can reject if the stream already errored or the client
        // disconnected; swallow it so the IIFE never rejects unhandled.
        try {
          await writer.close();
        } catch {
          /* stream already closed or errored */
        }
      }
    })();

    return response;
  } catch (error: any) {
    return NextResponse.json(
      {
        ok: false,
        message:
          error?.message || "An error occurred while processing your request.",
      },
      { status: 500 }
    );
  }
}
export async function PUT(request: NextRequest) {
const body = await request.json();
const { prompt, previousPrompts, selectedElementHtml, model, pages, files } = body;
if (!prompt || pages.length === 0) {
return NextResponse.json(
{ ok: false, error: "Missing required fields" },
{ status: 400 }
);
}
// Get Gemini API Key
const geminiApiKey = process.env.GEMINI_API_KEY;
if (!geminiApiKey) {
return NextResponse.json(
{ ok: false, error: "Gemini API key not configured" },
{ status: 500 }
);
}
// Validate model
const selectedModel = GEMINI_MODELS[model as keyof typeof GEMINI_MODELS] || "gemini-2.5-flash";
// Rate limiting and login check has been removed.
try {
const genAI = new GoogleGenerativeAI(geminiApiKey);
const geminiModel = genAI.getGenerativeModel({ model: selectedModel });
// Prepare context for updating pages
let contextPrompt = FOLLOW_UP_SYSTEM_PROMPT;
if (previousPrompts) {
contextPrompt += `\n\nAlso here are the previous prompts:\n\n${previousPrompts.map((p: string) => `- ${p}`).join("\n")}`;
} else {
contextPrompt += "\n\nYou are modifying the HTML file based on the user's request.";
}
if (selectedElementHtml) {
contextPrompt += `\n\nYou have to update ONLY the following element, NOTHING ELSE: \n\n\`\`\`html\n${selectedElementHtml}\n\`\`\``;
}
contextPrompt += `. Current pages: ${pages?.map((p: Page) => `- ${p.path} \n${p.html}`).join("\n")}`;
if (files?.length > 0) {
contextPrompt += `. Current images: ${files?.map((f: string) => `- ${f}`).join("\n")}.`;
}
const fullPrompt = `${contextPrompt}\n\nUser Request: ${prompt}`;
const result = await geminiModel.generateContent(fullPrompt);
const chunk = result.response.text();
if (!chunk) {
return NextResponse.json(
{ ok: false, message: "No content returned from the model" },
{ status: 400 }
);
}
if (chunk) {
const updatedLines: number[][] = [];
let newHtml = "";
const updatedPages = [...(pages || [])];
const updatePageRegex = new RegExp(`${UPDATE_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([^\\s]+)\\s*${UPDATE_PAGE_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([\\s\\S]*?)(?=${UPDATE_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|${NEW_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|$)`, 'g');
let updatePageMatch;
while ((updatePageMatch = updatePageRegex.exec(chunk)) !== null) {
const [, pagePath, pageContent] = updatePageMatch;
const pageIndex = updatedPages.findIndex(p => p.path === pagePath);
if (pageIndex !== -1) {
let pageHtml = updatedPages[pageIndex].html;
let processedContent = pageContent;
const htmlMatch = pageContent.match(/``````/);
if (htmlMatch) {
processedContent = htmlMatch[1];
}
let position = 0;
let moreBlocks = true;
while (moreBlocks) {
const searchStartIndex = processedContent.indexOf(SEARCH_START, position);
if (searchStartIndex === -1) {
moreBlocks = false;
continue;
}
const dividerIndex = processedContent.indexOf(DIVIDER, searchStartIndex);
if (dividerIndex === -1) {
moreBlocks = false;
continue;
}
const replaceEndIndex = processedContent.indexOf(REPLACE_END, dividerIndex);
if (replaceEndIndex === -1) {
moreBlocks = false;
continue;
}
const searchBlock = processedContent.substring(
searchStartIndex + SEARCH_START.length,
dividerIndex
);
const replaceBlock = processedContent.substring(
dividerIndex + DIVIDER.length,
replaceEndIndex
);
if (searchBlock.trim() === "") {
pageHtml = `${replaceBlock}\n${pageHtml}`;
updatedLines.push([1, replaceBlock.split("\n").length]);
} else {
const blockPosition = pageHtml.indexOf(searchBlock);
if (blockPosition !== -1) {
const beforeText = pageHtml.substring(0, blockPosition);
const startLineNumber = beforeText.split("\n").length;
const replaceLines = replaceBlock.split("\n").length;
const endLineNumber = startLineNumber + replaceLines - 1;
updatedLines.push([startLineNumber, endLineNumber]);
pageHtml = pageHtml.replace(searchBlock, replaceBlock);
}
}
position = replaceEndIndex + REPLACE_END.length;
}
updatedPages[pageIndex].html = pageHtml;
if (pagePath === '/' || pagePath === '/index' || pagePath === 'index') {
newHtml = pageHtml;
}
}
}
const newPageRegex = new RegExp(`${NEW_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([^\\s]+)\\s*${NEW_PAGE_END.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}([\\s\\S]*?)(?=${UPDATE_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|${NEW_PAGE_START.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}|$)`, 'g');
let newPageMatch;
while ((newPageMatch = newPageRegex.exec(chunk)) !== null) {
const [, pagePath, pageContent] = newPageMatch;
let pageHtml = pageContent;
const htmlMatch = pageContent.match(/``````/);
if (htmlMatch) {
pageHtml = htmlMatch[1];
}
const existingPageIndex = updatedPages.findIndex(p => p.path === pagePath);
if (existingPageIndex !== -1) {
updatedPages[existingPageIndex] = {
path: pagePath,
html: pageHtml.trim()
};
} else {
updatedPages.push({
path: pagePath,
html: pageHtml.trim()
});
}
}
if (updatedPages.length === pages?.length && !chunk.includes(UPDATE_PAGE_START)) {
let position = 0;
let moreBlocks = true;
while (moreBlocks) {
const searchStartIndex = chunk.indexOf(SEARCH_START, position);
if (searchStartIndex === -1) {
moreBlocks = false;
continue;
}
const dividerIndex = chunk.indexOf(DIVIDER, searchStartIndex);
if (dividerIndex === -1) {
moreBlocks = false;
continue;
}
const replaceEndIndex = chunk.indexOf(REPLACE_END, dividerIndex);
if (replaceEndIndex === -1) {
moreBlocks = false;
continue;
}
const searchBlock = chunk.substring(
searchStartIndex + SEARCH_START.length,
dividerIndex
);
const replaceBlock = chunk.substring(
dividerIndex + DIVIDER.length,
replaceEndIndex
);
if (searchBlock.trim() === "") {
newHtml = `${replaceBlock}\n${newHtml}`;
updatedLines.push([1, replaceBlock.split("\n").length]);
} else {
const blockPosition = newHtml.indexOf(searchBlock);
if (blockPosition !== -1) {
const beforeText = newHtml.substring(0, blockPosition);
const startLineNumber = beforeText.split("\n").length;
const replaceLines = replaceBlock.split("\n").length;
const endLineNumber = startLineNumber + replaceLines - 1;
updatedLines.push([startLineNumber, endLineNumber]);
newHtml = newHtml.replace(searchBlock, replaceBlock);
}
}
position = replaceEndIndex + REPLACE_END.length;
}
const mainPageIndex = updatedPages.findIndex(p => p.path === '/' || p.path === '/index' || p.path === 'index');
if (mainPageIndex !== -1) {
updatedPages[mainPageIndex].html = newHtml;
}
}
return NextResponse.json({
ok: true,
updatedLines,
pages: updatedPages,
});
} else {
return NextResponse.json(
{ ok: false, message: "No content returned from the model" },
{ status: 400 }
);
}
} catch (error: any) {
console.error("Gemini API Error:", error);
if (error.message?.includes("quota")) {
return NextResponse.json(
{
ok: false,
openProModal: true,
message: "API quota exceeded. Please try again later.",
},
{ status: 402 }
);
}
return NextResponse.json(
{
ok: false,
message:
error.message || "An error occurred while processing your request.",
},
{ status: 500 }
);
}
}