| import dotenv from 'dotenv'; |
| import fs from 'fs'; |
| import path from 'path'; |
|
|
// Load environment variables from a local .env file into process.env.
dotenv.config();

// Base URL of the remote AI inference server; defaults to a local dev instance.
const REMOTE_SERVER_URL = process.env.REMOTE_AI_URL || "http://localhost:7860";
|
|
| |
// Prompt templates loaded from ./prompts.json at startup.
// Any failure (missing file, unreadable, invalid JSON) leaves `prompts`
// as an empty object so callers fall back to their inline defaults.
let prompts = {};
try {
  const promptsFile = path.resolve('./prompts.json');
  if (fs.existsSync(promptsFile)) {
    const raw = fs.readFileSync(promptsFile, 'utf8');
    prompts = JSON.parse(raw);
  }
} catch (e) {
  console.error("Prompt Load Error:", e);
}
|
|
| |
/**
 * Flatten a structured chat history into a single plain-text prompt string.
 *
 * @param {?Array<{role: string, parts: Array<{text: string}>}>} history - prior
 *   turns; entries with role 'model' render as "Assistant", anything else as "User".
 * @param {string} currentInput - the new user message appended at the end.
 * @param {string} systemPrompt - system instruction placed at the top.
 * @param {number} [limit=10] - maximum number of recent turns to include.
 * @param {?string} [gdd=null] - optional project GDD text pinned before the context.
 * @returns {string} prompt ending in "Assistant:" for the model to complete.
 */
const flattenHistory = (history, currentInput, systemPrompt, limit = 10, gdd = null) => {
  // Keep only the most recent `limit` turns to bound prompt size.
  const recentHistory = (history ?? []).slice(-limit);

  const context = recentHistory
    .map((m) => {
      const roleName = m.role === 'model' ? 'Assistant' : 'User';
      // Guard against malformed turns missing `parts` or `text`
      // (the original crashed with a TypeError here).
      const text = m.parts?.[0]?.text ?? '';
      return `${roleName}: ${text}`;
    })
    .join('\n');

  // Optional project reference anchored ahead of the conversation context.
  const projectAnchor = gdd ? `[PROJECT GDD REFERENCE]:\n${gdd}\n\n` : "";

  return `System: ${systemPrompt}\n\n${projectAnchor}${context}\nUser: ${currentInput}\nAssistant:`;
};
|
|
| |
/**
 * Consume a streaming fetch Response whose body interleaves visible output text
 * with server-defined delimiters: "__THINK__" marks the start of reasoning text
 * within a chunk, and "__USAGE__" precedes a trailing JSON token-usage blob.
 *
 * @param {Response} response - fetch Response with a readable body stream.
 * @param {?function(string): void} onThink - invoked with reasoning-text fragments.
 * @param {?function(string): void} onOutput - invoked with visible-output fragments.
 * @returns {Promise<{text: string, usage: {totalTokenCount: number, inputTokens: number, outputTokens: number}}>}
 *   final cleaned text (usage/think markers stripped) plus parsed token usage.
 * @throws {Error} when the HTTP response status is not ok.
 */
const handleStreamResponse = async (response, onThink, onOutput) => {
  if (!response.ok) throw new Error(`Stream Error: ${response.statusText}`);

  const reader = response.body.getReader();
  const decoder = new TextDecoder("utf-8");

  // Accumulates the entire raw stream so usage/think markers can be
  // re-parsed from the full payload after streaming finishes.
  let fullStreamData = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // stream:true lets multi-byte UTF-8 sequences split across chunks decode correctly.
    const chunk = decoder.decode(value, { stream: true });
    fullStreamData += chunk;

    // Chunks containing the usage blob are withheld from the live callbacks;
    // usage is handled after the stream completes.
    // NOTE(review): a marker that straddles two chunks won't be detected here —
    // this assumes the server flushes "__THINK__"/"__USAGE__" atomically; confirm.
    if (!chunk.includes("__USAGE__")) {
      if (chunk.includes("__THINK__")) {
        // Text before the marker is output; text after it is reasoning.
        const parts = chunk.split("__THINK__");
        if (parts[0] && onOutput) onOutput(parts[0]);
        if (parts[1] && onThink) onThink(parts[1]);
      } else {
        if (onOutput) onOutput(chunk);
      }
    }
  }

  let usage = { totalTokenCount: 0, inputTokens: 0, outputTokens: 0 };
  let finalCleanText = fullStreamData;

  // Split off the trailing usage JSON, if the server sent one.
  if (fullStreamData.includes("__USAGE__")) {
    const parts = fullStreamData.split("__USAGE__");
    finalCleanText = parts[0];
    const usageRaw = parts[1];

    try {
      const parsedUsage = JSON.parse(usageRaw);
      usage.totalTokenCount = parsedUsage.totalTokenCount || 0;
      usage.inputTokens = parsedUsage.inputTokens || 0;
      usage.outputTokens = parsedUsage.outputTokens || 0;
    } catch (e) {
      // Malformed usage blob: keep zeroed usage rather than failing the call.
      console.warn("Usage Parse Failed in Engine:", e);
    }
  }

  // Drop any reasoning text from the final returned string: only the portion
  // before the first "__THINK__" marker counts as visible output.
  finalCleanText = finalCleanText.split("__THINK__")[0].trim();

  return { text: finalCleanText, usage };
};
|
|
/**
 * POST a JSON payload to the remote AI server.
 * Centralizes the fetch boilerplate previously duplicated across every method
 * and fails fast on HTTP errors (the original ignored `response.ok` on
 * non-stream calls, surfacing failures as confusing JSON parse errors).
 *
 * @param {string} endpoint - path on the remote server, e.g. "/api/generate".
 * @param {object} payload - JSON-serializable request body.
 * @returns {Promise<Response>} the ok Response.
 * @throws {Error} when the server responds with a non-2xx status.
 */
const postToRemote = async (endpoint, payload) => {
  const response = await fetch(`${REMOTE_SERVER_URL}${endpoint}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error(`AI server ${endpoint} responded ${response.status} ${response.statusText}`);
  }
  return response;
};

/**
 * Facade over the remote AI server. Streaming variants report token-usage and
 * reasoning text via callbacks; non-streaming variants return parsed results.
 */
export const AIEngine = {
  /**
   * Stream a Project Manager (claude) completion.
   * @returns {Promise<{text: string, usage: object}>}
   */
  callPMStream: async (history, input, onThink, onOutput, gdd = null) => {
    const systemPrompt = prompts.pm_system_prompt || "You are a Project Manager.";
    const prompt = flattenHistory(history, input, systemPrompt, 15, gdd);

    const response = await postToRemote('/api/stream', {
      model: "claude",
      prompt: prompt,
      system_prompt: systemPrompt,
    });
    return handleStreamResponse(response, onThink, onOutput);
  },

  /**
   * Stream a Senior Engineer (gpt) completion, optionally with images.
   * @returns {Promise<{text: string, usage: object}>}
   */
  callWorkerStream: async (history, input, onThink, onOutput, images = []) => {
    const systemPrompt = prompts.worker_system_prompt || "You are a Senior Engineer.";
    const prompt = flattenHistory(history, input, systemPrompt, 8, null);

    const response = await postToRemote('/api/stream', {
      model: "gpt",
      prompt: prompt,
      system_prompt: systemPrompt,
      images: images,
    });
    return handleStreamResponse(response, onThink, onOutput);
  },

  /**
   * Non-streaming Project Manager (claude) completion.
   * @returns {Promise<{text: string, usage: object}>}
   */
  callPM: async (history, input, gdd = null) => {
    const systemPrompt = prompts.pm_system_prompt || "You are a Project Manager.";
    const prompt = flattenHistory(history, input, systemPrompt, 15, gdd);

    const response = await postToRemote('/api/generate', {
      model: "claude",
      prompt: prompt,
      system_prompt: systemPrompt,
    });
    const result = await response.json();
    return { text: result.data, usage: result.usage ?? { totalTokenCount: 0 } };
  },

  /**
   * Non-streaming Senior Engineer (gpt) completion.
   * @returns {Promise<{text: string, usage: object}>}
   */
  callWorker: async (history, input) => {
    const systemPrompt = prompts.worker_system_prompt || "You are a Senior Engineer.";
    const prompt = flattenHistory(history, input, systemPrompt, 8, null);

    const response = await postToRemote('/api/generate', {
      model: "gpt",
      prompt: prompt,
      system_prompt: systemPrompt,
    });
    const result = await response.json();
    return { text: result.data, usage: result.usage ?? { totalTokenCount: 0 } };
  },

  /**
   * Ask the analyst model for entry questions about a project idea.
   * The model is expected to return a JSON string in `result.data`.
   * @throws {Error} with `cause` when the model output is not valid JSON
   *   (the original let a raw SyntaxError escape with no context).
   */
  generateEntryQuestions: async (desc) => {
    const response = await postToRemote('/api/generate', {
      model: "gpt",
      prompt: `Analyze this project idea: ${desc}`,
      system_prompt: prompts.analyst_system_prompt || "Output JSON only.",
    });
    const result = await response.json();
    try {
      return { ...JSON.parse(result.data), usage: result.usage };
    } catch (e) {
      throw new Error("generateEntryQuestions: analyst returned non-JSON output", { cause: e });
    }
  },

  /**
   * Ask the analyst model to grade a project description plus answers.
   * @throws {Error} with `cause` when the model output is not valid JSON.
   */
  gradeProject: async (desc, ans) => {
    const response = await postToRemote('/api/generate', {
      model: "gpt",
      prompt: `Grade this project. Description: ${desc} Answers: ${JSON.stringify(ans)}`,
      system_prompt: prompts.analyst_system_prompt || "Output JSON only.",
    });
    const result = await response.json();
    try {
      const parsed = JSON.parse(result.data);
      parsed.usage = result.usage;
      return parsed;
    } catch (e) {
      throw new Error("gradeProject: analyst returned non-JSON output", { cause: e });
    }
  },

  /**
   * Generate an image from a prompt. Best-effort: returns the parsed server
   * response, or null on any failure (network or HTTP error) — matching the
   * original's swallow-and-null contract.
   */
  generateImage: async (prompt) => {
    try {
      const response = await postToRemote('/api/image', { prompt });
      return await response.json();
    } catch (e) {
      console.error("Image Gen Error:", e);
      return null;
    }
  },
};