// Module setup: Gemini API client + role prompt templates loaded from disk.
import fs from 'fs';
import { GoogleGenAI } from '@google/genai';

// NOTE(review): `GoogleGenAI` was used below without a visible import in this
// chunk — added here; confirm it is not already imported earlier in the file.
const genAI = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });

// SECURITY: the original logged the raw API key (console.log(process.env.GEMINI_API_KEY)),
// leaking the secret into stdout/log aggregation. Log only whether it is set so
// misconfiguration is still diagnosable.
if (!process.env.GEMINI_API_KEY) {
  console.warn('GEMINI_API_KEY is not set; Gemini API calls will fail.');
}

// Prompt templates (pm_system_prompt, worker_system_prompt), read once at module
// load relative to the process working directory.
const prompts = JSON.parse(fs.readFileSync('./prompts.json', 'utf8'));
// Single model id shared by both agent roles.
const MODEL_ID = 'gemini-3-pro-preview';

/**
 * Sends one generateContent request with a role-specific system prompt.
 *
 * BUG FIX: the original nested `systemInstruction` inside the `tools` array,
 * where the SDK ignores it — tool entries only carry tool declarations such as
 * `googleSearch`. `systemInstruction` belongs at the top level of `config`.
 * The original compensated by also injecting the system prompt as a fake first
 * `user` message in `contents`; that duplicate (wrong role, doubled tokens) is
 * removed now that the instruction is actually applied.
 *
 * @param {string} systemPrompt - System instruction text for this role.
 * @param {Array<object>} history - Prior conversation turns ({role, parts}).
 * @param {Array<object>} currentParts - Parts for the new user turn.
 * @param {string} label - Role name used in error logging ("PM" / "Worker").
 * @returns {Promise<string>} The model's text response.
 * @throws Rethrows any SDK error after logging it.
 */
const callModel = async (systemPrompt, history, currentParts, label) => {
  const config = {
    thinkingConfig: { thinkingLevel: 'HIGH' },
    systemInstruction: [{ text: systemPrompt }],
    tools: [{ googleSearch: {} }],
  };

  const contents = [
    ...history,
    { role: 'user', parts: currentParts },
  ];

  try {
    const response = await genAI.models.generateContent({
      model: MODEL_ID,
      config,
      contents,
    });
    return response.text;
  } catch (error) {
    // Preserves the original per-role messages: "PM AI Error:" / "Worker AI Error:".
    console.error(`${label} AI Error:`, error);
    throw error;
  }
};

/**
 * Thin role-specific wrappers around the shared Gemini call.
 * Public interface is unchanged from the original.
 */
export const AIEngine = {
  /**
   * Project-manager agent call.
   * @param {Array<object>} history - Prior conversation turns.
   * @param {string} input - New user message.
   * @returns {Promise<string>} Model response text.
   */
  callPM: async (history, input) =>
    callModel(prompts.pm_system_prompt, history, [{ text: input }], 'PM'),

  /**
   * Worker agent call; optionally attaches an image part to the new turn.
   * @param {Array<object>} history - Prior conversation turns.
   * @param {string} input - New user message.
   * @param {object|null} [imagePart=null] - Optional inline-data image part.
   * @returns {Promise<string>} Model response text.
   */
  callWorker: async (history, input, imagePart = null) => {
    const currentParts = [{ text: input }];
    if (imagePart) {
      currentParts.push(imagePart);
    }
    return callModel(prompts.worker_system_prompt, history, currentParts, 'Worker');
  },
};