Spaces:
Running
Running
File size: 2,387 Bytes
69029e9 69ede17 1f3ba27 69ede17 1f3ba27 69ede17 ad4d861 69ede17 0095bf6 69ede17 0bbc572 69ede17 0a49a51 69ede17 0bbc572 69ede17 0bbc572 0095bf6 69ede17 0bbc572 69ede17 0a49a51 69ede17 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 |
// aiEngine.js
import { GoogleGenAI } from '@google/genai';
import fs from 'fs';

/* Local-dev env loading (enable when running outside a managed environment):
import dotenv from 'dotenv';
dotenv.config();
*/
// NOTE(review): a stray `/*` used to precede the GoogleGenAI import above,
// which (since JS block comments do not nest) commented the import out
// entirely and made `GoogleGenAI` a ReferenceError at runtime.

// Gemini client; the key is read from the environment.
const genAI = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });
// Never log the raw API key — only report whether it is present.
if (!process.env.GEMINI_API_KEY) {
  console.warn('GEMINI_API_KEY is not set; Gemini calls will fail.');
}

// Prompt loader: system prompts live in prompts.json next to this module.
const prompts = JSON.parse(fs.readFileSync('./prompts.json', 'utf8'));
export const AIEngine = {
  /**
   * PM MODEL (Gemini 3.0 Pro Preview - High Thinking).
   *
   * @param {Array<object>} history - Prior conversation turns as { role, parts } objects.
   * @param {string} input - The new user message text.
   * @returns {Promise<string>} The model's text response (non-streaming).
   * @throws Re-throws any API error after logging it.
   */
  callPM: async (history, input) =>
    generateText({
      label: 'PM',
      systemPrompt: prompts.pm_system_prompt,
      history,
      parts: [{ text: input }],
    }),

  /**
   * WORKER MODEL.
   * Currently pinned to the same Pro model as the PM.
   * TODO(review): original intent (see prior commented code) was
   * 'gemini-flash-latest' for fast execution — confirm which is wanted.
   *
   * @param {Array<object>} history - Prior conversation turns as { role, parts } objects.
   * @param {string} input - The new user message text.
   * @param {object|null} [imagePart=null] - Optional inline-image part appended to the message.
   * @returns {Promise<string>} The model's text response.
   * @throws Re-throws any API error after logging it.
   */
  callWorker: async (history, input, imagePart = null) => {
    const parts = [{ text: input }];
    if (imagePart) {
      parts.push(imagePart);
    }
    return generateText({
      label: 'Worker',
      systemPrompt: prompts.worker_system_prompt,
      history,
      parts,
    });
  },
};

/**
 * Shared Gemini call used by both agent roles.
 *
 * NOTE(review): `systemInstruction` was previously nested inside the `tools`
 * array, where the @google/genai API ignores it; it now sits at config level
 * as the API expects. Because it is honored there, the system prompt is no
 * longer duplicated as a leading 'user' message in `contents`.
 *
 * @param {{label: string, systemPrompt: string, history: Array<object>, parts: Array<object>}} args
 * @returns {Promise<string>} The model's text response.
 */
async function generateText({ label, systemPrompt, history, parts }) {
  const config = {
    systemInstruction: [{ text: systemPrompt }],
    thinkingConfig: { thinkingLevel: 'HIGH' },
    tools: [{ googleSearch: {} }],
  };
  const contents = [...history, { role: 'user', parts }];
  try {
    const response = await genAI.models.generateContent({
      model: 'gemini-3-pro-preview', // Per prompt requirements
      config,
      contents,
    });
    return response.text; // Simple text return for non-stream internal logic
  } catch (error) {
    console.error(`${label} AI Error:`, error);
    throw error;
  }
}