everydaycats committed on
Commit
d191298
·
verified ·
1 Parent(s): 9ca10e5

Update aiEngine.js

Browse files
Files changed (1) hide show
  1. aiEngine.js +13 -22
aiEngine.js CHANGED
@@ -100,13 +100,17 @@ export const AIEngine = {
100
  * Can return { status: "REJECTED", ... } or { status: "ACCEPTED", questions: ... }
101
  */
102
  generateEntryQuestions: async (description) => {
103
- const modelId = 'gemini-2.5-pro';
104
  // Updated prompt to enforce Gatekeeping (TOS/Nonsense check)
105
  const input = `[MODE 1: QUESTIONS]\nAnalyze this game idea: "${description}". Check for TOS violations or nonsense. If good, ask 3 questions. Output ONLY raw JSON.`;
106
 
107
- try {
108
  const response = await genAI.models.generateContent({
109
  model: modelId,
 
 
 
 
 
110
  config: {
111
  responseMimeType: "application/json",
112
  systemInstruction: { parts: [{ text: prompts.analyst_system_prompt }] }
@@ -128,13 +132,18 @@ export const AIEngine = {
128
  * Returns STRICT JSON
129
  */
130
  gradeProject: async (description, answers) => {
131
- const modelId = 'gemini-2.5-pro';
132
  // Updated prompt to ask for Title and Rating
133
  const input = `[MODE 2: GRADING]\nIdea: "${description}"\nUser Answers: ${JSON.stringify(answers)}\n\nAssess feasibility. Output JSON with title and rating.`;
134
 
135
  try {
136
  const response = await genAI.models.generateContent({
137
  model: modelId,
 
 
 
 
 
138
  config: {
139
  responseMimeType: "application/json",
140
  systemInstruction: { parts: [{ text: prompts.analyst_system_prompt }] }
@@ -156,20 +165,7 @@ export const AIEngine = {
156
  const modelId = 'gemini-2.5-flash-image';
157
 
158
  try {
159
- /* console.log(`[AIEngine] Generating Image for: "${prompt}"...`);
160
- const response = await genAI.models.generateImages({
161
- model: modelId,
162
- prompt: prompt,
163
- config: {
164
- numberOfImages: 1,
165
- aspectRatio: "16:9", // Widescreen for thumbnails
166
- outputMimeType: "image/png"
167
- }
168
- });
169
-
170
- const image = response.generatedImages[0];
171
- if (!image || !image.image) throw new Error("No image data returned");
172
- */
173
  const config = {
174
  responseModalities: [
175
  'IMAGE',
@@ -201,11 +197,6 @@ export const AIEngine = {
201
  continue;
202
  }
203
  if (chunk.candidates?.[0]?.content?.parts?.[0]?.inlineData) {
204
- /* const fileName = `ENTER_FILE_NAME_${fileIndex++}`;
205
- const inlineData = chunk.candidates[0].content.parts[0].inlineData;
206
- const fileExtension = mime.getExtension(inlineData.mimeType || '');
207
- const buffer = Buffer.from(inlineData.data || '', 'base64');
208
- */
209
 
210
  // inlineData is the object from the model: { mimeType: 'image/png', data: '...base64...' }
211
  const inlineData = chunk.candidates[0].content.parts[0].inlineData;
 
100
  * Can return { status: "REJECTED", ... } or { status: "ACCEPTED", questions: ... }
101
  */
102
  generateEntryQuestions: async (description) => {
103
+ const modelId = 'gemini-flash-latest';
104
  // Updated prompt to enforce Gatekeeping (TOS/Nonsense check)
105
  const input = `[MODE 1: QUESTIONS]\nAnalyze this game idea: "${description}". Check for TOS violations or nonsense. If good, ask 3 questions. Output ONLY raw JSON.`;
106
 
 
107
  const response = await genAI.models.generateContent({
108
  model: modelId,
109
+
110
+ thinkingConfig: {
111
+ thinkingBudget: -1,
112
+ },
113
+
114
  config: {
115
  responseMimeType: "application/json",
116
  systemInstruction: { parts: [{ text: prompts.analyst_system_prompt }] }
 
132
  * Returns STRICT JSON
133
  */
134
  gradeProject: async (description, answers) => {
135
+ const modelId = 'gemini-flash-latest';
136
  // Updated prompt to ask for Title and Rating
137
  const input = `[MODE 2: GRADING]\nIdea: "${description}"\nUser Answers: ${JSON.stringify(answers)}\n\nAssess feasibility. Output JSON with title and rating.`;
138
 
139
  try {
140
  const response = await genAI.models.generateContent({
141
  model: modelId,
142
+
143
+ thinkingConfig: {
144
+ thinkingBudget: -1,
145
+ },
146
+
147
  config: {
148
  responseMimeType: "application/json",
149
  systemInstruction: { parts: [{ text: prompts.analyst_system_prompt }] }
 
165
  const modelId = 'gemini-2.5-flash-image';
166
 
167
  try {
168
+
 
 
 
 
 
 
 
 
 
 
 
 
 
169
  const config = {
170
  responseModalities: [
171
  'IMAGE',
 
197
  continue;
198
  }
199
  if (chunk.candidates?.[0]?.content?.parts?.[0]?.inlineData) {
 
 
 
 
 
200
 
201
  // inlineData is the object from the model: { mimeType: 'image/png', data: '...base64...' }
202
  const inlineData = chunk.candidates[0].content.parts[0].inlineData;