everydaytok committed on
Commit
da7a53a
·
verified ·
1 Parent(s): a276d1e

Update app.js

Browse files
Files changed (1) hide show
  1. app.js +270 -1
app.js CHANGED
@@ -62,6 +62,274 @@ const StateManager = {
62
  }
63
  };
64
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  const callAI = async (history, input, contextData, systemPrompt, projectContext, modelId) => {
66
  let contextStr = "";
67
  try { contextStr = JSON.stringify(contextData, null, 2); } catch {}
@@ -288,4 +556,5 @@ app.post('/automated-briefing', async (req, res) => {
288
  });
289
 
290
  app.get('/', async (req, res) => res.status(200).json({ status: "Alive" }));
291
- app.listen(PORT, () => console.log(`✅ Core Online: ${PORT}`));
 
 
62
  }
63
  };
64
 
65
// --- NEW: Support injected 'images' variable ---
// Sends one turn to the remote AI gateway and returns { text, usage }.
// history: prior messages ({ role, parts: [{ text }] }); only the last 10 are inlined.
// input: current user message; contextData: JSON-serializable context blob.
// images: optional array forwarded verbatim to the gateway (defaults to []).
// Never throws: any network/HTTP/parse failure yields a <notification> fallback
// string that extractCommands() surfaces to the user as a toast.
const callAI = async (history, input, contextData, images, systemPrompt, projectContext, modelId) => {
  let contextStr = "";
  try { contextStr = JSON.stringify(contextData, null, 2); } catch {}
  const recentHistory = history.slice(-10).map(m => `${m.role === 'model' ? 'Assistant' : 'User'}: ${m.parts?.[0]?.text || ""}`).join('\n');
  const fullPrompt = `System: ${systemPrompt}\n\n${projectContext}\n\n[HISTORY]:\n${recentHistory}\n\n[CONTEXT]: ${contextStr}\n\nUser: ${input}\nAssistant:`;

  try {
    const response = await fetch(`${REMOTE_SERVER_URL}/api/generate`, {
      method: 'POST', headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ model: modelId, prompt: fullPrompt, system_prompt: systemPrompt, images: images || [] })
    });
    // FIX: treat HTTP error statuses as failures instead of trying to parse
    // an error page as JSON and returning garbage.
    if (!response.ok) throw new Error(`AI gateway responded ${response.status}`);
    const result = await response.json();
    // FIX: usage is always an object ({} when absent), matching the error path.
    return { text: result.data || result.text || "", usage: result.usage || {} };
  } catch (e) { return { text: "<notification>AI Unreachable</notification>", usage: {} }; }
};
81
+
82
// Pulls structured commands out of an AI reply by scanning for XML-ish tags.
// Each spec is [pattern, command type, whether the tag body is JSON].
// JSON bodies that fail to parse are dropped, except create_thrust, which
// falls back to wrapping the raw text in a default thrust payload.
function extractCommands(text) {
  const TAG_SPECS = [
    [/<thrust_create>([\s\S]*?)<\/thrust_create>/gi, 'create_thrust', true],
    [/<timeline_log>([\s\S]*?)<\/timeline_log>/gi, 'log_timeline', true],
    [/<notification>([\s\S]*?)<\/notification>/gi, 'notification', false],
    [/<update_requirements>([\s\S]*?)<\/update_requirements>/gi, 'update_requirements', false],
    [/<schedule_briefing>([\s\S]*?)<\/schedule_briefing>/gi, 'schedule_briefing', true],
    [/<freeze_project>([\s\S]*?)<\/freeze_project>/gi, 'freeze_project', false],
    [/<thrust_complete>([\s\S]*?)<\/thrust_complete>/gi, 'thrust_complete', false],
    [/<complete_task>([\s\S]*?)<\/complete_task>/gi, 'complete_task', false],
  ];

  const commands = [];
  for (const [pattern, type, isJson] of TAG_SPECS) {
    for (const found of text.matchAll(pattern)) {
      const rawPayload = found[1].trim();
      if (!isJson) {
        commands.push({ type, payload: rawPayload });
        continue;
      }
      try {
        commands.push({ type, payload: JSON.parse(rawPayload) });
      } catch (e) {
        // Salvage malformed thrust JSON as a minimal default thrust.
        if (type === 'create_thrust') commands.push({ type, payload: { title: "System Thrust", markdown_content: rawPayload, tasks: [] } });
      }
    }
  }
  return commands;
}
108
+
109
// Registers a recurring daily cron job whose first run lands at 06:00 in the
// client's timezone (offset = hours ahead of UTC). Registration is
// fire-and-forget: a failed request is ignored.
async function registerMorningCron(projectId, offset) {
  const now = new Date();
  const firstRun = new Date(now);
  // 06:00 local expressed in UTC hours; Date normalizes out-of-range values.
  firstRun.setUTCHours(6 - offset, 0, 0, 0);
  // If that moment already passed today, schedule for tomorrow.
  if (firstRun <= now) firstRun.setDate(firstRun.getDate() + 1);
  const registration = {
    secret: CRON_SECRET,
    jobId: `briefing_${projectId}`,
    intervalMs: 86400000,
    initialDelay: firstRun.getTime() - now.getTime(),
    webhookUrl: `https://everydaytok-thrust-core-server.hf.space/automated-briefing`,
    leadId: projectId,
    payload: { projectId, timezoneOffset: offset },
  };
  fetch(`${CRON_REGISTRY_URL}/register`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(registration),
  }).catch(() => {});
}
120
+
121
// Applies the side effects requested by commands from extractCommands().
// userId: recipient of toast notifications; projectId: the lead/project row id.
// Returns { shouldReload, thrustComplete } so callers can refresh the UI or
// escalate. Each command runs inside its own try/catch: one failure is
// swallowed (deliberate best-effort) so the remaining commands still execute.
async function executeCommands(userId, projectId, commands) {
  let flags = { shouldReload: false, thrustComplete: false };

  for (const cmd of commands) {
    try {
      if (cmd.type === 'create_thrust') {
        // Insert the thrust, then its child tasks (if any) referencing the new id.
        const { data: thrust } = await supabase.from('thrusts').insert({ lead_id: projectId, title: cmd.payload.title, markdown_content: cmd.payload.markdown_content, status: 'active' }).select().single();
        if (thrust && cmd.payload.tasks && cmd.payload.tasks.length > 0) {
          const tasks = cmd.payload.tasks.map(t => ({ thrust_id: thrust.id, title: t }));
          await supabase.from('thrust_tasks').insert(tasks);
        }
        flags.shouldReload = true;
      }

      if (cmd.type === 'log_timeline') {
        // Type defaults to 'system' and is normalized to lowercase.
        await supabase.from('timeline_events').insert({ lead_id: projectId, title: cmd.payload.title, description: cmd.payload.description, type: (cmd.payload.type || 'system').toLowerCase() });
        flags.shouldReload = true;
      }

      if (cmd.type === 'update_requirements') {
        // Payload is the raw (non-JSON) tag body; replaces the whole requirements doc.
        await supabase.from('leads').update({ requirements_doc: cmd.payload }).eq('id', projectId);
        flags.shouldReload = true;
      }

      if (cmd.type === 'schedule_briefing') {
        await registerMorningCron(projectId, cmd.payload.timezone_offset || 0);
      }

      if (cmd.type === 'complete_task') {
        // Fuzzy-match the task title (ILIKE) within the single active thrust.
        const { data: active } = await supabase.from('thrusts').select('id').eq('lead_id', projectId).eq('status', 'active').single();
        if (active) {
          await supabase.from('thrust_tasks').update({ status: 'done' }).eq('thrust_id', active.id).ilike('title', `%${cmd.payload}%`);
          flags.shouldReload = true;
        }
      }

      if (cmd.type === 'thrust_complete') {
        // Mark the active thrust completed and remove its tasks.
        // NOTE(review): tasks are hard-deleted rather than archived — confirm intentional.
        const { data: active } = await supabase.from('thrusts').select('id').eq('lead_id', projectId).eq('status', 'active').single();
        if (active) {
          await supabase.from('thrusts').update({ status: 'completed' }).eq('id', active.id);
          await supabase.from('thrust_tasks').delete().eq('thrust_id', active.id);
          flags.thrustComplete = true;
        }
      }

      if (cmd.type === 'freeze_project') {
        // Payload is the literal tag text; only the exact string 'true' freezes.
        const isFrozen = cmd.payload === 'true';
        await StateManager.setFrozen(projectId, isFrozen);
      }

      if (cmd.type === 'notification' && FRONT_URL) {
        // Fire-and-forget toast to the frontend; delivery failure is ignored.
        fetch(`${FRONT_URL}/internal/notify`, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ user_id: userId, type: 'toast', message: cmd.payload }) }).catch(() => {});
      }

    } catch (e) {}
  }
  return flags;
}
179
+
180
// Creates a project (lead) row, replies immediately with the new id, then
// runs AI initialization (PRD, first thrust, briefing schedule) in the
// background so the HTTP response is not blocked on the model.
app.post('/init-project', async (req, res) => {
  const { userId, name, description, localPath } = req.body;
  const { data: lead, error } = await supabase.from('leads').insert({ user_id: userId, name, description, local_path: localPath, status: 'active', requirements_doc: "Init..." }).select().single();
  // FIX: guard against a failed insert — previously `lead.id` threw on null
  // and the client never received any response.
  if (error || !lead) return res.status(500).json({ error: "Failed to create project" });
  res.json({ success: true, leadId: lead.id });

  // Background initialization; the response above has already been sent.
  setImmediate(async () => {
    try {
      const initInput = `PROJECT: ${name}\nDESC: ${description}\nTask: Init PRD, First Thrust, Schedule Morning Briefing.`;
      const aiResult = await callAI([], initInput, {}, [], prompts.init_system_prompt, "", SMART_MODEL_ID);
      aiResult.text += `\n<notification>Project '${name}' initialized successfully!</notification>`;
      await StateManager.addHistory(lead.id, 'user', initInput);
      await StateManager.addHistory(lead.id, 'model', aiResult.text);
      const cmds = extractCommands(aiResult.text);
      await executeCommands(userId, lead.id, cmds);
    } catch (err) {}
  });
});
197
+
198
// Main chat/log-ingestion endpoint.
// body: { userId, projectId, prompt, context, images, task_type }.
// 'log_ingestion' uses the fast model + log-analyst prompt; anything else
// (default 'chat') uses the smart model + director prompt.
app.post('/process', async (req, res) => {
  const { userId, projectId, prompt, context, images, task_type = 'chat' } = req.body;
  // Direct chat unfreezes the project (the user is active again).
  if (task_type === 'chat') await StateManager.setFrozen(projectId, false);

  let selectedModel = (task_type === 'log_ingestion') ? FAST_MODEL_ID : SMART_MODEL_ID;
  let sysPrompt = (task_type === 'log_ingestion') ? prompts.log_analyst_prompt : prompts.director_system_prompt;

  try {
    // Assemble project context: PRD excerpt, active thrust (+tasks), last 10 timeline events.
    const { data: lead } = await supabase.from('leads').select('requirements_doc').eq('id', projectId).single();
    const { data: activeThrust } = await supabase.from('thrusts').select('title, tasks:thrust_tasks(title, status)').eq('lead_id', projectId).eq('status', 'active').order('created_at', { ascending: false }).limit(1).single();
    const { data: timeline } = await supabase.from('timeline_events').select('title, type, description, created_at').eq('lead_id', projectId).order('created_at', { ascending: false }).limit(10);

    // NOTE(review): if requirements_doc is null this interpolates "undefined..." — confirm acceptable.
    const projectContext = `[PRD]: ${lead?.requirements_doc?.substring(0, 3000)}...\n[CURRENT THRUST]: ${activeThrust ? JSON.stringify(activeThrust) : "None"}\n[RECENT TIMELINE]: ${JSON.stringify(timeline || [])}`;
    const history = await StateManager.getHistory(projectId);

    // NOTE(review): the user's prompt is never persisted via StateManager.addHistory
    // here (only the model reply below is) — confirm this is intentional.
    let aiResult = await callAI(history, prompt, context, images, sysPrompt, projectContext, selectedModel);
    let cmds = extractCommands(aiResult.text);
    let flags = await executeCommands(userId, projectId, cmds);

    // Escalation: if log ingestion closed a thrust, have the smart "director"
    // model plan the next thrust immediately and execute its commands too.
    if (flags.thrustComplete && task_type === 'log_ingestion') {
      const escalationPrompt = "The previous thrust is complete based on logs. Generate the next Thrust immediately to keep momentum.";
      const smartResult = await callAI(history, escalationPrompt, context, [], prompts.director_system_prompt, projectContext, SMART_MODEL_ID);
      aiResult.text += `\n\n[DIRECTOR INTERVENTION]:\n${smartResult.text}`;
      const smartCmds = extractCommands(smartResult.text);
      await executeCommands(userId, projectId, smartCmds);
      await StateManager.addHistory(projectId, 'model', aiResult.text);
    } else {
      await StateManager.addHistory(projectId, 'model', aiResult.text);
    }

    // Strip <tag>...</tag> command blocks before returning text to the client.
    // NOTE(review): the regex pairs ANY opening tag with ANY closing tag (names
    // need not match), so it can over-strip — confirm acceptable.
    const cleanText = aiResult.text.replace(/<[^>]+>[\s\S]*?<\/[^>]+>/g, '').trim();
    res.json({ text: cleanText, should_reload: flags.shouldReload });
  } catch (e) { res.status(500).json({ error: "Processing Error" }); }
});
232
+
233
// Cron webhook: generates the daily "Morning Briefing" thrust for a project.
// Skips when the project is frozen or a thrust was already created today,
// then re-freezes the project until the user chats again (/process unfreezes).
app.post('/automated-briefing', async (req, res) => {
  const { projectId } = req.body;

  try {
    const isFrozen = await StateManager.isFrozen(projectId);
    const { data: lastThrust } = await supabase.from('thrusts').select('created_at').eq('lead_id', projectId).order('created_at', { ascending: false }).limit(1).single();

    // FIX: compare full calendar dates — the previous getDate() comparison
    // only looked at day-of-month, so e.g. Jan 5 and Feb 5 wrongly counted
    // as "already briefed today".
    const lastThrustDay = lastThrust ? new Date(lastThrust.created_at).toDateString() : null;
    const today = new Date().toDateString();

    if (isFrozen) return res.json({ status: "skipped_frozen" });
    if (lastThrustDay === today) return res.json({ status: "skipped_exists" });

    const prompt = "It is morning. Generate today's Morning Briefing (New Thrust). Look at the RECENT TIMELINE to see what was accomplished yesterday. Adopt a highly conversational, proactive tone in the markdown (e.g., 'Morning! You finished X yesterday. Today, the priority is Y.'). If the project has been idle for days, use <freeze_project>true</freeze_project>.";

    const { data: lead } = await supabase.from('leads').select('*').eq('id', projectId).single();
    const { data: timeline } = await supabase.from('timeline_events').select('*').eq('lead_id', projectId).order('created_at', { ascending: false }).limit(5);

    const projectContext = `[PRD]: ${lead.requirements_doc}\n[RECENT TIMELINE]: ${JSON.stringify(timeline)}`;
    const history = await StateManager.getHistory(projectId);

    const aiResult = await callAI(history, prompt, {}, [], prompts.director_system_prompt, projectContext, SMART_MODEL_ID);

    await StateManager.addHistory(projectId, 'model', aiResult.text);
    const cmds = extractCommands(aiResult.text);
    await executeCommands(lead.user_id, projectId, cmds);

    // Freeze until the user interacts again.
    await StateManager.setFrozen(projectId, true);
    res.json({ success: true });

  } catch (e) { res.status(500).json({ error: e.message }); }
});
265
+
266
+ app.get('/', async (req, res) => res.status(200).json({ status: "Alive" }));
267
+ app.listen(PORT, () => console.log(`✅ Core Online: ${PORT}`));
268
+
269
+ /* import express from 'express';
270
+ import cors from 'cors';
271
+ import fs from 'fs';
272
+ import path from 'path';
273
+ import { createClient } from '@supabase/supabase-js';
274
+
275
+ const PORT = 7860;
276
+ const SUPABASE_URL = process.env.SUPABASE_URL;
277
+ const SUPABASE_KEY = process.env.SUPABASE_SERVICE_KEY;
278
+ const REMOTE_SERVER_URL = process.env.REMOTE_AI_URL || "http://localhost:11434";
279
+ const FRONT_URL = process.env.FRONT_URL;
280
+ const CRON_REGISTRY_URL = process.env.CRON_REGISTRY_URL || "http://localhost:7861";
281
+ const CRON_SECRET = process.env.CRON_SECRET || "default_secret";
282
+
283
+ const SMART_MODEL_ID = "claude";
284
+ const FAST_MODEL_ID = "gpt-5-mini";
285
+
286
+ if (!SUPABASE_URL || !SUPABASE_KEY) process.exit(1);
287
+
288
+ const app = express();
289
+ const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);
290
+
291
+ app.use(express.json({ limit: '50mb' }));
292
+ app.use(cors());
293
+
294
+ let prompts = {};
295
+ try {
296
+ prompts = JSON.parse(fs.readFileSync(path.resolve('./prompts.json'), 'utf8'));
297
+ } catch (e) { process.exit(1); }
298
+
299
+ const activeProjects = new Map();
300
+
301
+ const StateManager = {
302
+ getHistory: async (projectId) => {
303
+ if (activeProjects.has(projectId)) return activeProjects.get(projectId).history;
304
+ const { data: chunks } = await supabase.from('message_chunks').select('*').eq('project_id', projectId).order('chunk_index', { ascending: false }).limit(10);
305
+ const fullHistory = (chunks || []).reverse().flatMap(c => c.payload || []);
306
+ activeProjects.set(projectId, { history: fullHistory, isFrozen: false });
307
+ return fullHistory;
308
+ },
309
+ addHistory: async (projectId, role, text) => {
310
+ const newMessage = { role, parts: [{ text }] };
311
+ if (activeProjects.has(projectId)) activeProjects.get(projectId).history.push(newMessage);
312
+ try {
313
+ const { data: latestChunk } = await supabase.from('message_chunks').select('id, chunk_index, payload').eq('project_id', projectId).order('chunk_index', { ascending: false }).limit(1).single();
314
+ const currentPayload = (latestChunk?.payload) || [];
315
+ if (latestChunk && currentPayload.length < 20) {
316
+ await supabase.from('message_chunks').update({ payload: [...currentPayload, newMessage] }).eq('id', latestChunk.id);
317
+ } else {
318
+ await supabase.from('message_chunks').insert({ project_id: projectId, lead_id: projectId, chunk_index: (latestChunk?.chunk_index ?? -1) + 1, payload: [newMessage] });
319
+ }
320
+ } catch (e) {}
321
+ },
322
+ setFrozen: async (projectId, status) => {
323
+ if (activeProjects.has(projectId)) activeProjects.get(projectId).isFrozen = status;
324
+ await supabase.from('leads').update({ is_frozen: status }).eq('id', projectId);
325
+ },
326
+ isFrozen: async (projectId) => {
327
+ if (activeProjects.has(projectId)) return activeProjects.get(projectId).isFrozen;
328
+ const { data } = await supabase.from('leads').select('is_frozen').eq('id', projectId).single();
329
+ return data?.is_frozen || false;
330
+ }
331
+ };
332
+
333
  const callAI = async (history, input, contextData, systemPrompt, projectContext, modelId) => {
334
  let contextStr = "";
335
  try { contextStr = JSON.stringify(contextData, null, 2); } catch {}
 
556
  });
557
 
558
  app.get('/', async (req, res) => res.status(200).json({ status: "Alive" }));
559
+ app.listen(PORT, () => console.log(`✅ Core Online: ${PORT}`));
560
+ */