everydaytok committed on
Commit
355174b
·
verified ·
1 Parent(s): c03ed1e

Update stateManager.js

Browse files
Files changed (1) hide show
  1. stateManager.js +143 -41
stateManager.js CHANGED
@@ -5,9 +5,9 @@ let supabase = null;
5
  const activeProjects = new Map();
6
  const initializationLocks = new Set();
7
 
8
- // Streaming Buffers (Short-term storage for polling)
9
- const streamBuffers = new Map(); // projectId -> String (accumulated text)
10
- const statusUpdates = new Map(); // projectId -> String (current status message)
11
 
12
  export const initDB = () => {
13
  if (!process.env.SUPABASE_URL || !process.env.SUPABASE_SERVICE_ROLE_KEY) {
@@ -26,24 +26,34 @@ export const StateManager = {
26
  // 1. Check Cache
27
  if (activeProjects.has(projectId)) {
28
  const cached = activeProjects.get(projectId);
29
- return cached;
 
 
30
  }
31
 
32
  // 2. Fetch from DB
33
  const { data: proj, error } = await supabase.from('projects').select('*').eq('id', projectId).single();
34
  if (error || !proj) return null;
35
 
 
36
  const { data: chunks } = await supabase.from('message_chunks')
37
  .select('*').eq('project_id', projectId)
38
  .order('chunk_index', { ascending: false }).limit(10);
39
 
 
40
  const memoryObject = {
41
  ...proj.info,
42
  id: proj.id,
43
  userId: proj.user_id,
 
 
 
 
44
  workerHistory: (chunks || []).filter(c => c.type === 'worker').reverse().flatMap(c => c.payload || []),
45
  pmHistory: (chunks || []).filter(c => c.type === 'pm').reverse().flatMap(c => c.payload || []),
 
46
  commandQueue: [],
 
47
  lastActive: Date.now()
48
  };
49
 
@@ -51,9 +61,11 @@ export const StateManager = {
51
  return memoryObject;
52
  },
53
 
54
- // --- HISTORY ---
55
  addHistory: async (projectId, type, role, text) => {
56
  const newMessage = { role, parts: [{ text }] };
 
 
57
  const project = activeProjects.get(projectId);
58
  if (project) {
59
  const historyKey = type === 'pm' ? 'pmHistory' : 'workerHistory';
@@ -61,64 +73,91 @@ export const StateManager = {
61
  project[historyKey].push(newMessage);
62
  }
63
 
 
64
  try {
65
- const { data: chunks } = await supabase.from('message_chunks')
 
66
  .select('id, chunk_index, payload')
67
  .eq('project_id', projectId)
68
  .eq('type', type)
69
  .order('chunk_index', { ascending: false })
70
- .limit(1);
 
 
 
 
 
71
 
72
  const latest = chunks?.[0];
 
 
73
  const currentPayload = (latest && Array.isArray(latest.payload)) ? latest.payload : [];
74
-
75
- if (latest && currentPayload.length < 20) {
76
- await supabase.from('message_chunks').update({ payload: [...currentPayload, newMessage] }).eq('id', latest.id);
77
- } else {
78
- const nextIndex = (latest?.chunk_index ?? -1) + 1;
79
- await supabase.from('message_chunks').insert({
80
- project_id: projectId, type, chunk_index: nextIndex, payload: [newMessage]
 
 
 
 
 
 
 
 
 
 
 
 
81
  });
 
82
  }
83
- } catch (e) { console.error("[StateManager] DB Sync Error:", e); }
84
- },
85
-
86
- // --- STREAM BUFFERING (For Polling) ---
87
- updateStatus: (projectId, message) => {
88
- statusUpdates.set(projectId, message);
89
- },
90
-
91
- getStatus: (projectId) => {
92
- return statusUpdates.get(projectId) || "Idle";
93
- },
94
-
95
- appendToStream: (projectId, textChunk) => {
96
- const current = streamBuffers.get(projectId) || "";
97
- streamBuffers.set(projectId, current + textChunk);
98
- },
99
-
100
- popStream: (projectId) => {
101
- const content = streamBuffers.get(projectId) || "";
102
- streamBuffers.set(projectId, ""); // Clear after popping
103
- return content;
104
  },
105
 
106
- // --- COMMANDS ---
107
  queueCommand: async (projectId, input) => {
108
  let project = activeProjects.get(projectId);
109
- if (!project) project = await StateManager.getProject(projectId);
 
 
 
 
110
  if (!project) return;
111
 
112
  let command = null;
113
- if (typeof input === 'object' && input.type) command = input;
 
 
 
114
  else if (typeof input === 'string') {
115
- const codeMatch = input.match(/```(?:lua|luau)?([\s\S]*?)```/i);
 
 
 
 
 
 
 
 
 
 
 
 
116
  if (codeMatch) command = { type: "EXECUTE", payload: codeMatch[1].trim() };
 
 
 
117
  }
118
 
119
  if (command) {
120
  if (!project.commandQueue) project.commandQueue = [];
121
  project.commandQueue.push(command);
 
122
  }
123
  },
124
 
@@ -128,12 +167,75 @@ export const StateManager = {
128
  return project.commandQueue.shift();
129
  },
130
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
  updateProject: async (projectId, data) => {
 
132
  if (activeProjects.has(projectId)) {
133
  const current = activeProjects.get(projectId);
134
- activeProjects.set(projectId, { ...current, ...data, lastActive: Date.now() });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
  }
136
- // DB update logic omitted for brevity, assumed same as before
137
  },
138
 
139
  getSupabaseClient: () => supabase
 
5
  const activeProjects = new Map();
6
  const initializationLocks = new Set();
7
 
8
+ // --- NEW STREAMING BUFFERS ---
9
+ const streamBuffers = new Map(); // projectId -> String (accumulated partial text)
10
+ const statusBuffers = new Map(); // projectId -> String (current status message)
11
 
12
  export const initDB = () => {
13
  if (!process.env.SUPABASE_URL || !process.env.SUPABASE_SERVICE_ROLE_KEY) {
 
26
  // 1. Check Cache
27
  if (activeProjects.has(projectId)) {
28
  const cached = activeProjects.get(projectId);
29
+ if (cached.workerHistory && cached.pmHistory) {
30
+ return cached;
31
+ }
32
  }
33
 
34
  // 2. Fetch from DB
35
  const { data: proj, error } = await supabase.from('projects').select('*').eq('id', projectId).single();
36
  if (error || !proj) return null;
37
 
38
+ // Fetch last 10 chunks to reconstruct history
39
  const { data: chunks } = await supabase.from('message_chunks')
40
  .select('*').eq('project_id', projectId)
41
  .order('chunk_index', { ascending: false }).limit(10);
42
 
43
+ // 3. Construct Full Memory Object
44
  const memoryObject = {
45
  ...proj.info,
46
  id: proj.id,
47
  userId: proj.user_id,
48
+ thumbnail: proj.info?.thumbnail || null,
49
+ gdd: proj.info?.gdd || null,
50
+
51
+ // Flatten history chunks (Oldest -> Newest)
52
  workerHistory: (chunks || []).filter(c => c.type === 'worker').reverse().flatMap(c => c.payload || []),
53
  pmHistory: (chunks || []).filter(c => c.type === 'pm').reverse().flatMap(c => c.payload || []),
54
+
55
  commandQueue: [],
56
+ failureCount: proj.info?.failureCount || 0,
57
  lastActive: Date.now()
58
  };
59
 
 
61
  return memoryObject;
62
  },
63
 
64
+ // --- HISTORY (PERSISTENT & CHUNKED) ---
65
  addHistory: async (projectId, type, role, text) => {
66
  const newMessage = { role, parts: [{ text }] };
67
+
68
+ // 1. Update local memory immediately (for speed)
69
  const project = activeProjects.get(projectId);
70
  if (project) {
71
  const historyKey = type === 'pm' ? 'pmHistory' : 'workerHistory';
 
73
  project[historyKey].push(newMessage);
74
  }
75
 
76
+ // 2. Database Sync
77
  try {
78
+ // Fetch ONLY the latest chunk metadata to save bandwidth
79
+ const { data: chunks, error: fetchError } = await supabase.from('message_chunks')
80
  .select('id, chunk_index, payload')
81
  .eq('project_id', projectId)
82
  .eq('type', type)
83
  .order('chunk_index', { ascending: false })
84
+ .limit(10);
85
+
86
+ if (fetchError) {
87
+ console.error(`[DB Error] Failed to fetch history for ${projectId}:`, fetchError.message);
88
+ return;
89
+ }
90
 
91
  const latest = chunks?.[0];
92
+
93
+ // Calculate current size safely
94
  const currentPayload = (latest && Array.isArray(latest.payload)) ? latest.payload : [];
95
+ const currentSize = currentPayload.length;
96
+ const latestIndex = (latest && typeof latest.chunk_index === 'number') ? latest.chunk_index : -1;
97
+
98
+ // LOGIC: If we have a chunk and it has room (< 20 items), UPDATE it.
99
+ if (latest && currentSize < 20) {
100
+ const updatedPayload = [...currentPayload, newMessage];
101
+ const { error: updateError } = await supabase.from('message_chunks')
102
+ .update({ payload: updatedPayload })
103
+ .eq('id', latest.id);
104
+ if (updateError) console.error(`[DB Error] Update chunk failed:`, updateError.message);
105
+ }
106
+ // ELSE: Create a NEW chunk
107
+ else {
108
+ const nextIndex = latestIndex + 1;
109
+ const { error: insertError } = await supabase.from('message_chunks').insert({
110
+ project_id: projectId,
111
+ type,
112
+ chunk_index: nextIndex,
113
+ payload: [newMessage] // Start new array
114
  });
115
+ if (insertError) console.error(`[DB Error] Insert new chunk failed:`, insertError.message);
116
  }
117
+ } catch (e) {
118
+ console.error("[StateManager] Unexpected error in addHistory:", e);
119
+ }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  },
121
 
122
+ // --- COMMANDS (MEMORY ONLY + PARSING) ---
123
  queueCommand: async (projectId, input) => {
124
  let project = activeProjects.get(projectId);
125
+
126
+ if (!project) {
127
+ project = await StateManager.getProject(projectId);
128
+ }
129
+
130
  if (!project) return;
131
 
132
  let command = null;
133
+
134
+ if (typeof input === 'object' && input.type && input.payload) {
135
+ command = input;
136
+ }
137
  else if (typeof input === 'string') {
138
+ const rawResponse = input;
139
+
140
+ // Loop Prevention
141
+ if (rawResponse.includes("[ASK_PM:")) return;
142
+ if (rawResponse.includes("[ROUTE_TO_PM:")) return;
143
+ if (rawResponse.includes("[GENERATE_IMAGE:") && !rawResponse.includes("```")) return;
144
+
145
+ // REGEX Parsing
146
+ const codeMatch = rawResponse.match(/```(?:lua|luau)?([\s\S]*?)```/i);
147
+ const readScriptMatch = rawResponse.match(/\[READ_SCRIPT:\s*(.*?)\]/);
148
+ const readHierarchyMatch = rawResponse.match(/\[READ_HIERARCHY:\s*(.*?)\]/);
149
+ const readLogsMatch = rawResponse.includes("[READ_LOGS]");
150
+
151
  if (codeMatch) command = { type: "EXECUTE", payload: codeMatch[1].trim() };
152
+ else if (readScriptMatch) command = { type: "READ_SCRIPT", payload: readScriptMatch[1].trim() };
153
+ else if (readHierarchyMatch) command = { type: "READ_HIERARCHY", payload: readHierarchyMatch[1].trim() };
154
+ else if (readLogsMatch) command = { type: "READ_LOGS", payload: null };
155
  }
156
 
157
  if (command) {
158
  if (!project.commandQueue) project.commandQueue = [];
159
  project.commandQueue.push(command);
160
+ console.log(`[Memory] Queued command for ${projectId}: ${command.type}`);
161
  }
162
  },
163
 
 
167
  return project.commandQueue.shift();
168
  },
169
 
170
+ // --- STREAMING HELPERS (FOR POLLING) ---
171
+ setStatus: (projectId, status) => {
172
+ statusBuffers.set(projectId, status);
173
+ },
174
+
175
+ getStatus: (projectId) => {
176
+ return statusBuffers.get(projectId) || "Idle";
177
+ },
178
+
179
+ appendStream: (projectId, chunk) => {
180
+ const current = streamBuffers.get(projectId) || "";
181
+ streamBuffers.set(projectId, current + chunk);
182
+ },
183
+
184
+ popStream: (projectId) => {
185
+ const content = streamBuffers.get(projectId) || "";
186
+ streamBuffers.set(projectId, ""); // Clear buffer after read
187
+ return content;
188
+ },
189
+
190
+ // --- METADATA UPDATE ---
191
  updateProject: async (projectId, data) => {
192
+ // 1. Update Memory
193
  if (activeProjects.has(projectId)) {
194
  const current = activeProjects.get(projectId);
195
+ const newData = { ...current, ...data, lastActive: Date.now() };
196
+ activeProjects.set(projectId, newData);
197
+ }
198
+
199
+ // 2. Prepare DB Payload
200
+ const payload = {
201
+ info: {
202
+ title: data.title,
203
+ status: data.status,
204
+ stats: data.stats,
205
+ description: data.description,
206
+ failureCount: data.failureCount
207
+ }
208
+ };
209
+
210
+ Object.keys(payload.info).forEach(key => payload.info[key] === undefined && delete payload.info[key]);
211
+
212
+ const { data: currentDb } = await supabase.from('projects').select('info').eq('id', projectId).single();
213
+
214
+ if (currentDb) {
215
+ const mergedInfo = { ...currentDb.info, ...payload.info };
216
+ delete mergedInfo.commandQueue;
217
+
218
+ const { error } = await supabase.from('projects').update({ info: mergedInfo }).eq('id', projectId);
219
+ if (error) console.error("[DB ERROR] Update Project failed:", error.message);
220
+ }
221
+ },
222
+
223
+ cleanupMemory: () => {
224
+ const now = Date.now();
225
+ const FOUR_HOURS = 4 * 60 * 60 * 1000;
226
+ let count = 0;
227
+
228
+ for (const [id, data] of activeProjects.entries()) {
229
+ if (!data.lastActive) data.lastActive = now;
230
+ if (now - data.lastActive > FOUR_HOURS) {
231
+ activeProjects.delete(id);
232
+ // Also clean streams
233
+ streamBuffers.delete(id);
234
+ statusBuffers.delete(id);
235
+ count++;
236
+ }
237
  }
238
+ return count;
239
  },
240
 
241
  getSupabaseClient: () => supabase