everydaytok committed on
Commit
c5dac88
·
verified ·
1 Parent(s): 91348f7

Update stateManager.js

Browse files
Files changed (1) hide show
  1. stateManager.js +75 -94
stateManager.js CHANGED
@@ -22,7 +22,6 @@ export const StateManager = {
22
  // 1. Check Cache
23
  if (activeProjects.has(projectId)) {
24
  const cached = activeProjects.get(projectId);
25
- // Ensure we have history arrays to prevent crashes
26
  if (cached.workerHistory && cached.pmHistory) {
27
  return cached;
28
  }
@@ -32,6 +31,7 @@ export const StateManager = {
32
  const { data: proj, error } = await supabase.from('projects').select('*').eq('id', projectId).single();
33
  if (error || !proj) return null;
34
 
 
35
  const { data: chunks } = await supabase.from('message_chunks')
36
  .select('*').eq('project_id', projectId)
37
  .order('chunk_index', { ascending: false }).limit(10);
@@ -41,10 +41,10 @@ export const StateManager = {
41
  ...proj.info,
42
  id: proj.id,
43
  userId: proj.user_id,
44
- // --- RESTORED FIELDS ---
45
- thumbnail: proj.info?.thumbnail || null, // Ensure thumbnail is carried over
46
- gdd: proj.info?.gdd || null, // Ensure GDD is carried over
47
- // -----------------------
48
  workerHistory: (chunks || []).filter(c => c.type === 'worker').reverse().flatMap(c => c.payload || []),
49
  pmHistory: (chunks || []).filter(c => c.type === 'pm').reverse().flatMap(c => c.payload || []),
50
 
@@ -57,12 +57,11 @@ export const StateManager = {
57
  return memoryObject;
58
  },
59
 
60
-
61
- // --- HISTORY (PERSISTENT) ---
62
  addHistory: async (projectId, type, role, text) => {
63
  const newMessage = { role, parts: [{ text }] };
64
 
65
- // 1. Update local memory immediately
66
  const project = activeProjects.get(projectId);
67
  if (project) {
68
  const historyKey = type === 'pm' ? 'pmHistory' : 'workerHistory';
@@ -70,34 +69,60 @@ export const StateManager = {
70
  project[historyKey].push(newMessage);
71
  }
72
 
73
- // 2. Database Sync (Optimized Chunking)
74
- const { data: chunks } = await supabase.from('message_chunks')
75
- .select('*')
76
- .eq('project_id', projectId)
77
- .eq('type', type)
78
- .order('chunk_index', { ascending: false })
79
- .limit(1);
80
-
81
- const latest = chunks?.[0];
82
-
83
- if (latest && latest.payload && latest.payload.length < 20) {
84
- const updatedPayload = [...(latest.payload || []), newMessage];
85
- await supabase.from('message_chunks')
86
- .update({ payload: updatedPayload })
87
- .eq('id', latest.id);
88
- } else {
89
- const nextIndex = latest ? (latest.chunk_index + 1) : 0;
90
- await supabase.from('message_chunks').insert({
91
- project_id: projectId,
92
- type,
93
- chunk_index: nextIndex,
94
- payload: [newMessage]
95
- });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
96
  }
97
  },
98
-
99
 
100
- // --- COMMANDS (MEMORY ONLY) ---
101
  queueCommand: async (projectId, input) => {
102
  let project = activeProjects.get(projectId);
103
 
@@ -107,23 +132,20 @@ export const StateManager = {
107
 
108
  if (!project) return;
109
 
110
- // --- RESTORED PARSING LOGIC START ---
111
  let command = null;
112
 
113
  if (typeof input === 'object' && input.type && input.payload) {
114
- // It's already a formatted command object
115
  command = input;
116
  }
117
  else if (typeof input === 'string') {
118
  const rawResponse = input;
119
 
120
- // Filters to prevent loops
121
  if (rawResponse.includes("[ASK_PM:")) return;
122
  if (rawResponse.includes("[ROUTE_TO_PM:")) return;
123
- // Ignore generic image generation triggers if no code is present
124
  if (rawResponse.includes("[GENERATE_IMAGE:") && !rawResponse.includes("```")) return;
125
 
126
- // REGEX: Extract Lua code, Read Script, Read Hierarchy, Logs
127
  const codeMatch = rawResponse.match(/```(?:lua|luau)?([\s\S]*?)```/i);
128
  const readScriptMatch = rawResponse.match(/\[READ_SCRIPT:\s*(.*?)\]/);
129
  const readHierarchyMatch = rawResponse.match(/\[READ_HIERARCHY:\s*(.*?)\]/);
@@ -134,31 +156,21 @@ export const StateManager = {
134
  else if (readHierarchyMatch) command = { type: "READ_HIERARCHY", payload: readHierarchyMatch[1].trim() };
135
  else if (readLogsMatch) command = { type: "READ_LOGS", payload: null };
136
  }
137
- // --- RESTORED PARSING LOGIC END ---
138
-
139
  if (command) {
140
  if (!project.commandQueue) project.commandQueue = [];
141
  project.commandQueue.push(command);
142
  console.log(`[Memory] Queued command for ${projectId}: ${command.type}`);
143
-
144
- // Optional: If you want to persist the queue to Supabase like you did in Firebase,
145
- // you would call updateProject here. But keeping it memory-only is faster.
146
  }
147
  },
148
 
149
  popCommand: async (projectId) => {
150
- // Strictly memory check. If it's not in RAM, it has no commands pending.
151
  const project = activeProjects.get(projectId);
152
-
153
- if (!project || !project.commandQueue || project.commandQueue.length === 0) {
154
- return null;
155
- }
156
-
157
- const command = project.commandQueue.shift();
158
- return command;
159
  },
160
 
161
- // --- METADATA UPDATE (PERSISTENT) ---
162
  updateProject: async (projectId, data) => {
163
  // 1. Update Memory
164
  if (activeProjects.has(projectId)) {
@@ -167,8 +179,7 @@ export const StateManager = {
167
  activeProjects.set(projectId, newData);
168
  }
169
 
170
- // 2. Prepare DB Payload
171
- // IMPORTANT: We explicitly DO NOT include 'commandQueue' here.
172
  const payload = {
173
  info: {
174
  title: data.title,
@@ -176,69 +187,39 @@ export const StateManager = {
176
  stats: data.stats,
177
  description: data.description,
178
  failureCount: data.failureCount
179
- // commandQueue is omitted to keep DB clean
180
  }
181
  };
182
 
183
  // Clean undefined keys
184
  Object.keys(payload.info).forEach(key => payload.info[key] === undefined && delete payload.info[key]);
185
 
186
- // 3. Merge Update to DB
187
- // Fetch current info to preserve fields we aren't updating right now
188
  const { data: currentDb } = await supabase.from('projects').select('info').eq('id', projectId).single();
189
 
190
  if (currentDb) {
191
  const mergedInfo = { ...currentDb.info, ...payload.info };
192
- // Ensure we sanitize commandQueue out of the DB even if it was there historically
193
- delete mergedInfo.commandQueue;
194
 
195
  const { error } = await supabase.from('projects').update({ info: mergedInfo }).eq('id', projectId);
196
  if (error) console.error("[DB ERROR] Update Project failed:", error.message);
197
  }
198
  },
199
 
200
- cleanupMemory: () => {
201
- const now = Date.now();
202
- const FOUR_HOURS = 4 * 60 * 60 * 1000;
203
- let count = 0;
204
-
205
- for (const [id, data] of activeProjects.entries()) {
206
- // FIX: If lastActive is missing/undefined/0, reset it to NOW.
207
- // This prevents the "54 year old project" bug where (now - 0) > 4 hours.
208
- if (!data.lastActive) {
209
- console.warn(`[StateManager] ⚠️ Project ${id} missing timestamp. Healing data...`);
210
- data.lastActive = now;
211
- continue; // Skip deleting this round
212
- }
213
-
214
- if (now - data.lastActive > FOUR_HOURS) {
215
- console.log(`[StateManager] 🧹 Removing expired project: ${id}`);
216
- activeProjects.delete(id);
217
- count++;
218
- }
219
- }
220
-
221
- if (count > 0) {
222
- console.log(`[StateManager] 🗑️ Cleaned ${count} projects from memory.`);
223
- }
224
- return count;
225
- },
226
-
227
- /* cleanupMemory: () => {
228
- // Optional: Remove projects inactive for > 1 hour
229
  const now = Date.now();
 
230
  let count = 0;
231
- for (const [id, proj] of activeProjects.entries()) {
232
- if (now - proj.lastActive >
233
- // 3600000 // 1 hour
234
- 18000000 //5 hours
235
- ) {
 
236
  activeProjects.delete(id);
237
  count++;
238
  }
239
  }
240
  return count;
241
- }, */
242
 
243
  getSupabaseClient: () => supabase
244
  };
 
22
  // 1. Check Cache
23
  if (activeProjects.has(projectId)) {
24
  const cached = activeProjects.get(projectId);
 
25
  if (cached.workerHistory && cached.pmHistory) {
26
  return cached;
27
  }
 
31
  const { data: proj, error } = await supabase.from('projects').select('*').eq('id', projectId).single();
32
  if (error || !proj) return null;
33
 
34
+ // Fetch last 10 chunks to reconstruct history
35
  const { data: chunks } = await supabase.from('message_chunks')
36
  .select('*').eq('project_id', projectId)
37
  .order('chunk_index', { ascending: false }).limit(10);
 
41
  ...proj.info,
42
  id: proj.id,
43
  userId: proj.user_id,
44
+ thumbnail: proj.info?.thumbnail || null,
45
+ gdd: proj.info?.gdd || null,
46
+
47
+ // Flatten history chunks (Oldest -> Newest)
48
  workerHistory: (chunks || []).filter(c => c.type === 'worker').reverse().flatMap(c => c.payload || []),
49
  pmHistory: (chunks || []).filter(c => c.type === 'pm').reverse().flatMap(c => c.payload || []),
50
 
 
57
  return memoryObject;
58
  },
59
 
60
+ // --- HISTORY (PERSISTENT & CHUNKED) ---
 
61
  addHistory: async (projectId, type, role, text) => {
62
  const newMessage = { role, parts: [{ text }] };
63
 
64
+ // 1. Update local memory immediately (for speed)
65
  const project = activeProjects.get(projectId);
66
  if (project) {
67
  const historyKey = type === 'pm' ? 'pmHistory' : 'workerHistory';
 
69
  project[historyKey].push(newMessage);
70
  }
71
 
72
+ // 2. Database Sync
73
+ try {
74
+ // Fetch ONLY the latest chunk metadata to save bandwidth
75
+ const { data: chunks, error: fetchError } = await supabase.from('message_chunks')
76
+ .select('id, chunk_index, payload')
77
+ .eq('project_id', projectId)
78
+ .eq('type', type)
79
+ .order('chunk_index', { ascending: false })
80
+ .limit(10);
81
+
82
+ if (fetchError) {
83
+ console.error(`[DB Error] Failed to fetch history for ${projectId}:`, fetchError.message);
84
+ return;
85
+ }
86
+
87
+ const latest = chunks?.[0];
88
+
89
+ // Calculate current size safely
90
+ const currentPayload = (latest && Array.isArray(latest.payload)) ? latest.payload : [];
91
+ const currentSize = currentPayload.length;
92
+ const latestIndex = (latest && typeof latest.chunk_index === 'number') ? latest.chunk_index : -1;
93
+
94
+ // LOGIC: If we have a chunk and it has room (< 20 items), UPDATE it.
95
+ if (latest && currentSize < 20) {
96
+ console.log(`[History] Appending to Chunk ${latestIndex} (Count: ${currentSize} -> ${currentSize + 1})`);
97
+
98
+ const updatedPayload = [...currentPayload, newMessage];
99
+
100
+ const { error: updateError } = await supabase.from('message_chunks')
101
+ .update({ payload: updatedPayload })
102
+ .eq('id', latest.id);
103
+
104
+ if (updateError) console.error(`[DB Error] Update chunk failed:`, updateError.message);
105
+ }
106
+ // ELSE: Create a NEW chunk
107
+ else {
108
+ const nextIndex = latestIndex + 1;
109
+ console.log(`[History] Chunk ${latestIndex} full (${currentSize}). Creating Chunk ${nextIndex}.`);
110
+
111
+ const { error: insertError } = await supabase.from('message_chunks').insert({
112
+ project_id: projectId,
113
+ type,
114
+ chunk_index: nextIndex,
115
+ payload: [newMessage] // Start new array
116
+ });
117
+
118
+ if (insertError) console.error(`[DB Error] Insert new chunk failed:`, insertError.message);
119
+ }
120
+ } catch (e) {
121
+ console.error("[StateManager] Unexpected error in addHistory:", e);
122
  }
123
  },
 
124
 
125
+ // --- COMMANDS (MEMORY ONLY + PARSING) ---
126
  queueCommand: async (projectId, input) => {
127
  let project = activeProjects.get(projectId);
128
 
 
132
 
133
  if (!project) return;
134
 
 
135
  let command = null;
136
 
137
  if (typeof input === 'object' && input.type && input.payload) {
 
138
  command = input;
139
  }
140
  else if (typeof input === 'string') {
141
  const rawResponse = input;
142
 
143
+ // Loop Prevention
144
  if (rawResponse.includes("[ASK_PM:")) return;
145
  if (rawResponse.includes("[ROUTE_TO_PM:")) return;
 
146
  if (rawResponse.includes("[GENERATE_IMAGE:") && !rawResponse.includes("```")) return;
147
 
148
+ // REGEX Parsing
149
  const codeMatch = rawResponse.match(/```(?:lua|luau)?([\s\S]*?)```/i);
150
  const readScriptMatch = rawResponse.match(/\[READ_SCRIPT:\s*(.*?)\]/);
151
  const readHierarchyMatch = rawResponse.match(/\[READ_HIERARCHY:\s*(.*?)\]/);
 
156
  else if (readHierarchyMatch) command = { type: "READ_HIERARCHY", payload: readHierarchyMatch[1].trim() };
157
  else if (readLogsMatch) command = { type: "READ_LOGS", payload: null };
158
  }
159
+
 
160
  if (command) {
161
  if (!project.commandQueue) project.commandQueue = [];
162
  project.commandQueue.push(command);
163
  console.log(`[Memory] Queued command for ${projectId}: ${command.type}`);
 
 
 
164
  }
165
  },
166
 
167
  popCommand: async (projectId) => {
 
168
  const project = activeProjects.get(projectId);
169
+ if (!project || !project.commandQueue || project.commandQueue.length === 0) return null;
170
+ return project.commandQueue.shift();
 
 
 
 
 
171
  },
172
 
173
+ // --- METADATA UPDATE ---
174
  updateProject: async (projectId, data) => {
175
  // 1. Update Memory
176
  if (activeProjects.has(projectId)) {
 
179
  activeProjects.set(projectId, newData);
180
  }
181
 
182
+ // 2. Prepare DB Payload (Exclude commandQueue)
 
183
  const payload = {
184
  info: {
185
  title: data.title,
 
187
  stats: data.stats,
188
  description: data.description,
189
  failureCount: data.failureCount
 
190
  }
191
  };
192
 
193
  // Clean undefined keys
194
  Object.keys(payload.info).forEach(key => payload.info[key] === undefined && delete payload.info[key]);
195
 
 
 
196
  const { data: currentDb } = await supabase.from('projects').select('info').eq('id', projectId).single();
197
 
198
  if (currentDb) {
199
  const mergedInfo = { ...currentDb.info, ...payload.info };
200
+ delete mergedInfo.commandQueue; // Sanity check
 
201
 
202
  const { error } = await supabase.from('projects').update({ info: mergedInfo }).eq('id', projectId);
203
  if (error) console.error("[DB ERROR] Update Project failed:", error.message);
204
  }
205
  },
206
 
207
+ cleanupMemory: () => {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
208
  const now = Date.now();
209
+ const FOUR_HOURS = 4 * 60 * 60 * 1000;
210
  let count = 0;
211
+
212
+ for (const [id, data] of activeProjects.entries()) {
213
+ if (!data.lastActive) data.lastActive = now; // Heal missing timestamp
214
+
215
+ if (now - data.lastActive > FOUR_HOURS) {
216
+ console.log(`[StateManager] 🧹 Removing expired project: ${id}`);
217
  activeProjects.delete(id);
218
  count++;
219
  }
220
  }
221
  return count;
222
+ },
223
 
224
  getSupabaseClient: () => supabase
225
  };