Kraft102 committed on
Commit
33f2414
Β·
verified Β·
1 Parent(s): af443cc

Deploy from GitHub Actions 2025-12-16_03-24-50

Browse files
apps/backend/package.json CHANGED
@@ -18,7 +18,9 @@
18
  "sync:start": "tsx src/scripts/neo4j-auto-sync.ts start",
19
  "sync:now": "tsx src/scripts/neo4j-auto-sync.ts sync",
20
  "sync:full": "tsx src/scripts/neo4j-auto-sync.ts full",
21
- "sync:status": "tsx src/scripts/neo4j-auto-sync.ts status"
 
 
22
  },
23
  "dependencies": {
24
  "@anthropic-ai/sdk": "^0.71.0",
 
18
  "sync:start": "tsx src/scripts/neo4j-auto-sync.ts start",
19
  "sync:now": "tsx src/scripts/neo4j-auto-sync.ts sync",
20
  "sync:full": "tsx src/scripts/neo4j-auto-sync.ts full",
21
+ "sync:status": "tsx src/scripts/neo4j-auto-sync.ts status",
22
+ "sync:fast": "tsx src/scripts/fast-neo4j-sync.ts",
23
+ "sync:fast-incremental": "tsx src/scripts/fast-neo4j-sync.ts --incremental"
24
  },
25
  "dependencies": {
26
  "@anthropic-ai/sdk": "^0.71.0",
apps/backend/src/index.ts CHANGED
@@ -119,6 +119,7 @@ import { vectorService } from './services/VectorService.js';
119
 
120
  // 🐝 SWARM & EVOLUTION - Advanced Neural Systems
121
  import { swarmControl } from './agents/SwarmControl.js';
 
122
  import { AngelProxy } from './middleware/AngelProxy.js';
123
  import { prometheus } from './services/Prometheus.js';
124
  import { logStream } from './services/logging/logStream.js';
@@ -164,10 +165,26 @@ async function startServer() {
164
  app.get('/ready', (req, res) => res.json({ ready: true }));
165
  app.get('/live', (req, res) => res.json({ live: true }));
166
 
 
 
 
 
 
 
167
  // Start server IMMEDIATELY
168
  server.listen(PORT, '0.0.0.0', () => {
169
- console.log(`πŸš€ Backend server running on http://0.0.0.0:${PORT} (Early Start)`);
170
  console.log(`πŸ“‘ MCP WebSocket available at ws://0.0.0.0:${PORT}/mcp/ws`);
 
 
 
 
 
 
 
 
 
 
171
  });
172
 
173
  // ============================================
 
119
 
120
  // 🐝 SWARM & EVOLUTION - Advanced Neural Systems
121
  import { swarmControl } from './agents/SwarmControl.js';
122
+ import { handoverWatchdog } from './services/HandoverWatchdog.js';
123
  import { AngelProxy } from './middleware/AngelProxy.js';
124
  import { prometheus } from './services/Prometheus.js';
125
  import { logStream } from './services/logging/logStream.js';
 
165
  app.get('/ready', (req, res) => res.json({ ready: true }));
166
  app.get('/live', (req, res) => res.json({ live: true }));
167
 
168
+ // Start server IMMEDIATELY
169
+ // CRITICAL ARCHITECTURE DECISION:
170
+ // We force PORT 3001 to match the NudgeService internal wiring.
171
+ // Cloud defaults like 7860 break the internal reflex arc.
172
+ const PORT = 3001;
173
+
174
  // Start server IMMEDIATELY
175
  server.listen(PORT, '0.0.0.0', () => {
176
+ console.log(`πŸš€ Backend server running on http://0.0.0.0:${PORT}`);
177
  console.log(`πŸ“‘ MCP WebSocket available at ws://0.0.0.0:${PORT}/mcp/ws`);
178
+ console.log(`✨ [SYSTEM] Neural Singularity aligned on Port ${PORT}`);
179
+
180
+ // ACTIVATING AUTONOMOUS REFLEXES
181
+ handoverWatchdog.startWatch();
182
+
183
+ // Listen for signals (This is where we hook into the Orchestrator later)
184
+ handoverWatchdog.on('handover_signal', (signal) => {
185
+ console.log(`πŸ”” [SYSTEM] ORCHESTRATOR ALERT: Activating ${signal.target}...`);
186
+ // TODO: Integrate with agentOrchestrator.triggerAgent(signal.target);
187
+ });
188
  });
189
 
190
  // ============================================
apps/backend/src/scripts/fast-neo4j-sync.ts ADDED
@@ -0,0 +1,588 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Fast Neo4j Sync - Parallel batch sync with incremental support
3
+ *
4
+ * Much faster than sequential sync by:
5
+ * 1. Using larger batches (500 instead of 100)
6
+ * 2. Running multiple batches in parallel
7
+ * 3. Using UNWIND for bulk operations
8
+ * 4. Supporting incremental sync based on timestamps
9
+ */
10
+
11
+ import neo4j, { Driver, Session } from 'neo4j-driver';
12
+ import { config } from 'dotenv';
13
+ import { resolve } from 'path';
14
+
15
+ // Load .env.production if it exists, otherwise fall back to .env
16
+ config({ path: resolve(process.cwd(), '.env.production') });
17
+ config({ path: resolve(process.cwd(), '.env') });
18
+
19
+ interface SyncConfig {
20
+ localUri: string;
21
+ localUser: string;
22
+ localPassword: string;
23
+ cloudUri: string;
24
+ cloudUser: string;
25
+ cloudPassword: string;
26
+ batchSize: number;
27
+ parallelBatches: number;
28
+ incrementalOnly: boolean;
29
+ lastSyncTime?: Date;
30
+ }
31
+
32
+ interface SyncStats {
33
+ nodesCreated: number;
34
+ nodesUpdated: number;
35
+ relationshipsCreated: number;
36
+ relationshipsUpdated: number;
37
+ duration: number;
38
+ }
39
+
40
+ class FastNeo4jSync {
41
+ private localDriver: Driver;
42
+ private cloudDriver: Driver;
43
+ private config: SyncConfig;
44
+ private stats: SyncStats = {
45
+ nodesCreated: 0,
46
+ nodesUpdated: 0,
47
+ relationshipsCreated: 0,
48
+ relationshipsUpdated: 0,
49
+ duration: 0
50
+ };
51
+
52
+ constructor(config: SyncConfig) {
53
+ this.config = config;
54
+ this.localDriver = neo4j.driver(
55
+ config.localUri,
56
+ neo4j.auth.basic(config.localUser, config.localPassword)
57
+ );
58
+ this.cloudDriver = neo4j.driver(
59
+ config.cloudUri,
60
+ neo4j.auth.basic(config.cloudUser, config.cloudPassword)
61
+ );
62
+ }
63
+
64
+ async connect(): Promise<void> {
65
+ console.log('πŸ“‘ Connecting to databases...');
66
+
67
+ const localSession = this.localDriver.session();
68
+ const cloudSession = this.cloudDriver.session();
69
+
70
+ try {
71
+ await localSession.run('RETURN 1');
72
+ console.log(' βœ“ Local Neo4j connected');
73
+
74
+ await cloudSession.run('RETURN 1');
75
+ console.log(' βœ“ Cloud AuraDB connected');
76
+ } finally {
77
+ await localSession.close();
78
+ await cloudSession.close();
79
+ }
80
+ }
81
+
82
+ async getLastSyncTime(): Promise<Date | null> {
83
+ const cloudSession = this.cloudDriver.session();
84
+ try {
85
+ const result = await cloudSession.run(`
86
+ MATCH (s:_SyncMetadata {id: 'last_sync'})
87
+ RETURN s.timestamp as timestamp
88
+ `);
89
+ if (result.records.length > 0) {
90
+ const ts = result.records[0].get('timestamp');
91
+ return ts ? new Date(ts) : null;
92
+ }
93
+ return null;
94
+ } catch {
95
+ return null;
96
+ } finally {
97
+ await cloudSession.close();
98
+ }
99
+ }
100
+
101
+ async setLastSyncTime(timestamp: Date): Promise<void> {
102
+ const cloudSession = this.cloudDriver.session();
103
+ try {
104
+ await cloudSession.run(`
105
+ MERGE (s:_SyncMetadata {id: 'last_sync'})
106
+ SET s.timestamp = $timestamp
107
+ `, { timestamp: timestamp.toISOString() });
108
+ } finally {
109
+ await cloudSession.close();
110
+ }
111
+ }
112
+
113
+ async countNodesToSync(since?: Date): Promise<number> {
114
+ const localSession = this.localDriver.session();
115
+ try {
116
+ let query: string;
117
+ let params: Record<string, unknown> = {};
118
+
119
+ if (since) {
120
+ query = `
121
+ MATCH (n)
122
+ WHERE n.updatedAt > $since OR n.createdAt > $since OR NOT exists(n.updatedAt)
123
+ RETURN count(n) as count
124
+ `;
125
+ params = { since: since.toISOString() };
126
+ } else {
127
+ query = 'MATCH (n) RETURN count(n) as count';
128
+ }
129
+
130
+ const result = await localSession.run(query, params);
131
+ return result.records[0].get('count').toNumber();
132
+ } finally {
133
+ await localSession.close();
134
+ }
135
+ }
136
+
137
+ async countRelationshipsToSync(since?: Date): Promise<number> {
138
+ const localSession = this.localDriver.session();
139
+ try {
140
+ let query: string;
141
+ let params: Record<string, unknown> = {};
142
+
143
+ if (since) {
144
+ // For relationships, we sync if either end node was updated
145
+ query = `
146
+ MATCH (a)-[r]->(b)
147
+ WHERE a.updatedAt > $since OR b.updatedAt > $since
148
+ OR a.createdAt > $since OR b.createdAt > $since
149
+ OR NOT exists(a.updatedAt)
150
+ RETURN count(r) as count
151
+ `;
152
+ params = { since: since.toISOString() };
153
+ } else {
154
+ query = 'MATCH ()-[r]->() RETURN count(r) as count';
155
+ }
156
+
157
+ const result = await localSession.run(query, params);
158
+ return result.records[0].get('count').toNumber();
159
+ } finally {
160
+ await localSession.close();
161
+ }
162
+ }
163
+
164
+ async syncNodesInParallel(since?: Date): Promise<void> {
165
+ const localSession = this.localDriver.session();
166
+ const totalNodes = await this.countNodesToSync(since);
167
+
168
+ console.log(`\nπŸ“¦ Syncing ${totalNodes} nodes (batch: ${this.config.batchSize}, parallel: ${this.config.parallelBatches})...`);
169
+
170
+ if (totalNodes === 0) {
171
+ console.log(' βœ“ No nodes to sync');
172
+ return;
173
+ }
174
+
175
+ let offset = 0;
176
+ let processed = 0;
177
+ const startTime = Date.now();
178
+
179
+ try {
180
+ while (offset < totalNodes) {
181
+ // Fetch multiple batches worth of nodes
182
+ const fetchSize = this.config.batchSize * this.config.parallelBatches;
183
+
184
+ let query: string;
185
+ let params: Record<string, unknown> = { skip: neo4j.int(offset), limit: neo4j.int(fetchSize) };
186
+
187
+ if (since) {
188
+ query = `
189
+ MATCH (n)
190
+ WHERE n.updatedAt > $since OR n.createdAt > $since OR NOT exists(n.updatedAt)
191
+ RETURN n, labels(n) as labels, elementId(n) as elementId
192
+ SKIP $skip LIMIT $limit
193
+ `;
194
+ params.since = since.toISOString();
195
+ } else {
196
+ query = `
197
+ MATCH (n)
198
+ RETURN n, labels(n) as labels, elementId(n) as elementId
199
+ SKIP $skip LIMIT $limit
200
+ `;
201
+ }
202
+
203
+ const result = await localSession.run(query, params);
204
+ const nodes = result.records.map(r => ({
205
+ properties: r.get('n').properties,
206
+ labels: r.get('labels') as string[],
207
+ elementId: r.get('elementId') as string
208
+ }));
209
+
210
+ if (nodes.length === 0) break;
211
+
212
+ // Split into batches and process in parallel
213
+ const batches: typeof nodes[] = [];
214
+ for (let i = 0; i < nodes.length; i += this.config.batchSize) {
215
+ batches.push(nodes.slice(i, i + this.config.batchSize));
216
+ }
217
+
218
+ // Process batches in parallel
219
+ await Promise.all(batches.map(batch => this.importNodeBatch(batch)));
220
+
221
+ processed += nodes.length;
222
+ offset += fetchSize;
223
+
224
+ const elapsed = (Date.now() - startTime) / 1000;
225
+ const rate = Math.round(processed / elapsed);
226
+ const eta = Math.round((totalNodes - processed) / rate);
227
+
228
+ console.log(` Progress: ${processed}/${totalNodes} (${rate} nodes/sec, ETA: ${eta}s)`);
229
+ }
230
+
231
+ this.stats.nodesCreated = processed;
232
+ console.log(` βœ“ Synced ${processed} nodes in ${((Date.now() - startTime) / 1000).toFixed(1)}s`);
233
+ } finally {
234
+ await localSession.close();
235
+ }
236
+ }
237
+
238
+ private async importNodeBatch(nodes: Array<{ properties: Record<string, unknown>; labels: string[]; elementId: string }>): Promise<void> {
239
+ const cloudSession = this.cloudDriver.session();
240
+ try {
241
+ // Group nodes by label combination for efficient UNWIND
242
+ const nodesByLabels = new Map<string, typeof nodes>();
243
+
244
+ for (const node of nodes) {
245
+ const labelKey = node.labels.sort().join(':');
246
+ if (!nodesByLabels.has(labelKey)) {
247
+ nodesByLabels.set(labelKey, []);
248
+ }
249
+ nodesByLabels.get(labelKey)!.push(node);
250
+ }
251
+
252
+ // Import each label group
253
+ for (const [labelKey, labelNodes] of nodesByLabels) {
254
+ const labels = labelKey || 'Node';
255
+ const nodeData = labelNodes.map(n => ({
256
+ ...n.properties,
257
+ _syncId: n.elementId
258
+ }));
259
+
260
+ // Use MERGE to handle both create and update
261
+ await cloudSession.run(`
262
+ UNWIND $nodes as nodeData
263
+ MERGE (n:${labels} {_syncId: nodeData._syncId})
264
+ SET n = nodeData
265
+ `, { nodes: nodeData });
266
+ }
267
+ } finally {
268
+ await cloudSession.close();
269
+ }
270
+ }
271
+
272
+ async syncRelationshipsInParallel(since?: Date): Promise<void> {
273
+ const localSession = this.localDriver.session();
274
+ const totalRels = await this.countRelationshipsToSync(since);
275
+
276
+ console.log(`\nπŸ”— Syncing ${totalRels} relationships...`);
277
+
278
+ if (totalRels === 0) {
279
+ console.log(' βœ“ No relationships to sync');
280
+ return;
281
+ }
282
+
283
+ let offset = 0;
284
+ let processed = 0;
285
+ const startTime = Date.now();
286
+
287
+ try {
288
+ while (offset < totalRels) {
289
+ const fetchSize = this.config.batchSize * this.config.parallelBatches;
290
+
291
+ let query: string;
292
+ let params: Record<string, unknown> = { skip: neo4j.int(offset), limit: neo4j.int(fetchSize) };
293
+
294
+ if (since) {
295
+ query = `
296
+ MATCH (a)-[r]->(b)
297
+ WHERE a.updatedAt > $since OR b.updatedAt > $since
298
+ OR a.createdAt > $since OR b.createdAt > $since
299
+ OR NOT exists(a.updatedAt)
300
+ RETURN type(r) as type, properties(r) as props,
301
+ elementId(a) as startId, elementId(b) as endId
302
+ SKIP $skip LIMIT $limit
303
+ `;
304
+ params.since = since.toISOString();
305
+ } else {
306
+ query = `
307
+ MATCH (a)-[r]->(b)
308
+ RETURN type(r) as type, properties(r) as props,
309
+ elementId(a) as startId, elementId(b) as endId
310
+ SKIP $skip LIMIT $limit
311
+ `;
312
+ }
313
+
314
+ const result = await localSession.run(query, params);
315
+ const rels = result.records.map(r => ({
316
+ type: r.get('type') as string,
317
+ properties: r.get('props') as Record<string, unknown>,
318
+ startId: r.get('startId') as string,
319
+ endId: r.get('endId') as string
320
+ }));
321
+
322
+ if (rels.length === 0) break;
323
+
324
+ // Split into batches and process in parallel
325
+ const batches: typeof rels[] = [];
326
+ for (let i = 0; i < rels.length; i += this.config.batchSize) {
327
+ batches.push(rels.slice(i, i + this.config.batchSize));
328
+ }
329
+
330
+ // Process batches sequentially to avoid deadlocks
331
+ for (const batch of batches) {
332
+ await this.importRelationshipBatch(batch);
333
+ }
334
+
335
+ processed += rels.length;
336
+ offset += fetchSize;
337
+
338
+ const elapsed = (Date.now() - startTime) / 1000;
339
+ const rate = Math.round(processed / elapsed);
340
+ const eta = Math.round((totalRels - processed) / rate);
341
+
342
+ console.log(` Progress: ${processed}/${totalRels} (${rate} rels/sec, ETA: ${eta}s)`);
343
+ }
344
+
345
+ this.stats.relationshipsCreated = processed;
346
+ console.log(` βœ“ Synced ${processed} relationships in ${((Date.now() - startTime) / 1000).toFixed(1)}s`);
347
+ } finally {
348
+ await localSession.close();
349
+ }
350
+ }
351
+
352
+ private async importRelationshipBatch(rels: Array<{ type: string; properties: Record<string, unknown>; startId: string; endId: string }>): Promise<void> {
353
+ const cloudSession = this.cloudDriver.session();
354
+ try {
355
+ // Group by relationship type
356
+ const relsByType = new Map<string, typeof rels>();
357
+
358
+ for (const rel of rels) {
359
+ if (!relsByType.has(rel.type)) {
360
+ relsByType.set(rel.type, []);
361
+ }
362
+ relsByType.get(rel.type)!.push(rel);
363
+ }
364
+
365
+ // Import each type
366
+ for (const [relType, typeRels] of relsByType) {
367
+ const relData = typeRels.map(r => ({
368
+ startId: r.startId,
369
+ endId: r.endId,
370
+ props: r.properties
371
+ }));
372
+
373
+ await cloudSession.run(`
374
+ UNWIND $rels as relData
375
+ MATCH (a {_syncId: relData.startId})
376
+ MATCH (b {_syncId: relData.endId})
377
+ MERGE (a)-[r:${relType}]->(b)
378
+ SET r = relData.props
379
+ `, { rels: relData });
380
+ }
381
+ } finally {
382
+ await cloudSession.close();
383
+ }
384
+ }
385
+
386
+ async syncSchema(): Promise<void> {
387
+ console.log('\nπŸ“‹ Syncing schema...');
388
+ const localSession = this.localDriver.session();
389
+ const cloudSession = this.cloudDriver.session();
390
+
391
+ try {
392
+ // Get constraints from local - build CREATE statements from available fields
393
+ const constraintsResult = await localSession.run('SHOW CONSTRAINTS');
394
+
395
+ for (const record of constraintsResult.records) {
396
+ const name = record.get('name');
397
+ const constraintType = record.get('type');
398
+ const entityType = record.get('entityType');
399
+ const labelsOrTypes = record.get('labelsOrTypes') as string[];
400
+ const properties = record.get('properties') as string[];
401
+
402
+ if (!name || name.startsWith('_')) continue;
403
+
404
+ // Build CREATE CONSTRAINT statement
405
+ let createStatement = '';
406
+ const label = labelsOrTypes?.[0] || 'Node';
407
+ const prop = properties?.[0] || 'id';
408
+
409
+ if (constraintType === 'UNIQUENESS' && entityType === 'NODE') {
410
+ createStatement = `CREATE CONSTRAINT ${name} IF NOT EXISTS FOR (n:${label}) REQUIRE n.${prop} IS UNIQUE`;
411
+ } else if (constraintType === 'NODE_KEY') {
412
+ createStatement = `CREATE CONSTRAINT ${name} IF NOT EXISTS FOR (n:${label}) REQUIRE n.${prop} IS NODE KEY`;
413
+ }
414
+
415
+ if (createStatement) {
416
+ try {
417
+ await cloudSession.run(createStatement);
418
+ console.log(` βœ“ Constraint: ${name}`);
419
+ } catch (e: unknown) {
420
+ const error = e as Error;
421
+ if (!error.message?.includes('already exists') && !error.message?.includes('equivalent')) {
422
+ console.log(` ⚠ Constraint ${name}: ${error.message?.slice(0, 50)}`);
423
+ }
424
+ }
425
+ }
426
+ }
427
+
428
+ // Get indexes - exclude LOOKUP and internal indexes
429
+ const indexesResult = await localSession.run('SHOW INDEXES');
430
+
431
+ for (const record of indexesResult.records) {
432
+ const name = record.get('name');
433
+ const indexType = record.get('type');
434
+ const entityType = record.get('entityType');
435
+ const labelsOrTypes = record.get('labelsOrTypes') as string[];
436
+ const properties = record.get('properties') as string[];
437
+
438
+ if (!name || name.startsWith('_') || indexType === 'LOOKUP') continue;
439
+
440
+ const label = labelsOrTypes?.[0] || 'Node';
441
+ const props = properties || [];
442
+
443
+ let createStatement = '';
444
+
445
+ if (indexType === 'RANGE' && entityType === 'NODE' && props.length > 0) {
446
+ const propList = props.map(p => `n.${p}`).join(', ');
447
+ createStatement = `CREATE INDEX ${name} IF NOT EXISTS FOR (n:${label}) ON (${propList})`;
448
+ } else if (indexType === 'VECTOR' && props.length > 0) {
449
+ // Skip vector indexes - they need special handling
450
+ console.log(` ⏭ Skipping vector index: ${name}`);
451
+ continue;
452
+ }
453
+
454
+ if (createStatement) {
455
+ try {
456
+ await cloudSession.run(createStatement);
457
+ console.log(` βœ“ Index: ${name} (${indexType})`);
458
+ } catch (e: unknown) {
459
+ const error = e as Error;
460
+ if (!error.message?.includes('already exists') && !error.message?.includes('equivalent')) {
461
+ console.log(` ⚠ Index ${name}: ${error.message?.slice(0, 50)}`);
462
+ }
463
+ }
464
+ }
465
+ }
466
+
467
+ // Ensure _syncId index exists for fast lookups
468
+ try {
469
+ await cloudSession.run('CREATE INDEX sync_id_index IF NOT EXISTS FOR (n:Node) ON (n._syncId)');
470
+ } catch {
471
+ // Ignore
472
+ }
473
+ } finally {
474
+ await localSession.close();
475
+ await cloudSession.close();
476
+ }
477
+ }
478
+
479
+ async run(): Promise<SyncStats> {
480
+ const startTime = Date.now();
481
+
482
+ console.log('═'.repeat(60));
483
+ console.log('⚑ Fast Neo4j Sync');
484
+ console.log('═'.repeat(60));
485
+ console.log(`\nLocal: ${this.config.localUri}`);
486
+ console.log(`Cloud: ${this.config.cloudUri}`);
487
+ console.log(`Mode: ${this.config.incrementalOnly ? 'Incremental' : 'Full'}`);
488
+ console.log(`Batch: ${this.config.batchSize} nodes Γ— ${this.config.parallelBatches} parallel`);
489
+
490
+ await this.connect();
491
+
492
+ let since: Date | undefined;
493
+
494
+ if (this.config.incrementalOnly) {
495
+ const lastSync = await this.getLastSyncTime();
496
+ if (lastSync) {
497
+ since = lastSync;
498
+ console.log(`\nπŸ“… Last sync: ${lastSync.toISOString()}`);
499
+ console.log(' Only syncing changes since then...');
500
+ } else {
501
+ console.log('\n⚠️ No previous sync found, doing full sync...');
502
+ }
503
+ }
504
+
505
+ await this.syncSchema();
506
+ await this.syncNodesInParallel(since);
507
+ await this.syncRelationshipsInParallel(since);
508
+
509
+ // Record sync time
510
+ await this.setLastSyncTime(new Date());
511
+
512
+ this.stats.duration = Date.now() - startTime;
513
+
514
+ console.log('\n' + '═'.repeat(60));
515
+ console.log('βœ… Sync Complete!');
516
+ console.log('═'.repeat(60));
517
+ console.log(` Nodes: ${this.stats.nodesCreated}`);
518
+ console.log(` Relationships: ${this.stats.relationshipsCreated}`);
519
+ console.log(` Duration: ${(this.stats.duration / 1000).toFixed(1)}s`);
520
+ console.log(` Rate: ${Math.round((this.stats.nodesCreated + this.stats.relationshipsCreated) / (this.stats.duration / 1000))} items/sec`);
521
+
522
+ return this.stats;
523
+ }
524
+
525
+ async close(): Promise<void> {
526
+ await this.localDriver.close();
527
+ await this.cloudDriver.close();
528
+ }
529
+ }
530
+
531
+ // CLI
532
+ async function main() {
533
+ const args = process.argv.slice(2);
534
+ const incremental = args.includes('--incremental') || args.includes('-i');
535
+ const help = args.includes('--help') || args.includes('-h');
536
+
537
+ if (help) {
538
+ console.log(`
539
+ Fast Neo4j Sync - Parallel batch sync with incremental support
540
+
541
+ Usage: npx tsx src/scripts/fast-neo4j-sync.ts [options]
542
+
543
+ Options:
544
+ --incremental, -i Only sync changes since last sync
545
+ --help, -h Show this help
546
+
547
+ Environment variables:
548
+ NEO4J_LOCAL_URI Local Neo4j URI (default: bolt://localhost:7687)
549
+ NEO4J_LOCAL_USER Local Neo4j user (default: neo4j)
550
+ NEO4J_LOCAL_PASSWORD Local Neo4j password
551
+ NEO4J_CLOUD_URI Cloud AuraDB URI
552
+ NEO4J_CLOUD_USER Cloud user (default: neo4j)
553
+ NEO4J_CLOUD_PASSWORD Cloud password
554
+ `);
555
+ process.exit(0);
556
+ }
557
+
558
+ // Cloud URI from .env.production (NEO4J_URI contains the cloud AuraDB)
559
+ const cloudUri = process.env.NEO4J_URI || '';
560
+ const cloudPassword = process.env.NEO4J_PASSWORD || '';
561
+
562
+ const syncConfig: SyncConfig = {
563
+ localUri: process.env.NEO4J_LOCAL_URI || 'bolt://localhost:7687',
564
+ localUser: process.env.NEO4J_LOCAL_USER || 'neo4j',
565
+ localPassword: process.env.NEO4J_LOCAL_PASSWORD || 'password', // Local default
566
+ cloudUri: cloudUri,
567
+ cloudUser: process.env.NEO4J_USER || 'neo4j',
568
+ cloudPassword: cloudPassword,
569
+ batchSize: 500, // Larger batches
570
+ parallelBatches: 4, // 4 parallel operations
571
+ incrementalOnly: incremental
572
+ };
573
+
574
+ if (!syncConfig.cloudUri) {
575
+ console.error('❌ NEO4J_URI (cloud) is required in .env.production');
576
+ process.exit(1);
577
+ }
578
+
579
+ const sync = new FastNeo4jSync(syncConfig);
580
+
581
+ try {
582
+ await sync.run();
583
+ } finally {
584
+ await sync.close();
585
+ }
586
+ }
587
+
588
+ main().catch(console.error);
apps/backend/src/scripts/sync-relationships-only.ts ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Sync only relationships to cloud (nodes already synced)
3
+ */
4
+
5
+ import neo4j, { Driver } from 'neo4j-driver';
6
+ import { config } from 'dotenv';
7
+ import { resolve } from 'path';
8
+
9
+ config({ path: resolve(process.cwd(), '.env.production') });
10
+ config({ path: resolve(process.cwd(), '.env') });
11
+
12
+ const BATCH_SIZE = 500;
13
+
14
+ async function main() {
15
+ const localDriver = neo4j.driver(
16
+ 'bolt://localhost:7687',
17
+ neo4j.auth.basic('neo4j', 'password')
18
+ );
19
+
20
+ const cloudDriver = neo4j.driver(
21
+ process.env.NEO4J_URI || '',
22
+ neo4j.auth.basic(process.env.NEO4J_USER || 'neo4j', process.env.NEO4J_PASSWORD || '')
23
+ );
24
+
25
+ console.log('═'.repeat(60));
26
+ console.log('πŸ”— Syncing Relationships Only');
27
+ console.log('═'.repeat(60));
28
+
29
+ const localSession = localDriver.session();
30
+ const countResult = await localSession.run('MATCH ()-[r]->() RETURN count(r) as count');
31
+ const totalRels = countResult.records[0].get('count').toNumber();
32
+ await localSession.close();
33
+
34
+ console.log(`\nTotal relationships to sync: ${totalRels}`);
35
+
36
+ let offset = 0;
37
+ let processed = 0;
38
+ const startTime = Date.now();
39
+
40
+ while (offset < totalRels) {
41
+ const session = localDriver.session();
42
+
43
+ const result = await session.run(`
44
+ MATCH (a)-[r]->(b)
45
+ RETURN type(r) as type, properties(r) as props,
46
+ elementId(a) as startId, elementId(b) as endId
47
+ SKIP $skip LIMIT $limit
48
+ `, { skip: neo4j.int(offset), limit: neo4j.int(BATCH_SIZE) });
49
+
50
+ const rels = result.records.map(r => ({
51
+ type: r.get('type') as string,
52
+ properties: r.get('props') as Record<string, unknown>,
53
+ startId: r.get('startId') as string,
54
+ endId: r.get('endId') as string
55
+ }));
56
+
57
+ await session.close();
58
+
59
+ if (rels.length === 0) break;
60
+
61
+ // Group by type
62
+ const relsByType = new Map<string, typeof rels>();
63
+ for (const rel of rels) {
64
+ if (!relsByType.has(rel.type)) {
65
+ relsByType.set(rel.type, []);
66
+ }
67
+ relsByType.get(rel.type)!.push(rel);
68
+ }
69
+
70
+ // Import each type
71
+ for (const [relType, typeRels] of relsByType) {
72
+ const cloudSession = cloudDriver.session();
73
+ try {
74
+ const relData = typeRels.map(r => ({
75
+ startId: r.startId,
76
+ endId: r.endId,
77
+ props: r.properties
78
+ }));
79
+
80
+ await cloudSession.run(`
81
+ UNWIND $rels as relData
82
+ MATCH (a {_syncId: relData.startId})
83
+ MATCH (b {_syncId: relData.endId})
84
+ MERGE (a)-[r:${relType}]->(b)
85
+ SET r = relData.props
86
+ `, { rels: relData });
87
+ } finally {
88
+ await cloudSession.close();
89
+ }
90
+ }
91
+
92
+ processed += rels.length;
93
+ offset += BATCH_SIZE;
94
+
95
+ const elapsed = (Date.now() - startTime) / 1000;
96
+ const rate = Math.round(processed / elapsed);
97
+ const eta = Math.round((totalRels - processed) / rate);
98
+
99
+ console.log(` Progress: ${processed}/${totalRels} (${rate} rels/sec, ETA: ${eta}s)`);
100
+ }
101
+
102
+ console.log(`\nβœ… Synced ${processed} relationships in ${((Date.now() - startTime) / 1000).toFixed(1)}s`);
103
+
104
+ await localDriver.close();
105
+ await cloudDriver.close();
106
+ }
107
+
108
+ main().catch(console.error);
apps/backend/src/services/HandoverWatchdog.ts ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import fs from 'fs';
2
+ import path from 'path';
3
+ import { EventEmitter } from 'events';
4
+
5
+ // DEFINITION: Path to the Single Source of Truth
6
+ // Assuming running from apps/backend/src or similar structure in Docker
7
+ const HANDOVER_LOG_PATH = path.join(process.cwd(), '..', '..', 'docs', 'HANDOVER_LOG.md');
8
+
9
+ export class HandoverWatchdog extends EventEmitter {
10
+ private static instance: HandoverWatchdog;
11
+ private isWatching: boolean = false;
12
+ private lastProcessedTime: number = 0;
13
+
14
+ private constructor() {
15
+ super();
16
+ }
17
+
18
+ public static getInstance(): HandoverWatchdog {
19
+ if (!HandoverWatchdog.instance) {
20
+ HandoverWatchdog.instance = new HandoverWatchdog();
21
+ }
22
+ return HandoverWatchdog.instance;
23
+ }
24
+
25
+ /**
26
+ * Starts the surveillance of the Blackboard Protocol.
27
+ */
28
+ public startWatch(): void {
29
+ if (this.isWatching) {
30
+ console.log('πŸ‘οΈ [WATCHDOG] Already active.');
31
+ return;
32
+ }
33
+
34
+ console.log(`πŸ‘οΈ [WATCHDOG] Initializing Blackboard Protocol surveillance...`);
35
+ console.log(`πŸ“ [WATCHDOG] Target: ${HANDOVER_LOG_PATH}`);
36
+
37
+ // Verify existence of the Truth Document
38
+ if (!fs.existsSync(HANDOVER_LOG_PATH)) {
39
+ console.warn(`⚠️ [WATCHDOG] Critical Failure: Handover log not found at ${HANDOVER_LOG_PATH}`);
40
+ // Retry logic could be added here, but for now we warn
41
+ return;
42
+ }
43
+
44
+ try {
45
+ // We use watchFile for better Docker volume compatibility than fs.watch
46
+ fs.watchFile(HANDOVER_LOG_PATH, { interval: 2000 }, (curr, prev) => {
47
+ // Only trigger if modified time changed and it's newer than last process
48
+ if (curr.mtimeMs !== prev.mtimeMs) {
49
+ console.log('⚑ [WATCHDOG] Detect: Blackboard updated. Analyzing...');
50
+ this.analyzeBlackboard();
51
+ }
52
+ });
53
+
54
+ this.isWatching = true;
55
+ console.log('🟒 [WATCHDOG] Active and Waiting for Signals.');
56
+ } catch (error) {
57
+ console.error('❌ [WATCHDOG] Failed to initialize:', error);
58
+ }
59
+ }
60
+
61
+ /**
62
+ * Reads the Blackboard and looks for "READY FOR [AGENT]" signals.
63
+ */
64
+ private analyzeBlackboard(): void {
65
+ try {
66
+ const content = fs.readFileSync(HANDOVER_LOG_PATH, 'utf-8');
67
+
68
+ // REGEX LOGIC:
69
+ // We look for: **Status:** [BOLD/EMOJI?] READY FOR [AGENT_NAME]
70
+ // Captures the agent name. Case insensitive.
71
+ const statusRegex = /\*\*Status:\*\*\s*(?:βœ…|πŸ”΄|🟒|\*\*)?\s*READY FOR\s*\[?(.*?)\]?/i;
72
+
73
+ // We only scan the last 2000 characters to be efficient
74
+ const recentContent = content.slice(-2000);
75
+ const match = statusRegex.exec(recentContent);
76
+
77
+ if (match && match[1]) {
78
+ const targetAgent = match[1].trim().replace(/\*|\]|\[/g, ''); // Cleanup cleanup
79
+
80
+ console.log(`πŸš€ [WATCHDOG] SIGNAL VERIFIED: Handover to Agent [${targetAgent}]`);
81
+
82
+ // Emit the signal for the Orchestrator to pick up
83
+ this.emit('handover_signal', {
84
+ target: targetAgent,
85
+ timestamp: new Date(),
86
+ source: 'Blackboard'
87
+ });
88
+ } else {
89
+ // Debug log to confirm scan happened (can be removed later for silence)
90
+ // console.log('πŸ” [WATCHDOG] Scanned. No active handover signal found.');
91
+ }
92
+
93
+ } catch (error) {
94
+ console.error('❌ [WATCHDOG] Read error:', error);
95
+ }
96
+ }
97
+ }
98
+
99
+ export const handoverWatchdog = HandoverWatchdog.getInstance();