File size: 3,127 Bytes
33f2414
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
/**
 * Sync only relationships to cloud (nodes already synced)
 */

import neo4j, { Driver } from 'neo4j-driver';
import { config } from 'dotenv';
import { resolve } from 'path';

config({ path: resolve(process.cwd(), '.env.production') });
config({ path: resolve(process.cwd(), '.env') });

// Number of relationships fetched from the local instance (and pushed to the
// cloud) per page of the SKIP/LIMIT loop below.
const BATCH_SIZE = 500;

/**
 * Copy every relationship from the local Neo4j instance to the cloud
 * instance in pages of BATCH_SIZE.
 *
 * Assumes nodes were already synced and that each cloud node carries a
 * `_syncId` property equal to its local `elementId`, which is how
 * relationship endpoints are re-matched on the cloud side.
 *
 * Connection settings: local is hard-coded (bolt://localhost:7687), cloud
 * comes from NEO4J_URI / NEO4J_USER / NEO4J_PASSWORD.
 */
async function main() {
  const localDriver = neo4j.driver(
    'bolt://localhost:7687',
    neo4j.auth.basic('neo4j', 'password')
  );

  const cloudDriver = neo4j.driver(
    process.env.NEO4J_URI || '',
    neo4j.auth.basic(process.env.NEO4J_USER || 'neo4j', process.env.NEO4J_PASSWORD || '')
  );

  // Ensure both drivers are closed even if any step below throws;
  // otherwise open bolt connections can keep the process alive.
  try {
    console.log('═'.repeat(60));
    console.log('🔗 Syncing Relationships Only');
    console.log('═'.repeat(60));

    // Count once up front so we can page and report progress.
    const localSession = localDriver.session();
    let totalRels: number;
    try {
      const countResult = await localSession.run('MATCH ()-[r]->() RETURN count(r) as count');
      totalRels = countResult.records[0].get('count').toNumber();
    } finally {
      await localSession.close();
    }

    console.log(`\nTotal relationships to sync: ${totalRels}`);

    let offset = 0;
    let processed = 0;
    const startTime = Date.now();

    while (offset < totalRels) {
      const session = localDriver.session();

      let rels: Array<{
        type: string;
        properties: Record<string, unknown>;
        startId: string;
        endId: string;
      }>;
      try {
        // ORDER BY elementId(r): without a deterministic order, SKIP/LIMIT
        // pagination in Neo4j may skip or duplicate relationships between
        // pages.
        const result = await session.run(`
          MATCH (a)-[r]->(b)
          RETURN type(r) as type, properties(r) as props,
                 elementId(a) as startId, elementId(b) as endId
          ORDER BY elementId(r)
          SKIP $skip LIMIT $limit
        `, { skip: neo4j.int(offset), limit: neo4j.int(BATCH_SIZE) });

        rels = result.records.map(r => ({
          type: r.get('type') as string,
          properties: r.get('props') as Record<string, unknown>,
          startId: r.get('startId') as string,
          endId: r.get('endId') as string
        }));
      } finally {
        await session.close();
      }

      if (rels.length === 0) break;

      // Group by relationship type — the type cannot be parameterized in
      // Cypher, so each type needs its own MERGE statement.
      const relsByType = new Map<string, typeof rels>();
      for (const rel of rels) {
        if (!relsByType.has(rel.type)) {
          relsByType.set(rel.type, []);
        }
        relsByType.get(rel.type)!.push(rel);
      }

      // Import each type. Note: relType is interpolated into the query text;
      // it originates from the local database's own type() values, not from
      // external input.
      for (const [relType, typeRels] of relsByType) {
        const cloudSession = cloudDriver.session();
        try {
          const relData = typeRels.map(r => ({
            startId: r.startId,
            endId: r.endId,
            props: r.properties
          }));

          await cloudSession.run(`
            UNWIND $rels as relData
            MATCH (a {_syncId: relData.startId})
            MATCH (b {_syncId: relData.endId})
            MERGE (a)-[r:${relType}]->(b)
            SET r = relData.props
          `, { rels: relData });
        } finally {
          await cloudSession.close();
        }
      }

      processed += rels.length;
      offset += BATCH_SIZE;

      const elapsed = (Date.now() - startTime) / 1000;
      const rate = Math.round(processed / elapsed);
      // Guard against rate === 0 (sub-second first batch) to avoid
      // printing "ETA: Infinitys".
      const eta = rate > 0 ? Math.round((totalRels - processed) / rate) : 0;

      console.log(`  Progress: ${processed}/${totalRels} (${rate} rels/sec, ETA: ${eta}s)`);
    }

    console.log(`\n✅ Synced ${processed} relationships in ${((Date.now() - startTime) / 1000).toFixed(1)}s`);
  } finally {
    await localDriver.close();
    await cloudDriver.close();
  }
}

// Report failures AND exit non-zero so callers/CI can detect a failed sync;
// a bare .catch(console.error) would exit with status 0 on error.
main().catch((err: unknown) => {
  console.error(err);
  process.exitCode = 1;
});