| """ |
| Pre-compute force-directed layout for Steam co-review network. |
| |
| Uses networkx spring_layout (Fruchterman-Reingold) with scipy acceleration. |
| 26K nodes, filtered edges. Outputs steam_force_layout.json. |
| |
| Usage: |
| python3 compute_layout.py |
| """ |
|
|
| import json |
| import time |
| import sys |
| from pathlib import Path |
|
|
# networkx is the only hard third-party dependency; fail fast with an
# actionable message instead of a bare ImportError traceback.
try:
    import networkx as nx
except ImportError:
    print("ERROR: networkx required. pip install networkx")
    sys.exit(1)
|
|
| |
# Input/output paths, resolved relative to this script's directory.
HERE = Path(__file__).parent
NETWORK_JSON = HERE / 'steam_network.json'
OUTPUT = HERE / 'steam_force_layout.json'


# Edges with weight below this are dropped before layout (keeps the
# graph sparse enough for spring_layout on ~26K nodes).
MIN_WEIGHT = 100

# Fruchterman-Reingold iteration count; more iterations = better
# convergence but linearly more runtime.
ITERATIONS = 80

# Optimal node spacing for spring_layout; None lets networkx pick
# the default of 1/sqrt(n).
K_SPRING = None

# Fixed RNG seed so the layout is reproducible across runs.
SEED = 42
|
|
def main():
    """Load the co-review network, compute a spring layout, and write
    normalized [0, 1] node positions to OUTPUT as compact JSON.

    Pipeline:
      1. Load nodes/links from NETWORK_JSON.
      2. Build an undirected graph, keeping only edges with
         weight >= MIN_WEIGHT, then drop isolated nodes.
      3. Run networkx spring_layout (Fruchterman-Reingold).
      4. Normalize positions to the unit square using 1st/99th
         percentile bounds (so outliers don't squash the layout),
         and dump them keyed by node id.
    """
    t0 = time.time()
    print("=" * 60)
    print("Steam Network Force Layout Generator")
    print("=" * 60)

    print("\n[1/4] Loading network data...")
    with open(NETWORK_JSON) as f:
        data = json.load(f)

    nodes = data['nodes']
    links = data['links']
    print(f"  {len(nodes):,} nodes, {len(links):,} links")

    print(f"\n[2/4] Building graph (min weight={MIN_WEIGHT})...")
    G = nx.Graph()

    # Node ids in the links are integer indices into `nodes`.
    for i, node in enumerate(nodes):
        G.add_node(i, title=node['title'], reviews=node.get('reviews', 0))

    # Keep only strong co-review edges; count what we drop for reporting.
    added = 0
    skipped = 0
    for link in links:
        if link['weight'] >= MIN_WEIGHT:
            G.add_edge(link['source'], link['target'], weight=link['weight'])
            added += 1
        else:
            skipped += 1

    # Nodes with no surviving edges contribute nothing to the layout.
    isolates = list(nx.isolates(G))
    G.remove_nodes_from(isolates)

    print(f"  {G.number_of_nodes():,} nodes, {G.number_of_edges():,} edges")
    print(f"  ({len(isolates):,} isolated nodes removed, {skipped:,} edges below threshold)")

    print(f"\n[3/4] Computing spring layout ({ITERATIONS} iterations)...")
    print(f"  This may take a few minutes for {G.number_of_nodes():,} nodes...")

    t1 = time.time()

    pos = nx.spring_layout(
        G,
        k=K_SPRING,
        iterations=ITERATIONS,
        seed=SEED,
        weight='weight',
    )

    layout_time = time.time() - t1
    print(f"  Layout computed in {layout_time:.1f}s")

    print("\n[4/4] Writing output...")
    output = {
        'positions': {},
        'meta': {
            'node_count': G.number_of_nodes(),
            'edge_count': G.number_of_edges(),
            'min_weight': MIN_WEIGHT,
            'iterations': ITERATIONS,
            'layout_time_seconds': round(layout_time, 1),
        }
    }

    # Percentile-based bounds: clamp to the 1st/99th percentile range so a
    # handful of far-flung nodes doesn't compress everything into the center.
    all_x = sorted(float(xy[0]) for xy in pos.values())
    all_y = sorted(float(xy[1]) for xy in pos.values())
    n = len(all_x)
    p_lo, p_hi = 0.01, 0.99
    x_lo = all_x[int(n * p_lo)]
    x_hi = all_x[int(n * p_hi)]
    y_lo = all_y[int(n * p_lo)]
    y_hi = all_y[int(n * p_hi)]
    x_range = x_hi - x_lo or 1  # guard against zero-width range
    y_range = y_hi - y_lo or 1
    margin = 0.02

    # BUG FIX: the normalized coordinates were previously named `nx`/`ny`.
    # Assigning to `nx` anywhere in this function made `nx` a local variable
    # for the whole body, so the earlier `nx.Graph()` call raised
    # UnboundLocalError before any work happened. Renamed to x_norm/y_norm.
    for node_idx, (x, y) in pos.items():
        node_id = nodes[node_idx]['id']
        x_norm = (float(x) - x_lo) / x_range * (1 - 2 * margin) + margin
        y_norm = (float(y) - y_lo) / y_range * (1 - 2 * margin) + margin
        x_norm = max(0.0, min(1.0, x_norm))
        y_norm = max(0.0, min(1.0, y_norm))
        output['positions'][node_id] = [round(x_norm, 6), round(y_norm, 6)]

    with open(OUTPUT, 'w') as f:
        json.dump(output, f, separators=(',', ':'))

    size_mb = OUTPUT.stat().st_size / (1024 * 1024)
    elapsed = time.time() - t0

    print(f"\n{'=' * 60}")
    print(f"Saved: {OUTPUT}")
    print(f"  {output['meta']['node_count']:,} nodes with positions")
    print(f"  {output['meta']['edge_count']:,} edges used for layout")
    print(f"  Size: {size_mb:.1f} MB")
    print(f"  Total time: {elapsed:.0f}s ({elapsed/60:.1f} min)")
    print("=" * 60)
|
|
|
|
# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
|
|