"""
Pre-compute force-directed layout for Steam co-review network.
Uses networkx spring_layout (Fruchterman-Reingold) with scipy acceleration.
26K nodes, filtered edges. Outputs steam_force_layout.json.
Usage:
python3 compute_layout.py
"""
import json
import time
import sys
from pathlib import Path
try:
import networkx as nx
except ImportError:
print("ERROR: networkx required. pip install networkx")
sys.exit(1)
# ── Config ──────────────────────────────────────────────────────────
# All paths are resolved relative to this script's own directory.
HERE = Path(__file__).parent
# Input: JSON with 'nodes' (list of {title, reviews, id, ...}) and
# 'links' (list of {source, target, weight}) — see main() for usage.
NETWORK_JSON = HERE / 'steam_network.json'
# Output: compact JSON mapping node id -> [x, y] in [0, 1].
OUTPUT = HERE / 'steam_force_layout.json'
# Edge weight threshold — only include edges with weight >= this
# Higher = fewer edges = faster layout. 100 gives ~9K nodes, ~219K edges.
MIN_WEIGHT = 100
# Layout iterations (more = better convergence, slower)
ITERATIONS = 80
# Spring constant (lower = more spread out)
K_SPRING = None # None = auto (1/sqrt(n))
# Seed for reproducibility
SEED = 42
def main():
    """Compute and save a force-directed layout for the Steam co-review graph.

    Pipeline:
      1. Load NETWORK_JSON ('nodes' and 'links' lists).
      2. Build an undirected graph, keeping only edges with
         weight >= MIN_WEIGHT and dropping nodes left isolated.
      3. Run networkx's spring (Fruchterman-Reingold) layout.
      4. Normalize positions to [0, 1] via percentile scaling and write
         OUTPUT as compact JSON mapping node id -> [x, y].

    Reads module-level config (MIN_WEIGHT, ITERATIONS, K_SPRING, SEED).
    """
    t0 = time.time()
    print("=" * 60)
    print("Steam Network Force Layout Generator")
    print("=" * 60)

    # 1. Load network data
    print("\n[1/4] Loading network data...")
    with open(NETWORK_JSON) as f:
        data = json.load(f)
    nodes = data['nodes']
    links = data['links']
    print(f" {len(nodes):,} nodes, {len(links):,} links")

    # 2. Build graph with filtered edges
    print(f"\n[2/4] Building graph (min weight={MIN_WEIGHT})...")
    G = nx.Graph()
    # Add all nodes with metadata; node key is the index into `nodes`,
    # which is mapped back to the node's 'id' when writing output.
    for i, node in enumerate(nodes):
        G.add_node(i, title=node['title'], reviews=node.get('reviews', 0))
    # Add edges at or above the weight threshold.
    skipped = 0
    for link in links:
        if link['weight'] >= MIN_WEIGHT:
            G.add_edge(link['source'], link['target'], weight=link['weight'])
        else:
            skipped += 1
    # Remove isolated nodes (no edges after filtering)
    isolates = list(nx.isolates(G))
    G.remove_nodes_from(isolates)
    print(f" {G.number_of_nodes():,} nodes, {G.number_of_edges():,} edges")
    print(f" ({len(isolates):,} isolated nodes removed, {skipped:,} edges below threshold)")

    # 3. Compute layout
    print(f"\n[3/4] Computing spring layout ({ITERATIONS} iterations)...")
    print(f" This may take a few minutes for {G.number_of_nodes():,} nodes...")
    t1 = time.time()
    # Use weight for layout — higher weight = stronger attraction
    pos = nx.spring_layout(
        G,
        k=K_SPRING,
        iterations=ITERATIONS,
        seed=SEED,
        weight='weight',
    )
    layout_time = time.time() - t1
    print(f" Layout computed in {layout_time:.1f}s")

    # 4. Build output — map node index back to node ID
    print("\n[4/4] Writing output...")
    output = {
        'positions': {},
        'meta': {
            'node_count': G.number_of_nodes(),
            'edge_count': G.number_of_edges(),
            'min_weight': MIN_WEIGHT,
            'iterations': ITERATIONS,
            'layout_time_seconds': round(layout_time, 1),
        }
    }

    # Normalize positions to [0, 1] using percentile-based scaling.
    # Min/max normalization is ruined by outlier nodes — a few extreme
    # positions compress the main cluster into a tiny area. Percentile
    # scaling maps the 1st-99th percentile range to [0.02, 0.98] and
    # clamps outliers to the edges.
    all_x = sorted(float(xy[0]) for xy in pos.values())
    all_y = sorted(float(xy[1]) for xy in pos.values())
    n = len(all_x)
    p_lo, p_hi = 0.01, 0.99
    x_lo = all_x[int(n * p_lo)]
    x_hi = all_x[int(n * p_hi)]
    y_lo = all_y[int(n * p_lo)]
    y_hi = all_y[int(n * p_hi)]
    # `or 1` guards against a degenerate zero range (all points collinear).
    x_range = x_hi - x_lo or 1
    y_range = y_hi - y_lo or 1
    margin = 0.02
    for node_idx, (x, y) in pos.items():
        node_id = nodes[node_idx]['id']
        # BUGFIX: these locals were previously named `nx`/`ny`. Assigning to
        # `nx` anywhere in main() makes it function-local under Python's
        # compile-time scoping, so every earlier `nx.Graph()` /
        # `nx.spring_layout()` call raised UnboundLocalError.
        xn = (float(x) - x_lo) / x_range * (1 - 2 * margin) + margin
        yn = (float(y) - y_lo) / y_range * (1 - 2 * margin) + margin
        # Clamp outliers beyond the percentile window to the unit square.
        xn = max(0.0, min(1.0, xn))
        yn = max(0.0, min(1.0, yn))
        output['positions'][node_id] = [round(xn, 6), round(yn, 6)]

    with open(OUTPUT, 'w') as f:
        json.dump(output, f, separators=(',', ':'))
    size_mb = OUTPUT.stat().st_size / (1024 * 1024)

    elapsed = time.time() - t0
    print(f"\n{'=' * 60}")
    print(f"Saved: {OUTPUT}")
    print(f" {output['meta']['node_count']:,} nodes with positions")
    print(f" {output['meta']['edge_count']:,} edges used for layout")
    print(f" Size: {size_mb:.1f} MB")
    print(f" Total time: {elapsed:.0f}s ({elapsed/60:.1f} min)")
    print("=" * 60)
# Script entry point — run the full layout pipeline when invoked directly.
if __name__ == '__main__':
    main()