"""
Combinatorial type enumeration for convex polyhedra inscribed in the sphere.
Generates random point sets on the sphere and counts distinct combinatorial types
using canonical graph labelings to avoid O(N^2) isomorphism checks.
"""
import numpy as np
from scipy.spatial import ConvexHull
from collections import defaultdict
from multiprocessing import Pool, cpu_count
import os
try:
import pynauty
PYNAUTY_AVAILABLE = True
except ImportError:
PYNAUTY_AVAILABLE = False
def generate_random_points_on_sphere(n_points, rng=None):
    """
    Generate n_points uniformly distributed on the unit sphere using Marsaglia's method.

    Candidate pairs (x, y) are drawn uniformly from [-1, 1]^2 and rejected
    unless x^2 + y^2 < 1 (~78.5% acceptance); each accepted pair is mapped
    onto the sphere. Accepted points are accumulated as whole arrays instead
    of one Python-level append per point, so this runs at NumPy speed.

    Args:
        n_points: Number of points to generate.
        rng: numpy random Generator (if None, a fresh default_rng() is used).

    Returns:
        Array of shape (n_points, 3) whose rows have unit norm.
        For n_points <= 0 an empty (0, 3) array is returned.
    """
    if rng is None:
        rng = np.random.default_rng()
    if n_points <= 0:
        # Edge case: keep the documented (N, 3) shape even when N == 0.
        return np.empty((0, 3))
    accepted = []        # list of (k, 3) arrays of accepted sphere points
    n_accepted = 0
    while n_accepted < n_points:
        # Oversize small requests so the rejection step rarely forces
        # an extra loop iteration.
        batch_size = max(n_points - n_accepted, 100)
        x = rng.uniform(-1, 1, batch_size)
        y = rng.uniform(-1, 1, batch_size)
        r_sq = x**2 + y**2
        # Keep only candidate pairs inside the unit circle.
        mask = r_sq < 1
        x = x[mask]
        y = y[mask]
        r_sq = r_sq[mask]
        # Marsaglia map: (x, y) -> (2x√(1-r^2), 2y√(1-r^2), 1-2r^2)
        sqrt_term = np.sqrt(1 - r_sq)
        batch = np.column_stack((2 * x * sqrt_term,
                                 2 * y * sqrt_term,
                                 1 - 2 * r_sq))
        accepted.append(batch)
        n_accepted += len(batch)
    # Concatenate all batches and trim any surplus from the final one.
    return np.concatenate(accepted)[:n_points]
def extract_graph_from_hull(vertices_3d):
    """
    Extract the 1-skeleton (edge graph) of the convex hull of a point set.

    Args:
        vertices_3d: N x 3 array of vertex coordinates.

    Returns:
        adjacency: dict mapping each vertex index 0..N-1 to the list of
            indices it shares a hull edge with.
        n_vertices: number of vertices (== N).
    """
    hull = ConvexHull(vertices_3d)
    # Each simplicial face is a triangle; record its three undirected
    # edges with the smaller index first so duplicates collapse in the set.
    edge_set = set()
    for face in hull.simplices:
        a, b, c = face
        for u, v in ((a, b), (b, c), (c, a)):
            edge_set.add((u, v) if u < v else (v, u))
    # Convert the edge set into a symmetric adjacency-list representation.
    n_vertices = len(vertices_3d)
    adjacency = {idx: [] for idx in range(n_vertices)}
    for u, v in edge_set:
        adjacency[u].append(v)
        adjacency[v].append(u)
    return adjacency, n_vertices
def compute_canonical_hash(adjacency, n_vertices):
    """
    Compute a canonical hash for a graph using pynauty.

    Isomorphic graphs yield the same hash, because the edge list is rewritten
    under nauty's canonical labeling before being sorted.

    Args:
        adjacency: dict mapping vertex index to list of adjacent vertices.
        n_vertices: number of vertices in the graph.

    Returns:
        tuple: (n_vertices, sorted degree sequence, canonically relabeled
        sorted edge list) — hashable and comparable.

    Raises:
        ImportError: if pynauty is not installed.
    """
    if not PYNAUTY_AVAILABLE:
        raise ImportError("pynauty not installed. Install with: pip install pynauty")
    # Create pynauty graph
    g = pynauty.Graph(number_of_vertices=n_vertices, directed=False,
                      adjacency_dict=adjacency)
    # pynauty.canon_label returns nauty's `lab` array: lab[k] is the ORIGINAL
    # vertex that receives canonical label k. The canonical label of vertex v
    # is therefore the POSITION of v within lab (the inverse permutation).
    # BUG FIX: the previous code used lab[v] directly, which applies the
    # inverse of the canonical map and can assign different hashes to
    # isomorphic graphs.
    lab = pynauty.canon_label(g)
    canon_of = {orig: pos for pos, orig in enumerate(lab)}
    canonical_edges = []
    for v1 in range(n_vertices):
        for v2 in adjacency[v1]:
            if v1 < v2:  # Only add each undirected edge once
                c1, c2 = canon_of[v1], canon_of[v2]
                canonical_edges.append(tuple(sorted([c1, c2])))
    # Sort edges for consistent ordering
    canonical_edges = tuple(sorted(canonical_edges))
    # Also include vertex degrees in canonical (sorted) order
    canonical_degrees = tuple(sorted(len(adjacency[i]) for i in range(n_vertices)))
    return (n_vertices, canonical_degrees, canonical_edges)
def enumerate_combinatorial_types(n_vertices, n_samples, seed=None, verbose=True):
    """
    Enumerate distinct combinatorial types of convex polyhedra with n_vertices
    on the sphere, by repeated random sampling.

    Args:
        n_vertices: Number of vertices in each polyhedron.
        n_samples: Number of random samples to generate.
        seed: Random seed (for reproducibility).
        verbose: If True, print progress updates.

    Returns:
        dict with keys:
            - 'n_vertices': number of vertices
            - 'n_samples': number of samples generated
            - 'n_types': number of distinct combinatorial types found
            - 'type_counts': dict mapping canonical hash to count
            - 'type_examples': dict mapping canonical hash to example vertex set
            - 'n_failures': number of degenerate samples skipped

    Raises:
        ImportError: if pynauty is not installed.
    """
    if not PYNAUTY_AVAILABLE:
        raise ImportError("pynauty not installed. Install with: pip install pynauty")
    rng = np.random.default_rng(seed)
    type_counts = defaultdict(int)
    type_examples = {}   # first representative point set seen for each type
    n_failures = 0       # degenerate hulls / numerical breakdowns
    progress_step = max(1, n_samples // 10)
    for sample_idx in range(n_samples):
        if verbose and (sample_idx + 1) % progress_step == 0:
            print(f" Sample {sample_idx + 1}/{n_samples}: {len(type_counts)} types found")
        try:
            points = generate_random_points_on_sphere(n_vertices, rng)
            graph, n_verts = extract_graph_from_hull(points)
            key = compute_canonical_hash(graph, n_verts)
        except Exception as e:
            # Degenerate configurations (coplanar points, numerical issues,
            # ...) are skipped rather than aborting the whole run.
            n_failures += 1
            if verbose and n_failures == 1:
                print(f" Warning: encountered degenerate case: {e}")
            continue
        type_counts[key] += 1
        if key not in type_examples:
            type_examples[key] = points.copy()
    if verbose:
        print(f"\nCompleted: {len(type_counts)} distinct types found")
        if n_failures > 0:
            print(f" ({n_failures} degenerate cases skipped)")
    return {
        'n_vertices': n_vertices,
        'n_samples': n_samples,
        'n_types': len(type_counts),
        'type_counts': dict(type_counts),
        'type_examples': type_examples,
        'n_failures': n_failures,
    }
def format_enumeration_report(result):
    """
    Format enumeration results as a readable string report.

    Args:
        result: Dictionary returned by enumerate_combinatorial_types.

    Returns:
        Formatted multi-line string.
    """
    lines = [
        f"\n{'='*60}",
        f"Combinatorial Type Enumeration: {result['n_vertices']} vertices",
        f"{'='*60}",
        f"Samples generated: {result['n_samples']}",
        f"Distinct types found: {result['n_types']}",
        f"Degenerate cases: {result['n_failures']}",
        "",
    ]
    # Most frequent types first.
    by_frequency = sorted(result['type_counts'].items(),
                          key=lambda kv: kv[1], reverse=True)
    lines.append("Top 10 most common types:")
    for rank, (type_hash, count) in enumerate(by_frequency[:10], 1):
        _, degrees, _ = type_hash   # hash is (n_vertices, degrees, edges)
        pct = 100 * count / result['n_samples']
        lines.append(f" {rank}. Type with degree sequence {degrees}: {count} times ({pct:.2f}%)")
    if len(by_frequency) > 10:
        lines.append(f" ... and {len(by_frequency) - 10} more rare types")
    return "\n".join(lines)
def _worker_enumerate_types(args):
    """
    Worker function for parallel enumeration.

    Each worker keeps purely local state (its own counts/examples dicts), so
    no locking is needed; the parent process merges the per-worker results
    after all workers finish.

    Args:
        args: tuple of (n_vertices, n_samples_for_worker, worker_seed,
            worker_id, verbose).

    Returns:
        dict with keys 'type_counts', 'type_examples', 'n_failures' holding
        this worker's local results.

    Raises:
        ImportError: if pynauty is not installed.
    """
    n_vertices, n_samples, seed, worker_id, verbose = args
    if not PYNAUTY_AVAILABLE:
        raise ImportError("pynauty not installed. Install with: pip install pynauty")
    # Independent RNG stream per worker (seeded by the parent).
    rng = np.random.default_rng(seed)
    type_counts = defaultdict(int)
    type_examples = {}
    n_failures = 0
    report_every = max(1, n_samples // 5)
    for sample_idx in range(n_samples):
        if verbose and (sample_idx + 1) % report_every == 0:
            print(f" Worker {worker_id}: {sample_idx + 1}/{n_samples} samples, {len(type_counts)} types")
        try:
            points = generate_random_points_on_sphere(n_vertices, rng)
            graph, n_verts = extract_graph_from_hull(points)
            key = compute_canonical_hash(graph, n_verts)
        except Exception as e:
            # Skip degenerate configurations; report only the first one.
            n_failures += 1
            if verbose and n_failures == 1:
                print(f" Worker {worker_id}: Warning - degenerate case: {e}")
            continue
        type_counts[key] += 1
        if key not in type_examples:
            type_examples[key] = points.copy()
    return {
        'type_counts': dict(type_counts),
        'type_examples': type_examples,
        'n_failures': n_failures,
    }
def enumerate_combinatorial_types_parallel(n_vertices, n_samples, seed=None,
                                           n_workers=None, verbose=True):
    """
    Enumerate distinct combinatorial types using parallel processing.

    Partition-and-merge strategy: each worker processes its share of the
    samples with purely local state (no locks), and the per-worker
    dictionaries are merged once every worker has finished.

    Args:
        n_vertices: Number of vertices in each polyhedron.
        n_samples: Total number of random samples to generate.
        seed: Random seed (for reproducibility).
        n_workers: Number of parallel workers (default: cpu_count()).
        verbose: If True, print progress updates.

    Returns:
        dict with keys:
            - 'n_vertices': number of vertices
            - 'n_samples': number of samples generated
            - 'n_types': number of distinct combinatorial types found
            - 'type_counts': dict mapping canonical hash to count
            - 'type_examples': dict mapping canonical hash to example vertex set
            - 'n_failures': number of degenerate samples skipped
            - 'n_workers': number of workers used

    Raises:
        ImportError: if pynauty is not installed.
    """
    if not PYNAUTY_AVAILABLE:
        raise ImportError("pynauty not installed. Install with: pip install pynauty")
    if n_workers is None:
        n_workers = cpu_count()
    if verbose:
        print(f"Using {n_workers} parallel workers")
    # Split n_samples as evenly as possible; the first `remainder` workers
    # each take one extra sample.
    base, remainder = divmod(n_samples, n_workers)
    # Derive an independent seed per worker from the master seed.
    if seed is not None:
        rng = np.random.default_rng(seed)
        worker_seeds = [rng.integers(0, 2**32) for _ in range(n_workers)]
    else:
        worker_seeds = [None] * n_workers
    worker_args = [
        (n_vertices,
         base + (1 if worker_id < remainder else 0),
         worker_seeds[worker_id],
         worker_id,
         verbose)
        for worker_id in range(n_workers)
    ]
    if verbose:
        print(f"Starting parallel enumeration...")
    with Pool(n_workers) as pool:
        worker_results = pool.map(_worker_enumerate_types, worker_args)
    # Merge the per-worker dictionaries.
    if verbose:
        print(f"Merging results from {n_workers} workers...")
    merged_type_counts = defaultdict(int)
    merged_type_examples = {}
    total_failures = 0
    for partial in worker_results:
        for type_hash, count in partial['type_counts'].items():
            merged_type_counts[type_hash] += count
            # Keep the first example seen (arbitrary which worker's).
            if type_hash not in merged_type_examples:
                merged_type_examples[type_hash] = partial['type_examples'][type_hash]
        total_failures += partial['n_failures']
    if verbose:
        print(f"\nCompleted: {len(merged_type_counts)} distinct types found")
        if total_failures > 0:
            print(f" ({total_failures} degenerate cases skipped)")
    return {
        'n_vertices': n_vertices,
        'n_samples': n_samples,
        'n_types': len(merged_type_counts),
        'type_counts': dict(merged_type_counts),
        'type_examples': merged_type_examples,
        'n_failures': total_failures,
        'n_workers': n_workers,
    }