# Image_SDF/src/sdf_geo/optimizer.py
"""
Geometry Optimizer Module
Gradient-based optimization for geometric constraints.
"""
import torch
from typing import Dict, List, Callable
class GeometryOptimizer:
    """
    Optimizer for geometric constraints using gradient descent.
    Following GeoSDF paper methodology with AdamW optimizer.
    """
    def __init__(self, learning_rate: float = 0.1, max_iterations: int = 500,
                 convergence_threshold: float = 1e-6):
        """
        Args:
            learning_rate: Initial AdamW learning rate (decayed by a cosine
                annealing schedule down to 1e-4 over max_iterations).
            max_iterations: Maximum number of optimization steps.
            convergence_threshold: Stop early when the absolute change in
                total loss between consecutive iterations falls below this.
        """
        self.learning_rate = learning_rate
        self.max_iterations = max_iterations
        self.convergence_threshold = convergence_threshold

    def optimize(self, sdf, constraints: List[Callable],
                 weights: List[float], verbose: bool = False) -> Dict:
        """
        Optimize SDF parameters to satisfy constraints.

        Args:
            sdf: SDF primitive with learnable parameters (must expose a
                ``parameters()`` iterable, e.g. a torch.nn.Module).
            constraints: List of zero-argument callables, each returning a
                scalar loss tensor built from the SDF's parameters.
            weights: Weight for each constraint (same length as constraints).
            verbose: Print optimization progress every 100 iterations.

        Returns:
            Dictionary with 'final_loss' (float), 'converged' (bool),
            and 'iterations' (int).

        Raises:
            ValueError: If constraints and weights have different lengths.
        """
        # zip() would silently drop trailing constraints on a mismatch;
        # fail loudly instead.
        if len(constraints) != len(weights):
            raise ValueError(
                f"Got {len(constraints)} constraints but {len(weights)} weights"
            )

        # Get trainable parameters
        params = [p for p in sdf.parameters() if p.requires_grad]

        # Nothing to do: no free parameters, no constraints, or a zero
        # iteration budget. (The original raised NameError on return when
        # the loop body never ran, since `current_loss`/`i` were unbound.)
        if not params or not constraints or self.max_iterations <= 0:
            return {'final_loss': 0.0, 'converged': True, 'iterations': 0}

        optimizer = torch.optim.AdamW(params, lr=self.learning_rate)
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer, T_max=self.max_iterations, eta_min=1e-4
        )

        prev_loss = float('inf')
        current_loss = float('inf')
        converged = False
        iterations = 0

        for i in range(self.max_iterations):
            iterations = i + 1
            optimizer.zero_grad()

            # Weighted sum of constraint losses. Starting `sum` from the
            # Python int 0 lets the result inherit the device/dtype of the
            # constraint tensors; the original `torch.tensor(0.0)` seed was
            # always CPU float32 and broke for GPU or float64 parameters.
            total_loss = sum(w * c() for c, w in zip(constraints, weights))

            # Backward pass
            total_loss.backward()
            # Gradient clipping for stability
            torch.nn.utils.clip_grad_norm_(params, max_norm=1.0)
            optimizer.step()
            scheduler.step()

            current_loss = total_loss.item()
            if verbose and i % 100 == 0:
                print(f" Iteration {i}: loss = {current_loss:.6f}")

            # Converged once the loss change stalls below the threshold.
            if abs(prev_loss - current_loss) < self.convergence_threshold:
                converged = True
                break
            prev_loss = current_loss

        return {
            'final_loss': current_loss,
            'converged': converged,
            'iterations': iterations,
        }