import torch
import torch.nn as nn

# Run inference on the GPU when available, otherwise fall back to the CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

def _rollout(model, initial_state, num_steps, norm_stats):
    """
    Performs a multi-step rollout simulation using the trained model.
    The model's output from one step is used as the input for the next.
    Applies normalization to inputs and de-normalization to outputs.
    """
    # --- UNPACK NORMALIZATION STATS ---
    coords_mean = norm_stats['coords_mean'].to(device)
    coords_std = norm_stats['coords_std'].to(device)
    vel_mean = norm_stats['vel_mean'].to(device)
    vel_std = norm_stats['vel_std'].to(device)
    inlet_vel_mean = norm_stats['inlet_vel_mean'].to(device)
    inlet_vel_std = norm_stats['inlet_vel_std'].to(device)
    epsilon = 1e-7  # Guards against division by zero when a std is (near) zero.

    predicted_trajectory = []

    # --- INITIAL STATE (static geometry/node types + first-step velocities) ---
    current_velocities = initial_state['current_velocities'].float().to(device)
    coordinates = initial_state['coordinates'].float().to(device)
    node_type = initial_state['node_type'].float().to(device)
for step in range(num_steps):
raw_vel = initial_state['meta_info']['velocity']
if torch.is_tensor(raw_vel):
inlet_vel = raw_vel.clone().detach().float().to(device)
else:
inlet_vel = torch.tensor(raw_vel, dtype=torch.float32).to(device)
if inlet_vel.dim() > 1:
inlet_vel = inlet_vel.flatten()[0]
elif inlet_vel.dim() == 1 and inlet_vel.numel() == 1:
inlet_vel = inlet_vel.item()
if isinstance(inlet_vel, torch.Tensor):
norm_inlet_scalar = (inlet_vel - inlet_vel_mean) / (inlet_vel_std + epsilon)
else:
norm_inlet_scalar = (inlet_vel - inlet_vel_mean.item()) / (inlet_vel_std.item() + epsilon)
num_nodes = coordinates.shape[0]
if isinstance(norm_inlet_scalar, torch.Tensor):
norm_inlet_vel = norm_inlet_scalar.expand(num_nodes).float()
else:
norm_inlet_vel = torch.full((num_nodes,), float(norm_inlet_scalar), dtype=torch.float32, device=device)
        # Normalize the per-step inputs with the training statistics.
        norm_coords = (coordinates - coords_mean) / (coords_std + epsilon)
        norm_current_vel = (current_velocities - vel_mean) / (vel_std + epsilon)

        # Add a batch dimension and concatenate the per-node features:
        # [coords | node_type | current velocity | inlet velocity].
        norm_coords_batch = norm_coords.unsqueeze(0)
        node_type_batch = node_type.unsqueeze(0)
        norm_current_vel_batch = norm_current_vel.unsqueeze(0)
        norm_inlet_vel_feature_batch = norm_inlet_vel.unsqueeze(0).unsqueeze(-1)

        final_input = torch.cat([
            norm_coords_batch,
            node_type_batch,
            norm_current_vel_batch,
            norm_inlet_vel_feature_batch
        ], dim=-1)
        # Predict the next-step velocities (no gradients needed at eval time).
        with torch.no_grad():
            predicted_normalized_velocities = model(final_input).squeeze(0)

        # De-normalize the prediction and feed it back in as the next input state.
        predicted_next_velocities = (predicted_normalized_velocities * (vel_std + epsilon)) + vel_mean
        predicted_trajectory.append(predicted_next_velocities)
        current_velocities = predicted_next_velocities

    return torch.stack(predicted_trajectory, dim=0)
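

# --- ILLUSTRATIVE HELPER (not part of the original pipeline) ---
# A minimal sketch of how the `norm_stats` dictionary consumed by _rollout could
# be assembled. The keys mirror exactly what _rollout unpacks; the input tensors
# and their assumed shapes (coords: [nodes, dims], velocities:
# [steps, nodes, dims], inlet velocities: [num_trajectories]) are hypothetical
# stand-ins rather than anything defined in this file.
def _example_norm_stats(train_coords, train_velocities, train_inlet_velocities):
    return {
        'coords_mean': train_coords.mean(dim=0),
        'coords_std': train_coords.std(dim=0),
        'vel_mean': train_velocities.mean(dim=(0, 1)),
        'vel_std': train_velocities.std(dim=(0, 1)),
        'inlet_vel_mean': train_inlet_velocities.mean(),
        'inlet_vel_std': train_inlet_velocities.std(),
    }

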
def evaluate(model, trajectory_data, norm_stats):
    """
    Takes a full validation trajectory, normalizes it, and generates a model rollout prediction.

    Args:
        model: The trained Transolver model.
        trajectory_data: A dictionary containing the full, grouped validation trajectory.
        norm_stats: A dictionary with the mean and std of the training data.

    Returns:
        A (loss, results) tuple. The loss slot is unused here and returned as None;
        `results` holds the ground-truth and predicted trajectories plus mesh metadata.
    """
    # One rollout step per recorded time step in the trajectory.
    num_steps = trajectory_data['current_velocities'].shape[0]

    # Only the first-step velocities are handed to the model; everything after
    # that is produced autoregressively inside _rollout.
    initial_state = {
        'coordinates': trajectory_data['coordinates'].float(),
        'node_type': trajectory_data['node_type'].float(),
        'current_velocities': trajectory_data['current_velocities'][0].float(),  # First step
        'meta_info': trajectory_data['meta_info']
    }
    predicted_trajectory = _rollout(model, initial_state, num_steps, norm_stats)

    results = {
        'ground_truth_trajectory': trajectory_data['target_velocities'].float(),
        'predicted_trajectory': predicted_trajectory,
        'coordinates': trajectory_data['coordinates'].float(),
        'node_type': trajectory_data['node_type'].float(),
        'meta_info': trajectory_data['meta_info']
    }
    return None, results
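

# --- ILLUSTRATIVE USAGE (not part of the original pipeline) ---
# A hedged sketch of consuming the `results` dictionary returned by evaluate():
# the rollout RMSE between predicted and ground-truth velocities. It assumes the
# two trajectories share the same [steps, nodes, dims] shape; the metric choice
# is an example, not the project's official evaluation.
#
# Typical call sequence (model and data loading omitted):
#     _, results = evaluate(model, trajectory_data, norm_stats)
#     print(_example_rollout_rmse(results))
def _example_rollout_rmse(results):
    predicted = results['predicted_trajectory']
    target = results['ground_truth_trajectory'].to(predicted.device)
    return torch.sqrt(torch.mean((predicted - target) ** 2)).item()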