"""
Utilities for loading WAKESET volume extractions (unstructured CFD data)
and interpolating them onto regular 3D grids.
"""
import pandas as pd
import numpy as np
from pathlib import Path
def process_fluent_export_sparse(
    filepath: str,
    grid_dim: int = 128,  # Sizes larger than 128 will require interpolation
    precision_round: int = 3,
    fill_value: float = 0.0  # Use 0.0 or np.nan for solid object (XLUUV)
):
    """
    Parse a CFD volume export where solid-geometry cells are missing and
    map the scattered physical coordinates onto a dense cubic tensor grid.

    Parameters
    ----------
    filepath : str or Path
        Comma-separated export containing ``x/y/z-coordinate`` columns
        (header case, spacing, and ``-`` vs ``_`` are normalized) plus
        any of the known flow channels (velocity components, pressures).
    grid_dim : int
        Edge length of the output cube (``grid_dim**3`` cells).
    precision_round : int
        Decimal places used to snap float coordinates onto shared grid
        lines; reduce it if floating-point jitter inflates the tick count.
    fill_value : float
        Value left in cells with no data (inside the solid body).
        Use ``np.nan`` if you want to mask those cells in a loss later.

    Returns
    -------
    dict[str, np.ndarray] or None
        Channel name -> float32 cube of shape (grid_dim,)*3, or None on
        any failure (read error, missing columns, too many grid ticks).
    """
    filepath = Path(filepath)
    print(f"Processing: {filepath.name}...")

    # 1. Load Data (C engine; malformed rows warn instead of aborting).
    try:
        df = pd.read_csv(
            filepath, sep=',', skipinitialspace=True,
            engine='c', on_bad_lines='warn'
        )
    except Exception as e:
        print(f"Read failed: {e}")
        return None

    # Normalize columns: " X-Coordinate" -> "x_coordinate"
    df.columns = [c.strip().lower().replace('-', '_') for c in df.columns]

    # Fix: fail per the documented contract (return None) instead of an
    # uncaught KeyError when coordinate columns are absent.
    required = ('x_coordinate', 'y_coordinate', 'z_coordinate')
    missing = [c for c in required if c not in df.columns]
    if missing:
        print(f"Error: missing coordinate column(s): {missing}")
        return None

    # 2. Extract and Round Coordinates
    # Rounding is critical to group points into grid lines
    x_raw = df['x_coordinate'].values.round(precision_round)
    y_raw = df['y_coordinate'].values.round(precision_round)
    z_raw = df['z_coordinate'].values.round(precision_round)

    # 3. Detect Grid Ticks (The "Ruler")
    # We find the unique values for each axis to define the grid
    x_unique = np.unique(x_raw)
    y_unique = np.unique(y_raw)
    z_unique = np.unique(z_raw)

    # Validation: Do we have at most grid_dim ticks per axis?
    # (Allowing slight variance if entire slices are missing, though rare)
    if len(x_unique) > grid_dim or len(y_unique) > grid_dim or len(z_unique) > grid_dim:
        print(f"Error: Found too many grid ticks (X:{len(x_unique)}, Y:{len(y_unique)}, Z:{len(z_unique)}).")
        print("Try reducing 'precision_round' (e.g., to 3) if floating point jitter is high.")
        return None

    # 4. Map Physical Coords to Integer Indices (0 to grid_dim-1)
    # np.searchsorted finds the index of each raw point in the unique array,
    # effectively "snapping" the float coordinates to integer grid positions.
    idx_x = np.searchsorted(x_unique, x_raw)
    idx_y = np.searchsorted(y_unique, y_raw)
    idx_z = np.searchsorted(z_unique, z_raw)

    # 5. Create the Blank Canvas (The Full Cube)
    # Initialize with fill_value (0.0 represents "no flow" inside the object)
    volume_shape = (grid_dim, grid_dim, grid_dim)
    channels = [
        'velocity_magnitude', 'x_velocity', 'y_velocity', 'z_velocity',
        'total_pressure', 'absolute_pressure'
    ]
    volume_data = {}
    for col in channels:
        if col in df.columns:
            grid = np.full(volume_shape, fill_value, dtype=np.float32)
            # 6. Fill the known data using the integer indices.
            # Missing indices (the solid object) remain as 'fill_value'.
            # Note: We assume standard (x, y, z) layout.
            # If your ML model expects (z, y, x), swap the indices here.
            grid[idx_x, idx_y, idx_z] = df[col].values
            volume_data[col] = grid

    # Fix: the old code indexed volume_data['velocity_magnitude'] directly,
    # raising KeyError when that channel wasn't in the export.
    if not volume_data:
        print("Error: no known flow channels found in file.")
        return None

    # 7. Verification — count cells that actually received data.
    # Fix: with fill_value=np.nan, `grid != fill_value` is True everywhere
    # (NaN compares unequal to itself), so the old count was meaningless.
    sample = next(iter(volume_data.values()))
    if np.isnan(fill_value):
        filled_count = int(np.count_nonzero(~np.isnan(sample)))
    else:
        # NOTE: a cell whose real value equals fill_value (e.g. exactly 0.0
        # velocity) is indistinguishable from a solid cell here, as before.
        filled_count = int(np.count_nonzero(sample != fill_value))
    print(f" -> Grid filled. Solid cells (missing data): {grid_dim**3 - filled_count}")
    return volume_data
if __name__ == "__main__":
    # --- Smoke test against a local WAKESET export ---
    target = Path(
        "C:/Users/zacco/Desktop/Files/WAKESET/Test Files/Forward_0100_ms_Angle_00_CUBE_128"
    )  # Update path
    # Fall back to the ".csv" variant when the bare name is absent.
    if not target.exists() and target.with_suffix(".csv").exists():
        target = target.with_suffix(".csv")
    if target.exists():
        # fill_value=0.0 is common for "inside wall" (zero velocity);
        # fill_value=np.nan is better if you want to mask the loss function later.
        if volumes := process_fluent_export_sparse(target, fill_value=0.0):
            print("Success.")
            # Verify the shape, then persist all channels compressed.
            print(f"Shape: {volumes['velocity_magnitude'].shape}")
            np.savez_compressed(target.name + ".npz", **volumes)