# DrivaerML-PCTR / scripts/preprocess_dml.py
# Uploaded with huggingface_hub by JrHoss (commit 9094afb, verified)
"""
DrivAerML Preprocessing Script
================================
Downloads boundary_i.vtp files from neashton/drivaerml, samples 100k cell
centres with CFD fields, applies local coordinate normalisation, and saves
as boundary_i.pt in a staging directory for upload to HuggingFace.
Usage:
pip install pyvista huggingface_hub torch numpy requests
export HF_TOKEN=your_token_here
python preprocess_dml.py [--start 1] [--end 500]
"""
import os, sys, argparse, tempfile, shutil, requests
import numpy as np
import torch
SOURCE_REPO = "neashton/drivaerml"  # HF dataset holding the raw boundary_i.vtp files
TARGET_REPO = "JrHoss/DrivaerML-PCTR"  # destination HF dataset for the staged .pt files
N_POINTS = 100_000  # number of cell centres sampled per run
SEED = 42  # base RNG seed; combined with run_id for per-run reproducibility
STAGING_DIR = "./staging"  # local directory mirroring the target repo layout
# Run IDs known to be absent from the source dataset; skipped without a download attempt.
MISSING_RUNS = {
    167, 211, 218, 221, 248, 282, 291, 295,
    316, 325, 329, 364, 370, 376, 403, 473
}
# Cell-data arrays extracted from each VTP, in output column order.
# wallShearStressMeanTrim is a vector field and is reduced to its magnitude (see process_vtp).
TARGET_FIELDS = [
    "CpMeanTrim",
    "pMeanTrim",
    "pPrime2MeanTrim",
    "wallShearStressMeanTrim",
]
# ── Core processing ────────────────────────────────────────────────────────────
def download_vtp(run_id, dest, token):
    """Stream boundary_{run_id}.vtp from the source dataset into *dest*.

    Returns True on success, False when the remote file is missing (HTTP 404).
    Any other HTTP error status raises via raise_for_status().
    """
    url = f"https://huggingface.co/datasets/{SOURCE_REPO}/resolve/main/run_{run_id}/boundary_{run_id}.vtp"
    auth = {"Authorization": f"Bearer {token}"}
    with requests.get(url, headers=auth, stream=True, timeout=300) as resp:
        if resp.status_code == 404:
            return False
        resp.raise_for_status()
        # Write in large (8 MiB) chunks — these files are hundreds of MB.
        with open(dest, "wb") as out:
            for block in resp.iter_content(chunk_size=8192 * 1024):
                out.write(block)
    return True
def process_vtp(vtp_path, run_id):
    """Read a VTP surface mesh and return (cell_centres, target_fields).

    Returns
    -------
    coords  : float32 ndarray [M, 3] of cell-centre positions.
    targets : float32 ndarray [M, 4], one column per TARGET_FIELDS entry;
              2-D (vector) fields such as wall shear stress are reduced
              to their per-cell Euclidean magnitude.
    """
    import pyvista as pv  # deferred: heavy import, only needed here
    surface = pv.read(vtp_path)
    coords = np.asarray(surface.cell_centers().points, dtype=np.float32)
    targets = np.zeros((coords.shape[0], len(TARGET_FIELDS)), dtype=np.float32)
    for col, field_name in enumerate(TARGET_FIELDS):
        values = np.asarray(surface.cell_data[field_name], dtype=np.float32)
        if values.ndim == 2:
            # Vector field (e.g. wallShearStress) -> scalar magnitude per cell.
            values = np.linalg.norm(values, axis=1)
        targets[:, col] = values
    return coords, targets
def sample_and_normalise(coords, targets, run_id):
    """Draw N_POINTS cell centres (seeded by run_id) and z-score the coordinates.

    Sampling is with replacement only when the mesh has fewer than N_POINTS
    cells. Normalisation statistics are computed on the *sampled* points.
    Std components below 1e-8 are clamped to 1.0 to avoid division by zero.

    Returns (coords_norm, targets_sampled, idx, mean, std).
    """
    rng = np.random.default_rng(run_id + SEED)
    n_available = len(coords)
    idx = rng.choice(n_available, size=N_POINTS, replace=n_available < N_POINTS)
    sampled = coords[idx]
    picked_targets = targets[idx]
    mean = sampled.mean(axis=0)
    std = sampled.std(axis=0)
    std = np.where(std < 1e-8, 1.0, std)
    return (sampled - mean) / std, picked_targets, idx, mean, std
def make_pt(coords_norm, targets, idx, mean, std, run_id):
    """Bundle the sampled arrays into the dict persisted by torch.save.

    Keys: coords [N, 3], targets [N, 4], sample_idx [N] (int64 indices into
    the full mesh), coords_mean [3], coords_std [3], and the plain-int run_id.
    """
    spec = {
        "coords": (coords_norm, torch.float32),   # normalised positions
        "targets": (targets, torch.float32),      # CFD target fields
        "sample_idx": (idx, torch.int64),         # indices into original mesh
        "coords_mean": (mean, torch.float32),     # per-axis mean used for z-score
        "coords_std": (std, torch.float32),       # per-axis std (clamped) used for z-score
    }
    bundle = {key: torch.tensor(arr, dtype=dt) for key, (arr, dt) in spec.items()}
    bundle["run_id"] = run_id
    return bundle
# ── Per-run pipeline ───────────────────────────────────────────────────────────
def process_run(run_id, token, tmp_dir):
    """Download, sample, and stage one run. Returns True on success.

    Idempotent: a run whose .pt file already exists in STAGING_DIR is
    skipped. The temporary VTP download is always deleted, even on error;
    any exception is caught, reported, and turned into a False return.
    """
    pt_path = os.path.join(STAGING_DIR, f"run_{run_id}", f"boundary_{run_id}.pt")
    if os.path.exists(pt_path):
        print(f"[{run_id:03d}] already staged — skip")
        return True
    vtp_path = os.path.join(tmp_dir, f"boundary_{run_id}.vtp")
    try:
        print(f"[{run_id:03d}] downloading ...", end=" ", flush=True)
        found = download_vtp(run_id, vtp_path, token)
        if not found:
            print("NOT FOUND")
            return False
        print(f"done ({os.path.getsize(vtp_path)/1024**2:.0f} MB)")
        coords, targets = process_vtp(vtp_path, run_id)
        coords_n, targets, idx, mean, std = sample_and_normalise(coords, targets, run_id)
        os.makedirs(os.path.dirname(pt_path), exist_ok=True)
        bundle = make_pt(coords_n, targets, idx, mean, std, run_id)
        torch.save(bundle, pt_path)
        print(f"[{run_id:03d}] saved ({os.path.getsize(pt_path)/1024**2:.1f} MB)")
        return True
    except Exception as exc:
        # Best-effort batch job: report the failure and keep going.
        print(f"[{run_id:03d}] ERROR: {exc}")
        return False
    finally:
        if os.path.exists(vtp_path):
            os.remove(vtp_path)
# ── Main ───────────────────────────────────────────────────────────────────────
def main():
    """CLI entry point: stage every available run in [--start, --end].

    Requires HF_TOKEN in the environment. Runs listed in MISSING_RUNS are
    skipped; the shared download scratch directory is removed on exit, and
    Ctrl-C still produces a final summary.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--start", type=int, default=1)
    parser.add_argument("--end", type=int, default=500)
    args = parser.parse_args()

    token = os.environ.get("HF_TOKEN")
    if not token:
        sys.exit("Set HF_TOKEN environment variable first.")

    os.makedirs(STAGING_DIR, exist_ok=True)
    tmp_dir = tempfile.mkdtemp(prefix="drivaerml_")
    processed, skipped, failed = [], [], []
    try:
        for run_id in range(args.start, args.end + 1):
            if run_id in MISSING_RUNS:
                skipped.append(run_id)
            elif process_run(run_id, token, tmp_dir):
                processed.append(run_id)
            else:
                failed.append(run_id)
    except KeyboardInterrupt:
        print("\nInterrupted.")
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
    print(f"\nDone — processed: {len(processed)} skipped: {len(skipped)} failed: {len(failed)}")
    if failed:
        print(f"Failed IDs: {failed}")


if __name__ == "__main__":
    main()