File size: 2,386 Bytes
e45abdb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
from __future__ import annotations

import argparse
import json
from pathlib import Path
import sys

import numpy as np
import yaml

ROOT = Path(__file__).resolve().parents[1]
sys.path.insert(0, str(ROOT / "src"))

from sparsewake.data import load_h5
from sparsewake.evaluate import evaluate_predictions, predict
from sparsewake.features import build_design_matrix
from sparsewake.splits import pose_holdout_split
from sparsewake.train import standardize_train_val_test, train_temporal_mlp


def main() -> None:
    """Train a temporal MLP on the configured dataset and report test metrics.

    Reads a YAML config, builds a lagged design matrix, performs a
    pose-held-out split, trains the model, and writes the resulting
    test-set metrics as JSON (also echoed to stdout).
    """
    cli = argparse.ArgumentParser()
    cli.add_argument("--config", required=True)
    cli.add_argument("--data", default=None)
    cli.add_argument("--quick", action="store_true")
    cli.add_argument("--out", default="tables/quick_train_metrics.json")
    opts = cli.parse_args()

    cfg = yaml.safe_load(Path(opts.config).read_text())
    h5_path = Path(opts.data) if opts.data else ROOT / cfg["data"]
    data = load_h5(h5_path, input_key=cfg.get("input_key", "X_raw"))

    # Quick mode shrinks the lag window so the design matrix stays small.
    history = 4 if opts.quick else int(cfg.get("history", 24))
    features, kept = build_design_matrix(
        data, feature_set=cfg.get("feature_set", "raw_norm"), history=history
    )
    targets = data["target"][kept]
    poses = data["pose_id"][kept]

    seed = int(cfg.get("seed", 1))
    train_idx, val_idx, test_idx = pose_holdout_split(poses, seed=seed)
    if opts.quick:
        # Cap each split for a fast smoke run.
        train_idx = train_idx[: min(len(train_idx), 512)]
        val_idx = val_idx[: min(len(val_idx), 128)]
        test_idx = test_idx[: min(len(test_idx), 128)]

    # Scaler is fit on the training rows only; the fitted params are unused here.
    features, _, _ = standardize_train_val_test(features, train_idx)

    # Three outputs when heading is predicted alongside location, else two.
    out_dim = 3 if cfg.get("target", "location") == "location_theta" else 2
    model = train_temporal_mlp(
        features,
        targets,
        train_idx,
        val_idx,
        output_dim=out_dim,
        epochs=3 if opts.quick else int(cfg.get("epochs", 50)),
        batch_size=int(cfg.get("batch_size", 1024)),
        seed=seed,
    )

    preds = predict(model, features[test_idx])
    metrics = evaluate_predictions(targets[test_idx, :out_dim], preds)
    metrics["quick_mode"] = bool(opts.quick)
    metrics["n_train"] = int(len(train_idx))
    metrics["n_test"] = int(len(test_idx))

    out_path = ROOT / opts.out
    out_path.parent.mkdir(parents=True, exist_ok=True)
    out_path.write_text(json.dumps(metrics, indent=2) + "\n")
    print(json.dumps(metrics, indent=2))


if __name__ == "__main__":
    main()