"""
FactoryNet Loader - Easy access to hackathon datasets.
Usage:
from factorynet_loader import load_factorynet
# Load AURSAD data
df, metadata = load_factorynet("aursad")
# Get specific columns
setpoints = df[[c for c in df.columns if c.startswith("setpoint_")]]
efforts = df[[c for c in df.columns if c.startswith("effort_")]]
feedback = df[[c for c in df.columns if c.startswith("feedback_")]]
"""
import pandas as pd
import json
from pathlib import Path
from typing import Tuple, List, Optional, Dict
import numpy as np
try:
from datasets import load_dataset
HF_AVAILABLE = True
except ImportError:
HF_AVAILABLE = False
# HuggingFace repo
HF_REPO = "Forgis/factorynet-hackathon"
def load_factorynet(
    dataset: str = "aursad",
    split: str = "train",
    from_hf: bool = True,
    local_path: Optional[Path] = None,
) -> Tuple[pd.DataFrame, List[Dict]]:
    """
    Load FactoryNet dataset.
    Args:
        dataset: "aursad" or "voraus"
        split: "train" (full data) or future splits
        from_hf: If True, load from HuggingFace Hub
        local_path: Local path override (also used as fallback when the
            `datasets` package is not installed)
    Returns:
        df: DataFrame with time series
        metadata: List of episode metadata dicts
    Raises:
        ImportError: from_hf=True but the `datasets` package is missing
            and no local_path fallback was provided.
        ValueError: neither loading source was usable.
    """
    if from_hf:
        if HF_AVAILABLE:
            return _load_from_hf(dataset, split)
        if local_path is None:
            # Previously this fell through to the generic ValueError below,
            # which told the caller to "set from_hf=True" even though they
            # already had. Name the actual problem: the missing dependency.
            raise ImportError(
                "from_hf=True requires the `datasets` package "
                "(pip install datasets); alternatively provide local_path."
            )
    if local_path:
        return _load_from_local(local_path, dataset)
    raise ValueError("Either set from_hf=True or provide local_path")
def _load_from_hf(dataset: str, split: str) -> Tuple[pd.DataFrame, List[Dict]]:
    """Load a dataset from the HuggingFace Hub.

    Args:
        dataset: Sub-directory inside the hub repo ("aursad" or "voraus").
        split: Split name passed through to `load_dataset` (e.g. "train").
    Returns:
        df: Time-series DataFrame converted from the HF dataset.
        metadata: Episode metadata list, or [] if the metadata file could
            not be fetched/parsed (metadata is best-effort).
    """
    ds = load_dataset(HF_REPO, data_dir=dataset, split=split)
    df = ds.to_pandas()
    # Metadata is optional: fetch best-effort, but never let a missing or
    # malformed metadata file break the main data load.
    try:
        from huggingface_hub import hf_hub_download
        meta_file = hf_hub_download(
            repo_id=HF_REPO,
            filename=f"{dataset}/{dataset}_metadata.json",
            repo_type="dataset"
        )
        with open(meta_file) as f:
            metadata = json.load(f)
    except Exception:
        # Was a bare `except:` — that also swallowed KeyboardInterrupt and
        # SystemExit. `Exception` keeps the best-effort behavior without that.
        metadata = []
    return df, metadata
def _load_from_local(local_path: Path, dataset: str) -> Tuple[pd.DataFrame, List[Dict]]:
"""Load from local files."""
local_path = Path(local_path)
# Find parquet file
parquet_files = list(local_path.glob(f"**/*{dataset}*factorynet*.parquet"))
if not parquet_files:
parquet_files = list(local_path.glob("**/*.parquet"))
if not parquet_files:
raise FileNotFoundError(f"No parquet files found in {local_path}")
df = pd.read_parquet(parquet_files[0])
# Load metadata
meta_files = list(local_path.glob(f"**/*{dataset}*metadata*.json"))
if meta_files:
with open(meta_files[0]) as f:
metadata = json.load(f)
else:
metadata = []
return df, metadata
def get_episode(df: pd.DataFrame, episode_id: str) -> pd.DataFrame:
    """Return a copy of the rows belonging to one episode."""
    mask = df["episode_id"] == episode_id
    return df.loc[mask].copy()
def get_episodes_by_fault(df: pd.DataFrame, metadata: List[Dict], fault_type: str) -> pd.DataFrame:
    """Return a copy of all rows whose episode is labeled with `fault_type`."""
    wanted = {m["episode_id"] for m in metadata if m.get("fault_type") == fault_type}
    return df[df["episode_id"].isin(wanted)].copy()
def extract_features(df: pd.DataFrame, window_size: int = 100) -> np.ndarray:
    """
    Extract basic features for anomaly detection.
    Returns array of shape (n_windows, n_features).
    """
    # Only the robot-signal columns participate in the features.
    prefixes = ("setpoint_", "effort_", "feedback_")
    signal_cols = [c for c in df.columns if c.startswith(prefixes)]
    data = df[signal_cols].values

    def _window_stats(chunk: np.ndarray) -> np.ndarray:
        # Per-column: mean, std, max, min, mean absolute first difference.
        return np.concatenate([
            chunk.mean(axis=0),
            chunk.std(axis=0),
            chunk.max(axis=0),
            chunk.min(axis=0),
            np.abs(np.diff(chunk, axis=0)).mean(axis=0),
        ])

    # Non-overlapping windows; a trailing partial window is dropped.
    n_windows = len(data) // window_size
    feats = [_window_stats(data[k * window_size:(k + 1) * window_size])
             for k in range(n_windows)]
    return np.array(feats)
def compute_causal_residual(df: pd.DataFrame, axis: int = 0) -> pd.Series:
    """
    Compute causal residual: effort that can't be explained by setpoint.
    High residual = anomaly (effort without command, or command without effort).
    """
    setpoint = df[f"setpoint_pos_{axis}"]
    # Torque is the preferred effort signal; fall back to motor current.
    torque_col = f"effort_torque_{axis}"
    if torque_col in df.columns:
        effort = df[torque_col]
    else:
        effort = df[f"effort_current_{axis}"]
    # Effort should track setpoint activity; z-score the effort and
    # subtract the normalized setpoint motion (first element is NaN
    # because diff() has no predecessor there).
    activity = setpoint.diff().abs()
    z_effort = (effort - effort.mean()) / effort.std()
    return z_effort - activity / (activity.max() + 1e-6)
# Quick smoke test: exercise the local loading path plus feature extraction.
if __name__ == "__main__":
    print("Testing FactoryNet loader...")
    data_dir = Path(__file__).parent.parent / "output" / "aursad_real"
    try:
        frame, episodes = load_factorynet("aursad", from_hf=False, local_path=data_dir)
        print(f"Loaded {len(frame)} rows, {len(frame.columns)} columns")
        print(f"Metadata for {len(episodes)} episodes")
        print(f"Columns: {frame.columns.tolist()[:10]}...")
        feats = extract_features(frame)
        print(f"Extracted features: {feats.shape}")
    except Exception as e:
        # Best-effort smoke test: report and suggest the HF path instead.
        print(f"Local load failed: {e}")
        print("Try: pip install datasets && load with from_hf=True")