zylipku's picture
Update README.md
282cc13 verified
metadata
# Auto-generated dataset metadata: per-config schema/statistics plus the
# mapping from config names to parquet shards in the repository.
dataset_info:
  # Each record: experiment tag, trajectory id, frame-stack shape,
  # raw uint8 frames, per-frame [left, right] column bounds, and
  # per-frame (x, y) barycenter.
  - config_name: 30V_Jan24
    features:
      - name: config
        dtype: string
      - name: traj_id
        dtype: string
      - name: shape
        list: int64
      - name: data
        list:
          list:
            list: uint8
      - name: left_right
        list:
          list: int64
      - name: barycenter
        list:
          list: float64
    splits:
      - name: train
        num_bytes: 1011799274
        num_examples: 220
    download_size: 32061722
    dataset_size: 1011799274
  - config_name: 60V_Dec24
    features:
      - name: config
        dtype: string
      - name: traj_id
        dtype: string
      - name: shape
        list: int64
      - name: data
        list:
          list:
            list: uint8
      - name: left_right
        list:
          list: int64
      - name: barycenter
        list:
          list: float64
    splits:
      - name: train
        num_bytes: 1107240702
        num_examples: 605
    download_size: 34858112
    dataset_size: 1107240702
  # "default" is the union of both experiment configs (220 + 605 = 825).
  - config_name: default
    features:
      - name: config
        dtype: string
      - name: traj_id
        dtype: string
      - name: shape
        list: int64
      - name: data
        list:
          list:
            list: uint8
      - name: left_right
        list:
          list: int64
      - name: barycenter
        list:
          list: float64
    splits:
      - name: train
        num_bytes: 2119039976
        num_examples: 825
    download_size: 66925984
    dataset_size: 2119039976
# Where each config's train split lives inside the repo.
configs:
  - config_name: 30V_Jan24
    data_files:
      - split: train
        path: 30V_Jan24/train-*
  - config_name: 60V_Dec24
    data_files:
      - split: train
        path: 60V_Dec24/train-*
  - config_name: default
    data_files:
      - split: train
        path: data/train-*

Descriptions

Converting script

import pickle
from pathlib import Path

import numpy as np
from datasets import Dataset


DATA_DIR = Path("/path/to/cached/hugging_face/datasets/for/MLDS-NUS/Experimental_Images")
# should end with something like "snapshots/fd299418e9435f8fd98956a3f0a7344d208cc142"


def calc_left_right(data: np.ndarray):
    """Find the leftmost and rightmost non-zero columns of each frame.

    Parameters
    ----------
    data : np.ndarray
        Stack of 2-D frames, shape (seq_len, H, W); a column counts as
        occupied when any pixel in it is non-zero.

    Returns
    -------
    np.ndarray
        Integer array of shape (seq_len, 2) holding [leftmost, rightmost]
        occupied column indices per frame, or [-1, -1] for an all-zero frame.
    """
    bounds = []
    for frame in data:
        occupied = (frame != 0).any(axis=-2)  # one bool per column
        if occupied.any():
            lo = occupied.argmax()
            # rightmost occupied column = width-1 minus offset found from the end
            hi = len(occupied) - 1 - occupied[::-1].argmax()
        else:
            lo, hi = -1, -1
        bounds.append(np.array([lo, hi]))
    return np.stack(bounds, axis=0)  # shape: (seq_len, 2)


def calc_barycenter(data: np.ndarray) -> np.ndarray:
    """
    Calculate the barycenter of the polymer from the snapshot.
    Assumes snapshot shape is (100, 500).

    The barycenter is the intensity-weighted mean pixel coordinate of each
    frame, computed over the trailing two axes.

    Returns an array of shape (seq_len, 2) with [row, column] coordinates.
    Note: an all-zero frame divides by zero and yields NaN entries.
    """
    rows = np.arange(data.shape[-2]).reshape(-1, 1)
    cols = np.arange(data.shape[-1]).reshape(1, -1)
    total = data.sum(axis=(-2, -1))
    bary_row = (data * rows).sum(axis=(-2, -1)) / total
    bary_col = (data * cols).sum(axis=(-2, -1)) / total
    return np.stack([bary_row, bary_col], axis=-1)  # (seq_len, 2)


def gen():
    """Yield one record per trajectory across both experiment folders.

    Reads `<DATA_DIR>/<folder>.pkl` (a dict mapping trajectory id -> raw
    frame stack) for each experiment, clips frames to uint8 to save memory,
    and attaches the derived left/right bounds and barycenters.
    """
    for folder in ("30V_Jan24", "60V_Dec24"):
        # NOTE: pickle.load on a local cached file; do not point DATA_DIR
        # at untrusted data (pickle can execute arbitrary code).
        with open(DATA_DIR / f"{folder}.pkl", "rb") as f:
            trajectories = pickle.load(f)

        for traj_id, raw in trajectories.items():
            frames = np.clip(raw, 0, 255).astype(np.uint8)  # save memory
            # Invert so polymer pixels are non-zero for the feature helpers.
            inverted = 255 - frames
            yield {
                "config": folder,
                "traj_id": traj_id,
                "shape": list(frames.shape),
                "data": frames,
                "left_right": calc_left_right(inverted),
                "barycenter": calc_barycenter(inverted),
            }


# Build the combined dataset (all trajectories from both folders) and
# upload it as the repo's "default" config.
ds = Dataset.from_generator(gen)
ds = ds.with_format("numpy")

ds.push_to_hub("MLDS-NUS/polymer-dynamics_experimental-data")


# upload by configs
def gen(folder: str):
    """Yield one record per trajectory for a single experiment folder.

    Same record layout as the combined generator above, but restricted to
    `<DATA_DIR>/<folder>.pkl` so each experiment can be pushed as its own
    config.
    """
    # NOTE: pickle.load on a local cached file; do not point DATA_DIR at
    # untrusted data (pickle can execute arbitrary code).
    with open(DATA_DIR / f"{folder}.pkl", "rb") as f:
        trajectories = pickle.load(f)

    for traj_id, raw in trajectories.items():
        frames = np.clip(raw, 0, 255).astype(np.uint8)
        # Invert so polymer pixels are non-zero for the feature helpers.
        inverted = 255 - frames
        yield {
            "config": folder,
            "traj_id": traj_id,
            "shape": list(frames.shape),
            "data": frames,
            "left_right": calc_left_right(inverted),
            "barycenter": calc_barycenter(inverted),
        }


# Upload each experiment as its own named config; data_dir keeps the
# parquet shards for each config in a separate folder of the repo.
for config_name in ["30V_Jan24", "60V_Dec24"]:
    # Bind config_name via a default argument so the lambda does not
    # capture the loop variable by reference.
    ds = Dataset.from_generator(lambda cn=config_name: gen(cn))
    ds = ds.with_format("numpy")

    ds.push_to_hub(
        "MLDS-NUS/polymer-dynamics_experimental-data",
        config_name=config_name,
        data_dir=f"{config_name}",
    )

How to use

Loading this dataset directly with the `datasets` library is now supported!

from datasets import load_dataset
import numpy as np


# The config name is load_dataset's second (positional) argument, `name`;
# passing it as `config_name=` collides with the builder's internal
# `config_name` keyword. Valid configs: "30V_Jan24" and "60V_Dec24"
# (the original example asked for a non-existent "60V_Jan24").
hf_dataset_30V = load_dataset("MLDS-NUS/polymer-dynamics_experimental-data", "30V_Jan24")
hf_dataset_60V = load_dataset("MLDS-NUS/polymer-dynamics_experimental-data", "60V_Dec24")

# "numpy" formatting returns np.ndarray values instead of nested lists.
hf_dataset_30V = hf_dataset_30V.with_format("numpy")["train"]
hf_dataset_60V = hf_dataset_60V.with_format("numpy")["train"]


for sample in hf_dataset_30V:
    for k, v in sample.items():
        if isinstance(v, np.ndarray):
            print(f"{k}: {type(v)}, shape={v.shape}, dtype={v.dtype}")
        else:
            print(f"{k}: {v}")

output:

config: 30V_Jan24
traj_id: 30V_Tra_0
shape: <class 'numpy.ndarray'>, shape=(3,), dtype=int64
data: <class 'numpy.ndarray'>, shape=(160, 100, 350), dtype=int64
left_right: <class 'numpy.ndarray'>, shape=(160, 2), dtype=int64
barycenter: <class 'numpy.ndarray'>, shape=(160, 2), dtype=float32

How to contribute

import numpy as np
from datasets import Dataset


def gen(config_name: str):
    """Template generator for contributing a new config.

    Replace each `...` placeholder with your own retrieval logic; every
    yielded dict must match the schema of the existing configs (see the
    dataset_info metadata above).
    """
    for data in your_database_retriever(config_name):

        frame = ...
        traj_id = ...
        shape = ...
        left_rights = ...
        barycenters = ...

        # NOTE(review): `frame` is assigned above but the loop variable
        # `data` is what gets yielded — confirm which array is intended;
        # the converting script yields the clipped uint8 `frame`.
        yield {
            "config": config_name,
            "traj_id": traj_id,
            "shape": shape,
            "data": data, # a np.ndarray object of shape `shape`
            "left_right": left_rights,
            "barycenter": barycenters,
        }


# Upload the new config under its own data directory in the repo.
config_name = ...
# Default-argument binding avoids late capture of config_name.
ds = Dataset.from_generator(lambda cn=config_name: gen(cn))
ds = ds.with_format("numpy")

ds.push_to_hub(
    "MLDS-NUS/polymer-dynamics_experimental-data",
    config_name=config_name,
    data_dir=f"{config_name}",
)