|
|
import huggingface_hub |
|
|
import requests.exceptions |
|
|
import zarr |
|
|
import numpy as np |
|
|
import datasets |
|
|
import fsspec |
|
|
from huggingface_hub import HfFileSystem |
|
|
from fsspec.implementations.zip import ZipFileSystem |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
_CITATION = """\ |
|
|
@InProceedings{ocf:mrms, |
|
|
title = {MRMS Archival Precipitation Rate Radar Dataset}, |
|
|
author={Jacob Bieker |
|
|
}, |
|
|
year={2022} |
|
|
} |
|
|
""" |
|
|
|
|
|
|
|
|
_DESCRIPTION = """\ |
|
|
This dataset consists of MRMS precipitation radar data for the continental United States, |
|
|
sampled at a 1kmx1km area and 2-mimntely spatial resolution. |
|
|
""" |
|
|
|
|
|
_HOMEPAGE = "https://mtarchive.geol.iastate.edu/" |
|
|
|
|
|
_LICENSE = "US Government data, Open license, no restrictions" |
|
|
|
|
|
|
|
|
class ZarrTest:
    """Scratch smoke-tests for writing/reading zarr groups in three settings:
    a local directory store, an ``hf://`` URL, and a zip archive.

    Each method writes the same small group of arrays (``trace``,
    ``plaintext``, ``ciphertext``, ``key``), consolidates metadata, then
    reopens the store and prints or compares the data. All methods are pure
    side effects (disk/network writes and ``print``); none return a value.
    """

    @staticmethod
    def create():
        """Round-trip a zarr group through a local DirectoryStore at ``./data.zarr``."""
        store1 = zarr.DirectoryStore('data.zarr')
        root1 = zarr.group(store=store1)
        # 100x100 int8 array, chunked into 100x10 column blocks; filled with 42,
        # then first row and first column overwritten with 0..99.
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)
        z1[:, 0] = np.arange(100)
        # Three flat uint8 arrays with constant fill values.
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        # Write .zmetadata so the group can be opened via open_consolidated.
        zarr.consolidate_metadata(store1)

        # Reopen the same directory through a second store and verify the
        # 'trace' array reads back identical to what was written above.
        store2 = zarr.DirectoryStore('data.zarr')
        root2 = zarr.group(store=store2)
        test = np.all(z1[:] == root2['trace'][:])
        print(test)
        store1.close()
        store2.close()

    @staticmethod
    def create_in_hf():
        """Attempt the same round-trip against an ``hf://`` dataset URL.

        NOTE(review): ``zarr.DirectoryStore`` interprets its argument as a
        local filesystem path, so the ``hf://datasets/...`` string below is
        unlikely to reach the Hugging Face Hub (an ``FSStore`` over
        ``HfFileSystem``, as used in :meth:`load_from_hf`, would be the
        expected route) — confirm intent before relying on this.
        """
        # Placeholder error handling kept from the original: both the try
        # body and the HTTPError handler are literal `...` and do nothing.
        try:

            ...

        except requests.exceptions.HTTPError as e:

            ...
        store1 = zarr.DirectoryStore('hf://datasets/spikingneurons/test/data.zarr')
        root1 = zarr.group(store=store1)
        # Same fixture arrays as in create().
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)
        z1[:, 0] = np.arange(100)
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        zarr.consolidate_metadata(store1)
        print(z1[:])
        store1.close()

        # Reopen via consolidated metadata and print 'trace' for eyeballing.
        store2 = zarr.DirectoryStore('hf://datasets/spikingneurons/test/data.zarr')
        root2 = zarr.open_consolidated(store2)
        print(root2['trace'][:])
        store2.close()

    @staticmethod
    def create_in_zip():
        """Round-trip the same group through a ZipStore at ``./data.zarr.zip``."""
        store1 = zarr.ZipStore('data.zarr.zip')
        root1 = zarr.group(store=store1)
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)
        z1[:, 0] = np.arange(100)
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        zarr.consolidate_metadata(store1)
        print(z1[:])
        store1.close()

        # NOTE(review): reopened with ZipStore's default mode rather than an
        # explicit read-only mode — confirm this is intended for a read pass.
        store2 = zarr.ZipStore('data.zarr.zip')
        root2 = zarr.open_consolidated(store2)
        print(root2['trace'][:])
        store2.close()

    @staticmethod
    def load_from_hf():
        """Read the previously uploaded zarr data back from the Hugging Face Hub.

        Note that we load data generated via create_in_zip to upload to HF.
        Reads both the plain ``data.zarr`` directory layout (via FSStore) and
        the zipped ``data.zarr.zip`` (via fsspec's ZipFileSystem).
        """
        # NOTE(review): these two imports are unused in this method body.
        from datasets import load_dataset

        import datasets

        # Directory-layout read: FSStore resolves the hf:// URL through fsspec.
        from zarr.storage import FSStore
        store = FSStore('hf://datasets/spikingneurons/test/data.zarr')
        root = zarr.group(store=store)
        print(root['trace'][:])

        # Zip-layout read: open the remote zip as a filesystem and mount it
        # as an FSStore rooted at the archive top level.
        with fsspec.open('hf://datasets/spikingneurons/test/data.zarr.zip', 'rb') as zip_file:
            _zip_fs = ZipFileSystem(zip_file)
            store4 = FSStore(url="", fs=_zip_fs)
            root4 = zarr.open_consolidated(store=store4)
            print(root4['trace'][:])
|
|
|
|
|
# Run the local-directory smoke test only when executed as a script. The
# original called ZarrTest.create() unconditionally at module level, which
# wrote ./data.zarr as a side effect every time this loading script was
# imported (e.g. by the `datasets` library).
if __name__ == "__main__":
    ZarrTest.create()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Test(datasets.GeneratorBasedBuilder):
    """Minimal GeneratorBasedBuilder used to exercise zip archive loading.

    NOTE(review): no ``_generate_examples`` is visible in this chunk; unless
    it is defined elsewhere in the file, the builder is incomplete.
    """

    VERSION = datasets.Version("1.0.0")
    BUILDER_CONFIGS = []

    def _info(self):
        """Return static dataset metadata (features, homepage, license, citation)."""
        _features = datasets.Features(
            {
                "data": datasets.Value("string"),
                "label": datasets.ClassLabel(names=["0", "1"]),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=_features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download ``example.zip`` and hand the same archive to every split.

        Fixes two defects in the original:
        * ``download_and_extract`` was called with a *list*, so it returns a
          list — indexing it with ``"train"``/``"test"``/``"valid"`` raised
          ``TypeError``. All splits now use ``data_dir[0]``.
        * fsspec filesystems are not context managers, so
          ``with fsspec.filesystem("zip", fo=f) as fs:`` raised; the
          filesystem object is now used directly.
        """
        print(dl_manager)
        print(self.config.name)
        urls = ["example.zip"]
        # Returns a list parallel to `urls` (here: a single extracted path,
        # or a streaming-capable URL when streaming).
        data_dir = dl_manager.download_and_extract(urls)
        print(data_dir)

        # Debugging aid kept from the original: list the archive's contents.
        with fsspec.open(data_dir[0]) as f:
            fs = fsspec.filesystem("zip", fo=f)
            all_files = fs.glob("*")
            print("All files in the zip:", all_files)

        streaming = dl_manager.is_streaming
        # One generator per split; all read the same downloaded payload.
        return [
            datasets.SplitGenerator(
                name=split_name,
                gen_kwargs={
                    "filepath": urls if streaming else data_dir[0],
                    "split": split_tag,
                    "streaming": False,
                },
            )
            for split_name, split_tag in (
                (datasets.Split.TRAIN, "train"),
                (datasets.Split.TEST, "test"),
                (datasets.Split.VALIDATION, "valid"),
            )
        ]
|
|
|