# test / test.py
# Author: Praveen Kulkarni
# Encoding: utf-8
# Commit: eff6b7e
import huggingface_hub
import requests.exceptions
import zarr
import numpy as np
import datasets
import fsspec
from huggingface_hub import HfFileSystem
from fsspec.implementations.zip import ZipFileSystem
# Find for instance the citation on arxiv or on the dataset repo/website
_CITATION = """\
@InProceedings{ocf:mrms,
title = {MRMS Archival Precipitation Rate Radar Dataset},
author={Jacob Bieker
},
year={2022}
}
"""
# You can copy an official description
_DESCRIPTION = """\
This dataset consists of MRMS precipitation radar data for the continental United States,
sampled at a 1kmx1km area and 2-mimntely spatial resolution.
"""
_HOMEPAGE = "https://mtarchive.geol.iastate.edu/"
_LICENSE = "US Government data, Open license, no restrictions"
class ZarrTest:
    """Ad-hoc experiments writing and reading zarr groups against three
    backends: a local directory store, a local zip store, and the Hugging
    Face Hub via ``hf://`` fsspec URLs.

    All methods print their results for manual inspection rather than
    returning values; this class is a scratchpad, not a library API.
    """
    @staticmethod
    def create():
        # Write a small group ('trace' plus three 1-D arrays) to a local
        # directory store, consolidate metadata, then reopen the same path
        # through a second store and check the 'trace' data round-trips.
        store1 = zarr.DirectoryStore('data.zarr')
        root1 = zarr.group(store=store1)
        # 100x100 int8 array, chunked as 100-row x 10-column blocks.
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)  # first row becomes 0..99
        z1[:, 0] = np.arange(100)  # first column becomes 0..99
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        # Write the consolidated .zmetadata so readers can open the whole
        # group with a single metadata fetch.
        zarr.consolidate_metadata(store1)
        # Independent second store over the same directory, to verify the
        # data actually reached disk (not just the in-memory objects).
        store2 = zarr.DirectoryStore('data.zarr')
        root2 = zarr.group(store=store2)
        test = np.all(z1[:] == root2['trace'][:])
        print(test)  # expected output: True
        store1.close()
        store2.close()
    @staticmethod
    def create_in_hf():
        # Same group layout as create(), but targeting a dataset repo on the
        # Hugging Face Hub through an hf:// URL.
        try:
            ...
            # huggingface_hub.delete_folder(path_in_repo="data.zarr", repo_id="spikingneurons/test", repo_type="dataset")
        except requests.exceptions.HTTPError as e:
            ...
        # NOTE(review): DirectoryStore is built for local paths; remote URLs
        # normally go through zarr's FSStore — confirm this actually resolves
        # the hf:// scheme before relying on it.
        store1 = zarr.DirectoryStore('hf://datasets/spikingneurons/test/data.zarr')
        root1 = zarr.group(store=store1)
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)  # first row becomes 0..99
        z1[:, 0] = np.arange(100)  # first column becomes 0..99
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        zarr.consolidate_metadata(store1)
        print(z1[:])
        store1.close()
        # Reopen remotely via the consolidated metadata written above and
        # print 'trace' to compare against the printed write.
        store2 = zarr.DirectoryStore('hf://datasets/spikingneurons/test/data.zarr')
        root2 = zarr.open_consolidated(store2)
        print(root2['trace'][:])
        store2.close()
    @staticmethod
    def create_in_zip():
        # Same group layout again, but inside a single zip archive
        # ('data.zarr.zip') via zarr's ZipStore backend.
        store1 = zarr.ZipStore('data.zarr.zip')
        root1 = zarr.group(store=store1)
        z1 = root1.zeros('trace', shape=(100, 100), chunks=(100, 10), dtype='i1', overwrite=True)
        z1[:] = 42
        z1[0, :] = np.arange(100)  # first row becomes 0..99
        z1[:, 0] = np.arange(100)  # first column becomes 0..99
        z2 = root1.zeros('plaintext', shape=100, dtype='u1', overwrite=True)
        z3 = root1.zeros('ciphertext', shape=100, dtype='u1', overwrite=True)
        z4 = root1.zeros('key', shape=100, dtype='u1', overwrite=True)
        z2[:] = 22
        z3[:] = 33
        z4[:] = 44
        zarr.consolidate_metadata(store1)
        print(z1[:])
        # ZipStore must be closed before reopening the archive for reading.
        store1.close()
        store2 = zarr.ZipStore('data.zarr.zip')
        root2 = zarr.open_consolidated(store2)
        print(root2['trace'][:])
        store2.close()
    @staticmethod
    def load_from_hf():
        """
        Note that we load data generated via create_in_zip to upload to HF
        """
        from datasets import load_dataset
        import datasets
        # ds = load_dataset("spikingneurons/test", streaming=True, trust_remote_code=True)
        # print(ds)
        # _fs = HfFileSystem()
        # _files = _fs.ls("datasets/spikingneurons/test", detail=False)
        # print(_files)
        # import xarray as xr
        # _arr = xr.open_dataset('hf://datasets/spikingneurons/test/data.zarr', engine='zarr', chunks={})
        from zarr.storage import FSStore
        # Read the unzipped copy straight from the Hub: FSStore resolves the
        # hf:// URL through fsspec/HfFileSystem.
        store = FSStore('hf://datasets/spikingneurons/test/data.zarr')
        root = zarr.group(store=store)
        print(root['trace'][:])
        # store = FSStore('zip://*::hf://datasets/spikingneurons/test/example.zip')
        # store11 = FSStore('zip://hf://datasets/spikingneurons/test/example.zip')
        # store22 = FSStore('zip+hf://datasets/spikingneurons/test/example.zip')
        # store = zarr.DirectoryStore(UPath('hf://datasets/spikingneurons/test') / 'data.zarr')
        # store1 = zarr.DirectoryStore('data.zarr')
        # root1 = zarr.group(store=store1)
        # Read the zipped copy: open the remote zip as a file, wrap it in a
        # zip filesystem, and hand that filesystem to FSStore (url="" points
        # at the archive root).
        with fsspec.open('hf://datasets/spikingneurons/test/data.zarr.zip', 'rb') as zip_file:
            _zip_fs = ZipFileSystem(zip_file)
            store4 = FSStore(url="", fs=_zip_fs)
            root4 = zarr.open_consolidated(store=store4)
            print(root4['trace'][:])
# Manual smoke test: only the local directory-store round-trip runs at
# import time; the zip/HF variants are kept for interactive experimentation.
ZarrTest.create()
# ZarrTest.create_in_zip()
# ZarrTest.load_from_hf()
# ZarrTest.create_in_hf()
class Test(datasets.GeneratorBasedBuilder):
VERSION = datasets.Version("1.0.0")
BUILDER_CONFIGS = []
# DEFAULT_CONFIG_NAME = "default"
def _info(self):
_features = datasets.Features({
"data": datasets.Value("string"),
"label": datasets.ClassLabel(names=["0", "1"]),
})
return datasets.DatasetInfo(
# This is the description that will appear on the datasets page.
description=_DESCRIPTION,
# This defines the different columns of the dataset and their types
features=_features,
# Homepage of the dataset for documentation
homepage=_HOMEPAGE,
# License for the dataset if available
license=_LICENSE,
# Citation for the dataset
citation=_CITATION,
)
def _split_generators(self, dl_manager):
from fsspec.implementations.zip import ZipFileSystem
print(dl_manager)
print(self.config.name)
urls = ["example.zip"]
data_dir = dl_manager.download_and_extract(urls)
print(data_dir)
# Open the zip file using fsspec
with fsspec.open(data_dir[0]) as f:
with fsspec.filesystem("zip", fo=f) as fs:
# List all files in the zip
all_files = fs.glob("*")
print("All files in the zip:", all_files)
streaming = dl_manager.is_streaming
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"filepath": urls if streaming else data_dir["train"],
"split": "train",
"streaming": False,
},
),
datasets.SplitGenerator(
name=datasets.Split.TEST,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"filepath": urls if streaming else data_dir["test"],
"split": "test",
"streaming": False,
},
),
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"filepath": urls if streaming else data_dir["valid"],
"split": "valid",
"streaming": False,
},
),
]