# WaterFlowCountersRecognition.py — Hugging Face dataset loading script.
# (Hub page residue — uploader "SIA86", commit e5ecdf2, "raw/history/blame",
# file size 5.13 kB — removed and replaced by this comment so the file parses.)
import json
import os
import collections
import datasets
# BibTeX-style citation advertised on the dataset card.
_CITATION = """\
@SIA86{huggingface:dataset,
title = {WaterFlowCountersRecognition dataset},
author={SIA86},
year={2023}
}
"""
# Short human-readable description used for the dataset card.
_DESCRIPTION = """\
This dataset is designed to detect digital data from water flow counters photos.
"""
# Project homepage shown on the Hub.
_HOMEPAGE = "https://github.com/SIA86/WaterFlowRecognition"
# Region label names; list order defines the integer class ids.
_REGION_NAME = ['value_a', 'value_b', 'serial']
# Rotation classes in degrees.  NOTE(review): identifier is a typo for
# "ROTATION"; kept as-is because other code in this file references it.
_REGION_ROTETION = ['0', '90', '180', '270']
class WaterFlowCounterConfig(datasets.BuilderConfig):
    """Configuration for the WaterFlowCounter dataset builder."""

    def __init__(self, data_url, metadata_urls, **kwargs):
        """Create a builder config.

        Args:
            data_url: `string`, url to download the photos.
            metadata_urls: instance segmentation regions and description
            **kwargs: keyword arguments forwarded to super.
        """
        # Pin the dataset version; everything else is passed through.
        super().__init__(version=datasets.Version("1.0.0"), **kwargs)
        self.metadata_urls = metadata_urls
        self.data_url = data_url
class WaterFlowCounter(datasets.GeneratorBasedBuilder):
    """WaterFlowCounter images dataset.

    Downloads train/test photo archives plus a VIA-format JSON annotation
    file and yields one example per photo, each carrying its polygon
    regions (coordinates, region name, rotation class).
    """

    BUILDER_CONFIGS = [
        WaterFlowCounterConfig(
            name="WFCR_full",
            description="Full dataset which contains coordinates and names of regions and information about rotation",
            # Use /resolve/ (raw file) instead of /blob/ (HTML page view),
            # otherwise the "zip" downloaded is an HTML document.
            data_url={
                "train": "https://huggingface.co/datasets/SIA86/WaterFlowCountersRecognition/resolve/main/data/train_photos.zip",
                "test": "https://huggingface.co/datasets/SIA86/WaterFlowCountersRecognition/resolve/main/data/test_photos.zip",
            },  # missing comma here was a SyntaxError in the original
            # Keyword must match WaterFlowCounterConfig.__init__ ("metadata_urls").
            metadata_urls="https://huggingface.co/datasets/SIA86/WaterFlowCountersRecognition/resolve/main/WaterFlowCounter.json",
        )
    ]

    def _info(self):
        """Return the dataset metadata and feature schema."""
        features = datasets.Features(
            {
                "image": datasets.Image(),
                # One entry per annotated polygon region in the photo.
                "regions": datasets.Sequence(
                    {
                        "all_points_x": datasets.Sequence(datasets.Value("int64")),
                        "all_points_y": datasets.Sequence(datasets.Value("int64")),
                        "name": datasets.ClassLabel(names=_REGION_NAME, num_classes=3),
                        "rotated": datasets.ClassLabel(names=_REGION_ROTETION, num_classes=4),
                    }
                ),
            }
        )
        return datasets.DatasetInfo(
            # _DESCRIPTION was defined at module level but never used.
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download archives/metadata and define the train/test splits."""
        data_files = dl_manager.download_and_extract(self.config.data_url)
        # Attribute is "metadata_urls" (as stored by the config), not "metadata_url".
        metadata_files = dl_manager.download_and_extract(self.config.metadata_urls)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "folder_dir": data_files["train"],
                    "metadata_path": metadata_files,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "folder_dir": data_files["test"],
                    "metadata_path": metadata_files,
                },
            ),
        ]

    # Renamed from "generate_examples": GeneratorBasedBuilder dispatches to
    # the underscore-prefixed "_generate_examples" hook.
    def _generate_examples(self, folder_dir, metadata_path):
        """Yield (key, example) pairs for every photo in *folder_dir*.

        Args:
            folder_dir: directory containing the extracted photos.
            metadata_path: path to the VIA-format JSON annotation file.
        """
        name_to_id = {name: i for i, name in enumerate(_REGION_NAME)}
        rotation_to_id = {name: i for i, name in enumerate(_REGION_ROTETION)}

        with open(metadata_path, "r") as f:
            annotations = json.load(f)

        # Index regions by filename once, instead of rescanning the whole
        # metadata dict for every photo (was O(files * annotations)).
        regions_by_filename = {}
        for entry in annotations["_via_img_metadata"].values():
            regions_by_filename[entry["filename"]] = entry.get("regions", [])

        idx = 0  # example key; was never initialized in the original (NameError)
        for file in sorted(os.listdir(folder_dir)):  # sorted for determinism
            filepath = os.path.join(folder_dir, file)
            with open(filepath, "rb") as f:
                image_bytes = f.read()

            all_x, all_y, names, rotations = [], [], [], []
            for region in regions_by_filename.get(file, []):
                all_x.append(region["shape_attributes"]["all_points_x"])
                all_y.append(region["shape_attributes"]["all_points_y"])
                # VIA stores the chosen attribute value as the single key of a dict.
                names.append(name_to_id[list(region["region_attributes"]["name"].keys())[0]])
                # "rotated" must be per-region to match the Sequence schema;
                # the original kept only the last region's scalar value.
                rotations.append(rotation_to_id[list(region["region_attributes"]["rotated"].keys())[0]])

            yield idx, {
                "image": {"path": filepath, "bytes": image_bytes},
                "regions": {
                    "all_points_x": all_x,
                    "all_points_y": all_y,
                    "name": names,
                    "rotated": rotations,
                },
            }
            idx += 1