dimun committed on
Commit
b8cfa52
·
1 Parent(s): 95bc178

added dataset _generate_examples function

Browse files
Files changed (3) hide show
  1. .env +1 -0
  2. dates.py +98 -1
  3. test.py +8 -0
.env ADDED
@@ -0,0 +1 @@
 
 
1
+ LOCAL_PROJECT_PATH="/home/diego/personal-apps/dates"
dates.py CHANGED
@@ -21,4 +21,101 @@ _URLs = {
21
  }
22
 
23
 
24
- logger = datasets.logging.get_logger(__name__)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  }
22
 
23
 
24
+ logger = datasets.logging.get_logger(__name__)
25
+
26
+
27
def has_extension(file_path, extensions):
    """Return True when *file_path*'s extension (lower-cased) is one of *extensions*.

    Args:
        file_path: Path or filename to inspect.
        extensions: Iterable of lower-case extensions including the dot, e.g. [".jpg"].
    """
    ext = os.path.splitext(file_path)[1]
    return ext.lower() in extensions
30
+
31
+
32
class ExpirationDate(datasets.GeneratorBasedBuilder):
    """Dataset builder for expiration-date images annotated with text boxes.

    Each example pairs an image path with per-box transcriptions, bounding
    boxes, and a date-component class label (day/month/year, or "invalid"
    for boxes whose class is outside that set).
    """

    VERSION = datasets.Version("0.0.1")
    # Valid date-component classes found in the annotation files.  Boxes
    # carrying any other "cls" value are mapped to "invalid" in
    # _generate_examples.
    CATEGORIES = ["day", "month", "year"]

    def _info(self):
        """Return the DatasetInfo describing features, license and citation."""
        features = datasets.Features(
            {
                "id": datasets.Value("string"),
                "transcriptions": datasets.Sequence(datasets.Value("string")),
                "bboxes_block": datasets.Sequence(datasets.Sequence(datasets.Value("int64"))),
                # BUG FIX: _generate_examples emits the label "invalid" for
                # boxes whose class is not in CATEGORIES, so the ClassLabel
                # vocabulary must include it — otherwise encoding such an
                # example raises ValueError.
                "categories": datasets.Sequence(
                    datasets.features.ClassLabel(names=self.CATEGORIES + ["invalid"])
                ),
                "image_path": datasets.Value("string"),
                "width": datasets.Value("int32"),
                "height": datasets.Value("int32"),
            }
        )

        return datasets.DatasetInfo(
            # This is the description that will appear on the datasets page.
            description=_DESCRIPTION,
            # Features/targets of the dataset
            features=features,
            # Homepage of the dataset for documentation
            homepage=_HOMEPAGE,
            # License for the dataset if available
            license=_LICENSE,
            # Citation for the dataset
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download both archives and map them to the train/test splits.

        Args:
            dl_manager: datasets.download.DownloadManager used to fetch and
                extract the archives listed in the module-level _URLs dict.
        """
        archive_path = dl_manager.download_and_extract(_URLs)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": os.path.join(archive_path["dates_synth"], "Date-Synth/"),
                    "split": "train",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": os.path.join(archive_path["dates_real"], "Date-Real/"),
                    "split": "test",
                },
            ),
        ]

    def _generate_examples(self, filepath, split):
        """Yield (key, example) pairs for one split.

        Args:
            filepath: Extracted archive directory containing
                ``annotations.json`` and an ``images`` sub-directory.
            split: Split name, used only for logging.

        Yields:
            Tuples of (integer key, example dict) matching the features
            declared in _info.
        """
        logger.info(
            f"⏳ Generating examples from = {filepath} to the split {split}")
        ann_file = os.path.join(filepath, "annotations.json")

        # Annotation file maps image filename -> {"ann": [boxes], "width", "height"}.
        # (Schema inferred from the reads below — confirm against the archives.)
        with open(ann_file, "r", encoding="utf8") as f:
            features_map = json.load(f)

        img_dir = os.path.join(filepath, "images")
        # Sort so that the enumerate() keys are deterministic across
        # filesystems; os.listdir order is otherwise arbitrary.
        img_listdir = sorted(os.listdir(img_dir))

        for guid, filename in enumerate(img_listdir):
            if filename.endswith(".jpg"):
                image_features = features_map[filename]
                image_ann = image_features.get("ann")

                transcriptions = [box.get("transcription", "") for box in image_ann]
                bboxes_block = [box.get("bbox") for box in image_ann]
                # Out-of-vocabulary classes are collapsed to "invalid",
                # which is part of the ClassLabel names declared in _info.
                categories = [box.get("cls") if box.get(
                    "cls") in self.CATEGORIES else "invalid" for box in image_ann]

                # get image
                image_path = os.path.join(img_dir, filename)

                yield guid, {
                    "id": filename,
                    "transcriptions": transcriptions,
                    "bboxes_block": bboxes_block,
                    "categories": categories,
                    "image_path": image_path,
                    "width": image_features.get("width"),
                    "height": image_features.get("height"),
                }
test.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
"""Smoke-test script: load the local dataset builder via its project path.

Reads LOCAL_PROJECT_PATH from a .env file and runs `load_dataset` on it.
"""
from datasets import load_dataset
from dotenv import load_dotenv
import os

# Populate os.environ from the local .env file (expects LOCAL_PROJECT_PATH).
load_dotenv()

LOCAL_PROJECT_PATH = os.getenv('LOCAL_PROJECT_PATH')
# Fail fast with an actionable message instead of passing None to
# load_dataset, which would otherwise surface as an opaque error.
if not LOCAL_PROJECT_PATH:
    raise RuntimeError("LOCAL_PROJECT_PATH is not set; add it to the .env file")

ds = load_dataset(LOCAL_PROJECT_PATH, trust_remote_code=True)