beckhamc commited on
Commit
d56e828
·
verified ·
1 Parent(s): ed422b1

Upload folder using huggingface_hub

Browse files
data/alvan-nee-9M0tSjb-cpA-unsplash.jpeg ADDED

Git LFS Details

  • SHA256: e3fa9ca85ccfba82a40d130ba3ca0b0aa63bc966676f643b5cb1947c8b4071cd
  • Pointer size: 131 Bytes
  • Size of remote file: 677 kB
data/alvan-nee-Id1DBHv4fbg-unsplash.jpeg ADDED

Git LFS Details

  • SHA256: a65d3a853b7c65dd4d394cb6b209f77666351d2bae7c6670c5677d8eb5981644
  • Pointer size: 132 Bytes
  • Size of remote file: 1.16 MB
data/alvan-nee-bQaAJCbNq3g-unsplash.jpeg ADDED

Git LFS Details

  • SHA256: 4cda55c53c11843ed368eb8eb68fd79521ac7b839bdd70f8f89589cf7006ed97
  • Pointer size: 132 Bytes
  • Size of remote file: 1.4 MB
data/alvan-nee-brFsZ7qszSY-unsplash.jpeg ADDED

Git LFS Details

  • SHA256: 9d8013d9efa2edb356e0f88c66de044f71247a99cab52b1628e753c2a08bb602
  • Pointer size: 132 Bytes
  • Size of remote file: 1.19 MB
data/alvan-nee-eoqnr8ikwFE-unsplash.jpeg ADDED

Git LFS Details

  • SHA256: 5c9805758a8f8950a35df820f3bfc32b3c6ca2a0e0e214a7978ea147a233bd54
  • Pointer size: 132 Bytes
  • Size of remote file: 1.17 MB
data/metadata.jsonl ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ {"file_name": "alvan-nee-9M0tSjb-cpA-unsplash.jpeg", "text": "cute dog"}
2
+ {"file_name": "alvan-nee-Id1DBHv4fbg-unsplash.jpeg", "text": "cute dog"}
3
+ {"file_name": "alvan-nee-bQaAJCbNq3g-unsplash.jpeg", "text": "cute dog"}
4
+ {"file_name": "alvan-nee-eoqnr8ikwFE-unsplash.jpeg", "text": "cute dog"}
5
+ {"file_name": "alvan-nee-brFsZ7qszSY-unsplash.jpeg", "text": "cute dog"}
dataset.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from datasets import GeneratorBasedBuilder, DatasetInfo, SplitGenerator, DownloadManager
from typing import Any, Dict, List, Tuple
import os
import json

class ProcessedImageDataset(GeneratorBasedBuilder):
    """Image-captioning dataset builder: pairs image files in ``data_dir``
    with captions read from a sibling ``metadata.jsonl`` file.

    Each metadata line is a JSON object with keys ``file_name`` and ``text``
    (see the metadata.jsonl shipped alongside this script).
    """

    VERSION = "1.0.0"

    def _info(self) -> DatasetInfo:
        """Return dataset metadata (features and supervised keys)."""
        return DatasetInfo(
            # You can add description, citation, homepage, etc.
            features=self._features(),
            # BUG FIX: the features define "image_file", not "image" —
            # supervised_keys must reference existing feature names.
            supervised_keys=("image_file", "text"),
        )

    def _features(self):
        """Declare the example schema: an image plus its caption string."""
        # Local import keeps the module-level import list minimal.
        from datasets import Features, Image, Value
        return Features({"image_file": Image(), "text": Value("string")})

    def _split_generators(self, dl_manager) -> List[SplitGenerator]:
        """Define the splits.

        The data is expected to already be on disk; ``data_dir`` must be
        passed by the caller via ``load_dataset(script, data_dir=...)``.

        Raises:
            ValueError: if ``data_dir`` was not supplied.
        """
        if self.config.data_dir is None:
            # Plain string: the original used an f-string with no placeholders.
            raise ValueError('Data directory unspecified. Correct usage is: load_dataset(script_path, data_dir=data_dir_path)')
        return [SplitGenerator(name="train", gen_kwargs={"data_dir": self.config.data_dir})]

    def _generate_examples(self, data_dir):
        """Yield ``(key, example)`` pairs for every captioned image.

        Reads ``metadata.jsonl`` once into a dict keyed by file name, then
        walks ``data_dir`` and yields only image files that have a caption.
        """
        metadata_file_path = os.path.join(data_dir, "metadata.jsonl")

        # Read metadata and store it in a dictionary keyed by file name.
        metadata = {}
        with open(metadata_file_path, "r") as f:
            for line in f:
                item = json.loads(line)
                metadata[item["file_name"]] = item

        # sorted() makes example order deterministic across filesystems
        # (os.listdir order is arbitrary).
        for filename in sorted(os.listdir(data_dir)):
            # Tuple form of endswith replaces the chained `or` checks.
            if filename.endswith((".png", ".jpg", ".jpeg")) and filename in metadata:
                metadata_entry = metadata[filename]
                yield filename, {
                    "image_file": os.path.join(data_dir, filename),
                    "text": metadata_entry["text"],
                }