NawinCom committed on
Commit
996850f
·
1 Parent(s): 44c00cb

Upload CommyTesting.py

Browse files
Files changed (1) hide show
  1. CommyTesting.py +81 -0
CommyTesting.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ import json
3
+ import os
4
+ import pdb
5
+ import datasets
6
+ import pandas as pd
7
# TODO: Add description of the dataset here
# You can copy an official description
_DESCRIPTION = """\
Dataset for commy test eye diabetic
"""

# TODO: Add a link to an official homepage for the dataset here
_HOMEPAGE = "NawinCom/CommyTesting"

# TODO: Add the licence for the dataset here if you can find it
_LICENSE = ""

# Use the /resolve/ endpoint, not /blob/: /blob/ returns the HTML file-viewer
# page, while /resolve/ returns the raw archive bytes that
# dl_manager.download expects.
_URL = "https://huggingface.co/datasets/NawinCom/CommyTesting/resolve/main/images.zip"

# Mapping from image id (filename without extension) to its diagnosis label.
# NOTE(review): this reads 'Train.csv' from the current working directory at
# import time — confirm the CSV ships alongside this loading script.
train = pd.read_csv('Train.csv')
lis1 = train['id_code']
lis2 = train['diagnosis']
dic = dict(zip(lis1, lis2))
27
+
28
# TODO: Name of the dataset usually matches the script name with CamelCase instead of snake_case
class ImagesDemo(datasets.GeneratorBasedBuilder):
    """Diabetic-eye image dataset: yields (image, label) examples.

    Labels come from the module-level ``dic`` mapping (id_code -> diagnosis)
    built from Train.csv at import time.
    """

    def _info(self):
        """Return dataset metadata: description, feature schema, homepage."""
        return datasets.DatasetInfo(
            # This is the description that will appear on the datasets page.
            description=_DESCRIPTION,
            # Each example is an image plus its diagnosis label; the label is
            # kept as a string, matching the original schema.
            features=datasets.Features(
                {
                    "image": datasets.Image(),
                    "label": datasets.Value("string"),
                }
            ),
            # No (input, target) tuple declared; callers use the raw columns.
            supervised_keys=None,
            homepage=_HOMEPAGE,
        )

    def _split_generators(self, dl_manager):
        """Download the image archive and define the single TRAIN split.

        ``iter_archive`` streams (path, file-object) pairs from the zip
        without extracting the whole archive to disk.
        """
        archive_path = dl_manager.download(_URL)
        image_iters = dl_manager.iter_archive(archive_path)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs are passed to _generate_examples.
                gen_kwargs={
                    "images": image_iters
                },
            ),
        ]

    # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
    def _generate_examples(self, images):
        """Yield (index, example) pairs from the archive iterator.

        Args:
            images: iterable of (filepath, file-object) pairs as produced by
                ``dl_manager.iter_archive``.
        """
        # enumerate replaces the manual idx counter; the leftover debug
        # print(filepath) from the original has been removed.
        for idx, (filepath, image) in enumerate(images):
            # The CSV id_code is the archived filename without its .jpg suffix.
            image_id = filepath.split('/')[-1].replace('.jpg', '')
            yield idx, {
                "image": {"path": filepath, "bytes": image.read()},
                # A KeyError here means an archived image has no row in
                # Train.csv — surfacing it beats silently mislabeling.
                "label": dic[image_id],
            }