ThatGuyVanquish committed on
Commit
ccce5cf
·
1 Parent(s): c7b9c4d

Delete rabbi_kook.py

Browse files
Files changed (1) hide show
  1. rabbi_kook.py +0 -99
rabbi_kook.py DELETED
@@ -1,99 +0,0 @@
1
- import json
2
- import os
3
- from typing import List
4
-
5
- import datasets
6
-
7
- _CITATION = ""
8
-
9
- _DESCRIPTION = ""
10
-
11
- _HOMEPAGE = ""
12
-
13
- _LICENSE = ""
14
-
15
- # The HuggingFace Datasets library doesn't host the datasets but only points to the original raw_files.
16
- # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
17
- _URL = "https://github.com/LiorLevi15/NLP-MINI-PROJECT/tree/master/DATA/parsed_data_splits"
18
- _URLS = {
19
- "train": _URL + "rabbi_kook_train.json",
20
- "test": _URL + "rabbi_kook_test.json",
21
- "dev": _URL + "rabbi_kook_dev.json",
22
- }
23
-
24
-
25
class RabbiKook(datasets.GeneratorBasedBuilder):
    """Paragraph/summary pairs loaded from per-split JSON files (train/test/dev)."""

    VERSION = datasets.Version("1.1.0")

    def _info(self):
        # Each example is a paragraph of text paired with its summary and a string id.
        features = datasets.Features(
            {
                "id": datasets.Value("string"),
                "paragraph": datasets.Value("string"),
                # BUGFIX: this key was misspelled "summery" while _generate_examples
                # yields "summary" — the mismatch broke example encoding.
                "summary": datasets.Value("string"),
            }
        )
        return datasets.DatasetInfo(
            # Wire up the module-level metadata constants (previously declared but unused).
            description=_DESCRIPTION,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            # This defines the different columns of the dataset and their types.
            features=features,
            # If there's a common (input, target) tuple from the features, uncomment the
            # supervised_keys line below and specify them. They'll be used if
            # as_supervised=True in builder.as_dataset.
            # supervised_keys=("paragraph", "summary"),
        )

    def _split_generators(self, dl_manager):
        """Download the three split files and declare one SplitGenerator per split.

        BUGFIX: the original passed the repository *directory* URL to
        ``download_and_extract`` and then ``os.path.join``-ed filenames onto the
        returned path, which only works for extracted archives. Download each
        split file individually via ``_URLS`` instead (this matches the
        commented-out variant the author left in the file).
        """
        downloaded_files = dl_manager.download_and_extract(_URLS)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs will be passed to _generate_examples.
                gen_kwargs={"filepath": downloaded_files["train"], "split": "train"},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"filepath": downloaded_files["test"], "split": "test"},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={"filepath": downloaded_files["dev"], "split": "dev"},
            ),
        ]

    def _generate_examples(self, filepath, split):
        """Yield (key, example) pairs from one split's JSON file.

        The file is expected to be a JSON list of objects carrying "id",
        "paragraph" and "summary" keys (assumed from the yielded keys — confirm
        against the hosted files).
        """
        print(f'generating examples from = {filepath}')
        with open(filepath, encoding="utf-8") as f:
            data_list = json.load(f)
        for data in data_list:
            yield data["id"], {
                "paragraph": data["paragraph"],
                "summary": data["summary"],
                "id": data["id"],
            }
99
-