AtlasUnified committed on
Commit
841e5fd
·
1 Parent(s): 6a2cae5

Delete atlas-preprocessed-code.py

Browse files
Files changed (1) hide show
  1. atlas-preprocessed-code.py +0 -99
atlas-preprocessed-code.py DELETED
@@ -1,99 +0,0 @@
1
- import gzip
2
- import json
3
-
4
- import datasets
5
-
6
- logger = datasets.logging.get_logger(__name__)
7
-
8
-
9
- _HOMEPAGE = "https://github.com/atlasunified"
10
-
11
-
12
- _DESCRIPTION = "\
13
- Your description here.\
14
- "
15
-
16
- _LICENSE = "odc-by"
17
-
18
- _VARIANTS = {
19
- "C#": {
20
- "version": "1.0.0",
21
- "download_size": 12345678, # replace with actual size
22
- "dataset_size": 12345678, # replace with actual size
23
- "splits": {
24
- "train": {
25
- "num_bytes": 12345678, # replace with actual size
26
- "num_examples": 12345678, # replace with actual size
27
- "files": [
28
- "data/C#.jsonl",
29
- # continue with other file paths...
30
- ],
31
- },
32
- },
33
- },
34
- "C++": {
35
- # similar structure as above, replace with actual data
36
- },
37
- # continue for other languages...
38
- }
39
-
40
- _FEATURES = datasets.Features(
41
- added=datasets.Value("string"),
42
- created=datasets.Value("string"),
43
- id=datasets.Value("string"),
44
- source=datasets.Value("string"),
45
- text=datasets.Value("string"),
46
- version=datasets.Value("string"),
47
- )
48
-
49
- _CITATION = """\
50
- Your citation here.
51
- """
52
-
53
-
54
- class MyHFRepo(datasets.GeneratorBasedBuilder):
55
- """Your dataset description here."""
56
-
57
- BUILDER_CONFIGS = [
58
- datasets.BuilderConfig(name=name, version=config["version"])
59
- for name, config in _VARIANTS.items()
60
- ]
61
-
62
- DEFAULT_CONFIG_NAME = "C#" # replace with the language you want as default
63
-
64
- def _info(self):
65
- """Give information and typings for the dataset."""
66
- return datasets.DatasetInfo(
67
- description=_DESCRIPTION,
68
- features=_FEATURES,
69
- supervised_keys=None,
70
- homepage=_HOMEPAGE,
71
- license=_LICENSE,
72
- citation=_CITATION,
73
- dataset_size=_VARIANTS[self.config.name]["dataset_size"],
74
- download_size=_VARIANTS[self.config.name]["download_size"],
75
- )
76
-
77
- def _split_generators(self, dl_manager):
78
- train_downloaded_files = dl_manager.download(
79
- _VARIANTS[self.config.name]["splits"]["train"]["files"]
80
- )
81
- return [
82
- datasets.SplitGenerator(
83
- name=str(datasets.Split.TRAIN),
84
- gen_kwargs={"filepaths": train_downloaded_files},
85
- ),
86
- ]
87
-
88
- def _generate_examples(self, filepaths):
89
- """This function returns the examples in the raw (text) form by
90
- iterating on all the files."""
91
- id_ = 0
92
- for filepath in filepaths:
93
- logger.info("generating examples from = %s", filepath)
94
- with gzip.open(open(filepath, "rb"), "rt", encoding="utf-8") as f:
95
- for line in f:
96
- if line:
97
- example = json.loads(line)
98
- yield id_, example
99
- id_ += 1