# atlas-preprocessed-code / atlas-preprocessed-code.py
# AtlasUnified's picture
# Upload atlas-preprocessed-code.py
# 6a2cae5
# raw
# history blame
# 2.85 kB
import gzip
import json
import datasets
logger = datasets.logging.get_logger(__name__)
# Project homepage displayed on the dataset card.
_HOMEPAGE = "https://github.com/atlasunified"
_DESCRIPTION = "\
Your description here.\
"
# License identifier shown on the Hub (Open Data Commons Attribution).
_LICENSE = "odc-by"
# One entry per language config. Every entry must provide the keys read
# elsewhere in this script: "version" (BUILDER_CONFIGS), "download_size" and
# "dataset_size" (_info), and "splits" -> "train" -> "files"
# (_split_generators).
_VARIANTS = {
    "C#": {
        "version": "1.0.0",
        "download_size": 12345678,  # replace with actual size
        "dataset_size": 12345678,  # replace with actual size
        "splits": {
            "train": {
                "num_bytes": 12345678,  # replace with actual size
                "num_examples": 12345678,  # replace with actual size
                "files": [
                    "data/C#.jsonl",
                    # continue with other file paths...
                ],
            },
        },
    },
    "C++": {
        # FIX: this entry was an empty dict, so the BUILDER_CONFIGS
        # comprehension raised KeyError("version") the moment the module was
        # imported. Placeholder values below keep the script loadable;
        # replace them with actual data.
        "version": "1.0.0",
        "download_size": 12345678,  # replace with actual size
        "dataset_size": 12345678,  # replace with actual size
        "splits": {
            "train": {
                "num_bytes": 12345678,  # replace with actual size
                "num_examples": 12345678,  # replace with actual size
                "files": [
                    "data/C++.jsonl",
                    # continue with other file paths...
                ],
            },
        },
    },
    # continue for other languages...
}
# Schema of a single example: every column is a plain string.
_FEATURES = datasets.Features(
    added=datasets.Value("string"),
    created=datasets.Value("string"),
    id=datasets.Value("string"),
    source=datasets.Value("string"),
    text=datasets.Value("string"),
    version=datasets.Value("string"),
)
_CITATION = """\
Your citation here.
"""
class MyHFRepo(datasets.GeneratorBasedBuilder):
    """Dataset builder exposing one config per programming language.

    Each config is described by an entry in the module-level ``_VARIANTS``
    mapping; examples are read line by line from JSON Lines files (optionally
    gzip-compressed).
    """

    BUILDER_CONFIGS = [
        # .get() keeps module import working even while some _VARIANTS
        # entries are still placeholder stubs without a "version" key.
        datasets.BuilderConfig(name=name, version=config.get("version", "1.0.0"))
        for name, config in _VARIANTS.items()
    ]
    DEFAULT_CONFIG_NAME = "C#"  # replace with the language you want as default

    def _info(self):
        """Return metadata (schema, sizes, license) for the active config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=_FEATURES,
            supervised_keys=None,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            dataset_size=_VARIANTS[self.config.name]["dataset_size"],
            download_size=_VARIANTS[self.config.name]["download_size"],
        )

    def _split_generators(self, dl_manager):
        """Download the active config's train files and declare the split."""
        train_downloaded_files = dl_manager.download(
            _VARIANTS[self.config.name]["splits"]["train"]["files"]
        )
        return [
            datasets.SplitGenerator(
                name=str(datasets.Split.TRAIN),
                gen_kwargs={"filepaths": train_downloaded_files},
            ),
        ]

    def _generate_examples(self, filepaths):
        """Yield ``(key, example)`` pairs by iterating over all data files.

        FIX: the original wrapped *every* file in ``gzip.open``, which raises
        ``BadGzipFile`` for the plain ``.jsonl`` paths listed in
        ``_VARIANTS``; the opener is now chosen from the file extension.
        Lines are also stripped before the emptiness check — a bare newline
        is truthy, so the old ``if line:`` let blank lines through to
        ``json.loads`` and crashed.
        """
        id_ = 0
        for filepath in filepaths:
            logger.info("generating examples from = %s", filepath)
            # Support both gzip-compressed and plain JSONL transparently.
            opener = gzip.open if str(filepath).endswith(".gz") else open
            with opener(filepath, "rt", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if line:  # skip blank / whitespace-only lines
                        yield id_, json.loads(line)
                        id_ += 1