import datasets
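
# Explicit schema for the cleaned Finnish C4 JSON shards; "metadata" is a nested
# struct that only carries the source URL.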
features = datasets.Features(
    {
        "source": datasets.Value(dtype="string", id=None),
        "id": datasets.Value(dtype="string", id=None),
        "text": datasets.Value(dtype="string", id=None),
        "added": datasets.Value(dtype="string", id=None),
        "timestamp": datasets.Value(dtype="timestamp[s]", id=None),
        "metadata": {"url": datasets.Value(dtype="string", id=None)},
    }
)
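
# Load the train and validation splits from the JSON shards in parallel,
# casting every record to the schema defined above.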
dataset = datasets.load_dataset(
    "json",
    data_files={
        "train": "fi_processed/c4-fi.*.json",
        "validation": "fi_processed/c4-fi-validation*.json",
    },
    features=features,
    cache_dir="/researchdisk/datasets_cache",
    num_proc=96,
)
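
# Flatten the nested "metadata" struct into a top-level "metadata.url" column,
# rename it to "url", and drop the fields that are not needed downstream.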
dataset = dataset.flatten()
dataset = dataset.rename_column("metadata.url", "url")
dataset = dataset.remove_columns(["source", "id", "added"])
print(dataset)
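
# Persist the processed DatasetDict to disk; num_proc parallelizes the shard writes.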
dataset.save_to_disk("/researchdisk/mc4_3.1.0_fi", num_proc=96)
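# The saved dataset can later be reloaded with
# datasets.load_from_disk("/researchdisk/mc4_3.1.0_fi").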