Update libritts-aligned.py
Browse files
libritts-aligned.py +2 -2
libritts-aligned.py
CHANGED
|
@@ -240,7 +240,7 @@ class LibriTTSAlign(datasets.GeneratorBasedBuilder):
|
|
| 240 |
textgrid_url=f"https://huggingface.co/datasets/cdminix/libritts-aligned/resolve/main/data/{name.replace('-', '_')}.tar.gz",
|
| 241 |
verbose=_VERBOSE,
|
| 242 |
tmp_directory=os.path.join(_PATH, f"{name}-tmp"),
|
| 243 |
-
chunk_size=
|
| 244 |
n_workers=_MAX_WORKERS,
|
| 245 |
)
|
| 246 |
pickle.dump(ds, open(pkl_path, "wb"))
|
|
@@ -267,7 +267,7 @@ class LibriTTSAlign(datasets.GeneratorBasedBuilder):
|
|
| 267 |
for entry in process_map(
|
| 268 |
self._create_entry,
|
| 269 |
zip([i] * len(ds), np.arange(len(ds))),
|
| 270 |
-
chunksize=
|
| 271 |
max_workers=_MAX_WORKERS,
|
| 272 |
desc=f"processing dataset {hashes[i]}",
|
| 273 |
tqdm_class=tqdm,
|
|
|
|
| 240 |
textgrid_url=f"https://huggingface.co/datasets/cdminix/libritts-aligned/resolve/main/data/{name.replace('-', '_')}.tar.gz",
|
| 241 |
verbose=_VERBOSE,
|
| 242 |
tmp_directory=os.path.join(_PATH, f"{name}-tmp"),
|
| 243 |
+
chunk_size=100,
|
| 244 |
n_workers=_MAX_WORKERS,
|
| 245 |
)
|
| 246 |
pickle.dump(ds, open(pkl_path, "wb"))
|
|
|
|
| 267 |
for entry in process_map(
|
| 268 |
self._create_entry,
|
| 269 |
zip([i] * len(ds), np.arange(len(ds))),
|
| 270 |
+
chunksize=100,
|
| 271 |
max_workers=_MAX_WORKERS,
|
| 272 |
desc=f"processing dataset {hashes[i]}",
|
| 273 |
tqdm_class=tqdm,
|