Upload generate_training_data.py
generate_training_data.py (ADDED, +62 -0)
import os
import sys
from tqdm import tqdm
from ipatok import tokenise
from glob import glob
from lhotse import CutSet
from lhotse.shar.writers import SharWriter
from pathlib import Path
import logging


logging.basicConfig(
    level=logging.DEBUG,
    format="[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s",
    datefmt="%Y/%b/%d %H:%M:%S",
    stream=sys.stdout)


# Input directory of Lhotse shar manifests and output directory for the rewritten shards.
inpath = sys.argv[1]
outpath = sys.argv[2]

filelist = glob(inpath + '/**/*.jsonl.gz', recursive=True)

# Collect the unique dataset directories, excluding dev/test splits and the doreco corpus.
datasets = [file.replace(inpath, '') for file in filelist]
datasets = [file.replace(os.path.basename(file), '') for file in datasets]
datasets = list(set(datasets))
datasets = [file for file in datasets if 'dev' not in file and 'test' not in file and 'doreco' not in file]
print(datasets)
logging.info("%s speech train data files found!" % len(datasets))

logging.info("Beginning processing dataset")


data_dir = Path(outpath)
data_dir.mkdir(parents=True, exist_ok=True)
# Re-shard all training cuts into one output SharWriter (FLAC audio, 20,000 cuts per shard).
with SharWriter(data_dir, fields={"recording": "flac"}, shard_size=20000) as writer:

    for i, dataset in enumerate(datasets):

        data_path = inpath + dataset
        logging.info("Processing %s" % data_path)

        # Paired cuts*/recording* shards must line up one-to-one.
        supervision = sorted(glob(os.path.join(data_path, 'cuts*')))
        recording = sorted(glob(os.path.join(data_path, 'recording*')))
        assert len(supervision) == len(recording)

        logging.info("%s shards found" % len(supervision))

        cuts = CutSet.from_shar(
            {
                "cuts": supervision,
                "recording": recording
            }
        )

        for cut in tqdm(cuts):
            writer.write(cut)

        logging.info("Processing done! %s datasets remaining." % (len(datasets) - i - 1))
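Usage sketch (the directory names below are hypothetical, not part of the upload): the script takes an input tree of Lhotse shar shards and an output directory as two positional arguments, for example

    python generate_training_data.py /data/shar_sources /data/shar_train

It scans the input tree for *.jsonl.gz manifests, skips any dataset directory whose path contains dev, test, or doreco, and rewrites the remaining cuts into combined output shards of 20,000 cuts each with FLAC-encoded audio.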