Upload folder using huggingface_hub
This view is limited to 50 files because it contains too many changes. See raw diff.
- .gitattributes +0 -0
- imdanboy/jets/config.txt +1 -0
- imdanboy/jets/decode_train.loss.ave/dev/durations +250 -0
- imdanboy/jets/decode_train.loss.ave/dev/feats_type +1 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.1.scp +32 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.2.scp +32 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.3.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.4.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.5.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.6.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.7.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/keys.8.scp +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.1/durations/durations +32 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.1/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.2/durations/durations +32 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.2/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.3/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.3/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.4/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.4/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.5/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.5/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.6/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.6/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.7/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.7/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.8/durations/durations +31 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/output.8/speech_shape/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.1.log +902 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.2.log +902 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.3.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.4.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.5.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.6.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.7.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.8.log +900 -0
- imdanboy/jets/decode_train.loss.ave/dev/speech_shape +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0008.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0009.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0010.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0011.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0012.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0013.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0014.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0015.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0016.wav +0 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0017.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0018.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0019.wav +3 -0
- imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0020.wav +3 -0
.gitattributes
CHANGED
The diff for this file is too large to render.
See raw diff
imdanboy/jets/config.txt
ADDED
@@ -0,0 +1 @@
+{'train_config': '/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_train_jets_raw_phn_tacotron_g2p_en_no_space/config.yaml', 'model_file': '/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_train_jets_raw_phn_tacotron_g2p_en_no_space/train.total_count.ave_5best.pth'}
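config.txt records the training configuration and the averaged checkpoint that produced the decode outputs in this upload. As a minimal sketch only (not part of the upload itself), a pretrained JETS model can be loaded through ESPnet's Text2Speech interface; the Hub tag "imdanboy/jets", the installed packages, and the presence of a "duration" key in the output are assumptions based on typical ESPnet usage.

```python
# Minimal sketch (assumptions: the model is published under the Hub tag
# "imdanboy/jets" and the espnet, espnet_model_zoo, and soundfile packages
# are installed).
import soundfile as sf
from espnet2.bin.tts_inference import Text2Speech

# Downloads the same config.yaml / *.pth pair referenced above into the local cache.
tts = Text2Speech.from_pretrained("imdanboy/jets")

# JETS is an end-to-end text-to-waveform model, so no separate vocoder is needed.
out = tts("The Presidential limousine.")
sf.write("LJ049-0016.wav", out["wav"].numpy(), tts.fs)

# If the model returns per-token durations, they are the kind of values stored
# in the decode_train.loss.ave/*/durations files of this upload.
print(out.get("duration"))
```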
imdanboy/jets/decode_train.loss.ave/dev/durations
ADDED
@@ -0,0 +1,250 @@
| 1 |
+
LJ049-0008 5 7 9 6 10 9 8 7 6 7 6 6 7 9 11 8 7 7 6 6 6 7 6 6 5 8 9 18 9 8 6 5 6 7 8 8 9 7 7 11 8 7 9 8 8 6 9 9 9 8 5 5 5 6 9 10 16 10 9
|
| 2 |
+
LJ049-0009 5 5 6 5 5 5 6 5 5 6 5 4 4 5 5 5 5 6 5 7 7 5 5 5 7 8 9 7 7 9 10 7 6 5 7 6 7 6 5 5 5 7 8 6 4 4 5 6 6 6 5 6 6 6 6 7 7 6 7 6 6 7 7 6 12 13 9 7 7 5 5 4 5 4 6 7 7 7 6 6 6 8 9 10 10 11 30 9 6 6 7 7 7 8 9 7 7 7 6 5 4 5 5 7 11 10 6 5 5 5 7 8 7 5 6 9 10 7 6 5 6 17
|
| 3 |
+
LJ049-0010 3 7 6 8 7 7 7 6 5 6 5 6 5 6 7 8 8 7 7 7 6 5 6 7 8 7 8 6 5 6 6 8 8 9 19 18 10
|
| 4 |
+
LJ049-0011 13 8 6 6 7 7 8 10 7 6 6 7 8 8 8 6 9 16 21 20 5 5 5 5 6 6 5 5 5 5 3 9 8 7 6 6 6 5 7 6 5 5 6 7 7 7 8 8 8 10 13 14 5 6 6 8 9 11 8 7 7 14 7 6 6 5 4 5 6 6 7 7 7 10 13 12 9 8 5 5 5 5 5 5 5 7 7 7 8 7 5 5 5 5 7 8 6 4 5 6 6 6 6 6 6 8 6
|
| 5 |
+
LJ049-0012 8 8 7 9 9 8 8 12 11 10 46 13 7 6 4 4 6 5 4 4 4 4 5 5 5 5 5 5 5 5 6 6 5 10 8 6 7 8 6 7 8 10 9 13 7 6 6 5 5 6 6 5 5 5 5 5 5 6 6 5 5 7 6 8 8 9 6 4 6 6 7 6 6 6 6 7
|
| 6 |
+
LJ049-0013 5 9 9 8 9 6 5 5 7 9 10 8 7 8 11 7 5 5 6 7 7 7 6 6 7 4 7 6 6 5 5 5 6 5 5 6 7 9 9 7 6 4 4 5 5 5 6 6 5 4 4 4 3 7 9 12 12 9 7 7 6 6 8 10 5 5 6 6 7 6 7 7 7 8 7 6 8
|
| 7 |
+
LJ049-0014 5 5 5 5 5 6 7 6 5 6 6 7 6 7 7 24 9 8 8 6 7 7 6 5 6 6 7 7 7 8 10 9 9 8 7 5 5 5 6 6 6 6 6 5 5 11 10 7 6 6 7 8 7 7 8 9 7 8 7 9 9 9
|
| 8 |
+
LJ049-0015 5 7 22 9 12 8 8 5 5 5 6 6 7 7 6 4 5 5 7 8 7 5 6 9 10 7 6 4 5 7 6 7 8 9 6 5 5 7 7 5 3 4 4 5 5 5 7 8 9 16 7 6 7 6 6 4 4 7 6 6 6 6 6 7 8 10 11 14 12 10
|
| 9 |
+
LJ049-0016 5 5 5 5 6 5 5 5 5 6 6 5 4 5 5 5 8 8 12 9 9
|
| 10 |
+
LJ049-0017 9 7 5 6 5 7 8 10 9 8 9 7 5 7 10 6 5 5 5 5 5 4 4 5 6 6 5 5 6 7 7 8 9 10 8 8 9 8 5 6 6 6 6 6 6 5 5 5 6 8 6 9 5 5 6 5 8 9 10 8 6 6 8 10 35 9 7 6 5 5 6 7 9 9 6 6 8 5 5 5 5 6 10 14 13
|
| 11 |
+
LJ049-0018 5 6 6 5 6 6 7 9 8 10 6 6 6 6 5 6 7 9 23 11 9 11 6 5 6 5 5 6 7 7 7 8 6 7 8 7 9
|
| 12 |
+
LJ049-0019 5 7 8 11 9 7 5 5 5 5 5 5 5 6 6 6 6 6 3 5 6 6 6 4 6 5 7 7 6 7 5 4 7 6 6 6 6 7 4 6 6 6 7 7 8 8 7 10 12 9 7 8 11 9 10 11 7 6 6 5 5 6 7 8 8 8 9 11 8 6 6 7 7 7 7 7 8 7 7 7 8 9 12 10 9
|
| 13 |
+
LJ049-0020 8 5 5 6 6 6 7 7 6 5 7 6 7 7 7 9 7 4 7 5 6 7 6 6 7 6 7 8 8 5 5 5 9 9 8 6 6 4 5 6 7 7 7 6 5 5 7 5 5 5 6 5 6 6 5 5 5 5 5 5 5 5 8 6 5 5 5 6 6 7 7 8 8 8 7 5 6 7 7 6 6 6 5 5 4 6 8 8 9 10 9 13 17
|
| 14 |
+
LJ049-0021 8 8 7 7 6 6 5 6 5 5 6 6 7 9 6 6 6 6 7 7 9 6 6 9 11 8 8 8 10 7 4 5 4 5 5 5 6 6 5 5 5 5 5 6 6 7 5 7 8 10 16 15 10 5 6 8 10 7 6 8 10 13 10 9
|
| 15 |
+
LJ049-0022 6 7 7 6 5 5 5 7 8 7 6 7 9 6 6 8 11 7 10 4 6 5 5 5 5 6 7 6 7 7 6 7 9 8 7 7 8 13 7 6 7 6 7 6 6 5 6 5 5 5 5 6 4 5 7 9 11 11 7 5 4 5 5 6 5 8 16 6 5 7 6 5 5 6 6 6 5 7 5 6 9 6 6 6 6 8 10 13 17 27 12 6 4 5 6 9 8 7 7 8 7 7 8 7 6 7 7 9
|
| 16 |
+
LJ049-0023 10 6 6 6 6 7 8 8 8 8 6 7 8 10 9 11 27 7 7 7 6 5 5 5 7 8 9 7 9 12 24 5 5 5 6 7 8 7 7 7 6 6 6 5 5 6 6 7 9 8 5 6 6 6 9 10 6 6 8 11 10 7 6 11 5 5 7 7 7 7 7 5 9 7 5 5 7 6 7 8 9 8
|
| 17 |
+
LJ049-0024 5 8 6 6 5 5 6 6 5 5 5 7 6 4 4 6 6 6 5 4 4 4 5 6 6 6 7 8 4 4 6 6 6 6 5 6 7 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 18 |
+
LJ049-0025 12 9 7 8 8 6 8 8 8 8 7 5 7 7 5 5 6 6 6 6 5 4 4 5 5 5 6 4 5 5 5 5 5 5 6 6 5 6 10 12 10 10
|
| 19 |
+
LJ049-0026 10 6 7 9 11 7 9 10 13 7 7 6 4 5 5 7 8 8 6 7 9 6 7 6 6 5 6 7 7 6 6 6 6 8 6 5 6 6 5 5 8 12 8 7 8 8 8 9 6 5 3 4 5 4 5 8 8 8 7 4 8 7 6 5 6 6 6 6 5 5 5 5 9 6 5 5 5 5 5 5 6 6 5 6 6 8 6 9
|
| 20 |
+
LJ049-0027 3 5 5 6 5 5 5 4 4 6 13 6 6 6 7 5 5 8 8 8 9 8 7 7 7 7 7 11 20 6 6 4 6 6 5 5 5 6 8 8 9 8 7 11 8 10 19 7 7 8 7 7 8 7 10 10 7 6 7 5 5 5 11 5 6 7 6 5 6 10 9 10 11 8 8 5 5 5 5 6 7 7 6 8 9 8 5 4 4 5 5 5 6 6 5 5 6 6 6 8 9 5 6 6 8 9 12 14 8 10 10
|
| 21 |
+
LJ049-0028 6 7 8 6 5 5 5 7 8 8 6 7 7 3 7 6 6 6 7 8 7 9 10 5 4 7 7 7 5 5 5 6 7 8 5 6 7 7 6 9 12 8 8 8 7 8 5 5 6 8 6 6 4 5 5 4 5 6 8 7 8 9 11 9 9 11 10 10
|
| 22 |
+
LJ049-0029 5 5 7 8 7 8 7 6 6 4 5 5 6 5 6 5 5 6 7 8 9 13 11 8 6 6 5 5 6 6 6 5 6 7 5 7 6 6 6 6 8 8 8 9 10 10 24 6 12 7 6 5 6 6 6 6 8 9 9 5 5 4 4 5 7 7 6 8 8 7 5 4 6 6 6 6 5 5 5 5 5 4 5 5 6 5 6 6 5 6 8 9
|
| 23 |
+
LJ049-0030 5 5 5 5 6 6 6 6 4 6 6 6 5 6 8 11 7 6 5 5 5 4 7 9 8 11 10 6 6 5 5 4 5 4 4 5 4 4 6 6 6 6 6 6 7 7 7 8 7 6 5 6 8 9 8 5 5 4 5 6 6 7 7 8 10 11 9 7 7 9 10 8 8 11 44 6 8 7 8 9 12 9 10 6 6 7 6 6 8 9 7 7 8 7 8
|
| 24 |
+
LJ049-0031 5 5 5 5 6 5 5 5 5 6 6 6 7 6 3 4 6 6 5 5 7 7 7 9 9 7 7 9 11 9 9 11 18 6 6 7 8 8 10 7 7 6 7 9 8 8 7 10 17 14 10
|
| 25 |
+
LJ049-0032 6 6 7 7 11 12 6 6 7 6 5 6 7 10 12 12 13 6 7 6 5 6 6 5 5 5 19 6 6 7 5 4 3 3 5 6 7 7 6 6 5 5 4 5 7 7 6 5 5 5 7 8 7 5 6 9 10 7 6 7 8 8 8 6 5 4 4 5 5 5 6 8 9 8 8 8 5 5 6 6 6 6 5 5 6 7 13
|
| 26 |
+
LJ049-0033 7 10 8 11 7 7 6 5 5 5 6 8 8 8 7 4 8 9 8 7 6 6 7 5 7 5 6 7 7 6 5 5 5 5 5 7 14 8 6 6 8 15 16 18 10 6 6 5 7 6 5 5 6 5 5 5 5 6 9 9 10 7 7 9 12 7 17 8 5 7 7 7 8 7 7 6 7 4 20 7 5 6 9 12 7 6 6 8 8 8 6 5 4 4 5 5 5 6 5 5 6 7 11 13 16
|
| 27 |
+
LJ049-0034 7 5 4 3 4 7 7 9 8 7 5 5 5 5 6 6 5 5 5 5 7 6 7 10 13 10 6 6 4 4 7 6 7 6 5 4 5 6 4 4 4 5 5 7 7 7 7 7 5 4 5 7 9 8 7 6 6 11 9 11 49 7 7 6 5 6 4 5 5 5 6 6 5 6 6 8 6 9
|
| 28 |
+
LJ049-0035 10 7 7 6 11 15 9 8 12 11 9 7 8 6 6 5 5 5 5 5 6 6 5 5 5 4 4 5 6 7 6 5 6 10 9 8 6 5 5 6 9 7 11 8 7 5 5 6 6 6 5 5 11 7 7 10 9 6 8 7 8 8 9 9 8 7 6 7 7 5 6 7 7 6 5 6 7 10 8 4 6 5 5 6 5 5 6 7 11 13 16
|
| 29 |
+
LJ049-0036 4 9 9 7 9 7 6 8 8 8 7 5 9 9 7 5 5 5 6 7 7 7 8 11 12 11 12 10
|
| 30 |
+
LJ049-0037 8 8 7 7 6 7 8 13 12 11 33 8 6 7 10 9 5 5 5 5 5 5 5 6 6 6 6 7 4 5 6 6 8 7 6 7 6 7 10 8 8 7 8 6 6 7 6 6 6 5 8 9 12 8 5 5 4 5 7 7 7 9 10 7 10 13 10
|
| 31 |
+
LJ049-0038 6 6 7 8 11 10 9 8 7 6 8 9 7 6 9 8 6 5 7 12 9 8 10 5 6 6 7 8 11 7 5 5 5 6 6 10 7 7 8 9 7 10 7 7 10 9 5 5 5 5 5 5 5 5 5 9 8 8 7 8 31 7 7 7 5 6 10 10 11 8 7 5 7 11 7 5 5 5 6 6 9 6 6 7 6 8 6 5 6 6 7 11 16
|
| 32 |
+
LJ049-0039 8 8 7 7 6 6 4 4 6 6 6 6 5 5 7 6 7 7 6 8 7 9 5 5 4 5 8 8 8 7 4 8 7 6 5 6 6 6 6 5 5 5 5 5 5 6 7 6 5 6 10 8 12 6 13 10 12 6 8 9 10 12 11 8 9
|
| 33 |
+
LJ049-0040 7 7 8 8 8 8 6 6 7 8 7 7 6 5 4 9 11 12 8 9 3 6 5 5 6 4 4 3 4 4 6 6 6 4 5 7 7 8 8 8 6 7 7 6 6 6 7 8 17
|
| 34 |
+
LJ049-0041 8 5 5 5 4 4 6 6 6 6 5 5 5 5 4 4 5 5 5 5 6 5 5 6 5 9 6 5 5 5 5 5 5 5 5 5 5 6 6 5 6 6 6 7 7 7 6 6 6 6 7 8 7 7 6 6 7 8 10 8 7 8 8 6 5 5 5 5 5 5 6 5 5 5 5 8 6 8 9 9 6 5 5 5 7 8 7 5 6 9 10 7 6 5 7 6 5 5 7 8 9 9 7 9 8 7 6 7 8 9 7 8 8 8
|
| 35 |
+
LJ049-0042 10 7 7 9 7 10 15 13 7 9 12 7 6 5 4 6 5 5 5 5 6 7 7 6 6 9 6 9 7 6 8 5 5 6 8 7 6 7 8 6 7 9 18 46 13 5 7 7 8 6 6 6 5 5 6 5 6 6 8 8 8 6 7 7 8 9 9 7 5 5 6 6 7 6 8
|
| 36 |
+
LJ049-0043 10 7 7 7 11 35 9 10 10 12 7 6 4 4 6 5 6 5 6 8 9 7 10 10 5 5 5 5 6 6 5 4 4 4 3 6 9 13 11 10 5 4 5 5 5 6 6 7 7 7 5 4 5 5 9 9 8 5 5 6 6 7 6 6 5 5 5 6 7 8 6 5 7 11 9 15 8 9
|
| 37 |
+
LJ049-0044 12 7 4 4 5 5 5 6 8 13 6 8 6 7 7 7 5 4 5 5 8 6 8 9 10 8 9 9 11 18 8 6 5 7 10 11 7 7 7 10 6 5 5 6 7 7 8 7 6 5 4 5 5 5 6 6 5 5 5 5 20 6 7 7 7 8 7 7 9 7 5 8
|
| 38 |
+
LJ049-0045 6 6 7 9 7 8 10 10 8 8 6 6 5 5 6 9 8 8 5 4 6 9 8 9 8 7 6 8 9 11 9 9 14 52 14 7 5 5 6 7 5 5 5 6 7 7 12 10 13 9 8 14 7 6 7 9 6 5 5 5 5 5 6 6 5 4 4 4 4 8 7 7 8 6 6 7 7 8 6 5 7 7 8 7 7 7 6 5 5 5 4 5 6 8 7 9 8 7 6 6 5 7
|
| 39 |
+
LJ049-0046 10 7 7 10 13 20 23 13 8 8 7 7 7 8 7 5 5 5 6 7 8 9 8 8 6 6 6 8 13 10 8 9 7 8 8 7 6 5 5 7 9 10 7 7 9 7 6 7 9 9 6 5 7 9 10 10 10 7 7 9 12 16
|
| 40 |
+
LJ049-0047 8 8 8 11 10 7 6 5 5 5 7 8 8 9 7 6 6 7 8 7 5 4 5 6 8 11 7 6 4 5 5 5 5 5 5 5 5 5 5 6 6 5 6 8 14 14 7 7 8 8 9 10 9 8 10 9 6 6 7 8 8 10 19 19 9 7 7 8 10 10 12 13 20
|
| 41 |
+
LJ049-0048 4 5 6 5 6 6 8 6 6 6 6 6 7 7 4 5 4 5 4 6 7 7 7 8 8 8 8 6 10 7 7 10 9 7 6 6 5 5 13 11 8 5 5 4 5 5 5 6 6 5 5 5 4 4 5 6 7 11 13 15 7 5 6 7 10 10 8 8
|
| 42 |
+
LJ049-0049 11 6 8 8 8 8 9 8 8 8 6 9 6 5 6 8 8 7 6 6 5 5 12 11 8 5 5 4 5 5 5 6 6 5 5 5 4 5 5 7 9 7 7 7 6 7 10 10 12 10 7 9
|
| 43 |
+
LJ049-0050 5 7 6 6 5 7 6 10 11 9 10 18 5 5 5 6 9 15 11 12 8 5 5 5 8 8 8 9 6 6 5 9 6 10 8 15 7 7 8 8 7 7 7 4 5 5 6 5 5 6 6 5 3 6 5 5 5 7 6 7 7 6 5 5 7 6 8
|
| 44 |
+
LJ049-0051 8 7 9 6 5 6 6 5 10 7 7 7 7 9 9 8 5 5 6 7 6 5 6 6 8 13 13 13 10
|
| 45 |
+
LJ049-0052 8 6 6 7 7 8 9 8 10 8 7 5 7 6 5 7 10 12 11 6 5 5 5 5 5 6 7 6 5 6 6 6 6 5 4 4 5 4 6 5 6 6 5 5 6 6 6 7 7 7 9 9 11 7 6 4 4 7 6 5 7 7 7 7 8 10 14 16
|
| 46 |
+
LJ049-0053 10 6 6 7 8 6 6 7 7 7 5 6 7 8 9 9 9 10 6 4 5 8 8 8 6 6 9 7 5 6 5 5 6 7 6 5 5 5 6 10 9 8 7 8 7 6 6 9 10 5 7 10 10 7 5 5 5 4 6 7 8 14 16 12 35 7 7 7 6 5 5 5 7 8 7 5 6 10 7 6 7 7 8 7 6 7 6 11 7 7 8 9 8
|
| 47 |
+
LJ049-0054 6 7 8 6 5 5 5 7 8 8 6 7 8 6 6 6 5 5 7 6 6 6 5 4 5 4 6 12 9 7 7 6 14 7 9 8 6 6 6 6 5 6 5 4 7 5 5 5 8 7 12 7 5 7 7 8 6 6 8 11 9 9 7 6 7 8 5 5 5 5 4 5 5 5 6 6 5 6 6 8 6 9
|
| 48 |
+
LJ049-0055 18 11 12 7 6 5 4 5 6 10 10 12 8 7 7 5 5 6 7 6 7 7 7
|
| 49 |
+
LJ049-0056 6 7 7 9 9 7 9 7 8 7 6 4 6 8 11 7 6 4 4 5 5 5 5 6 6 7 7 8 10 13 22 9 7 7 7 8 7 7 6 6 12 7 6 7 6 5 6 5 5 5 5 5 6 5 5 5 6 9 10 8 7 7 8 9 8 6 7 8 7 8 7 7 6 7 6 10 7 6 5 6 6 6 5 5 5 5 6 8 12
|
| 50 |
+
LJ049-0057 5 7 7 7 11 11 8 8 5 4 6 6 5 9 9 7 6 6 4 5 5 6 5 6 6 7 8 8 4 5 7 9 9 8 7 8 5 5 6 6 5 5 5 5 5 5 6 6 5 5 6 6 6 7 11 9 7 7 9 11 7 8 8 6 7 8 9 7 8 8 8
|
| 51 |
+
LJ049-0058 13 7 5 4 5 6 8 12 41 16 7 6 5 4 5 6 7 7 7 6 5 5 5 5 5 5 6 6 6 6 7 7 10 7 5 5 5 5 6 6 5 5 5 5 10 9 8 6 5 7 7 8 7 8 8 7 8 7 7 5 4 5 6 4 5 7 9 16 19 13 7 7 8 7 6 7 5 6 7 8 9 7 8 8 8
|
| 52 |
+
LJ049-0059 13 7 6 5 4 5 6 7 7 7 6 5 5 6 6 6 6 7 15 8 12 6 6 6 6 6 7 6 5 5 5 5 6 6 7 6 7 5 4 5 6 6 6 10 16 8 9 7 5 6 7 7 5 5 7 10 24 10 7 8 7 7 7 7 9 7 5 6 5 5 5 6 5 5 5 7 8 7 9 9 9
|
| 53 |
+
LJ049-0060 6 6 5 5 6 5 5 7 6 5 7 10 6 6 6 5 5 7 10 9 7 9 9 7 5 5 6 5 7 8 8 7 9 23 11 9 8 8 8 8 8 9 13 12 9 7 7 8 9 7 6 5 4 5 5 5 6 6 5 5 5 5 6 6 8 8 5 5 4 5 7 4 4 7 6 7 6 7 8 22 7 7 11 9 13 16 14 10 10
|
| 54 |
+
LJ049-0061 11 12 7 7 6 7 6 7 5 5 5 6 6 6 8 10 8 10 10 8 7 6 7 7 7 8 10 10 11 7 6 4 5 7 6 7 6 4 5 5 5 6 6 5 5 5 4 5 5 5 5 6 7 8 6 5 5 4 4 5 6 8 9 8 8 11 8 14 10 9 8 5 5 7 6 7 7 8 10 11 10 10 14 11 7 5 6 8 9 11 10 8 9
|
| 55 |
+
LJ049-0062 5 5 7 7 7 6 7 6 7 8 6 6 6 7 6 7 5 4 5 6 6 6 10 20 9 7 7 7 7 9 10 9 8 7 7 6 5 4 4 7 6 7 6 2 5 5 5 5 6 6 5 6 6 8 6 9
|
| 56 |
+
LJ049-0063 7 8 8 7 7 6 10 8 7 6 5 5 4 7 5 5 5 8 7 16 8 7 7 6 5 5 5 7 8 7 6 7 15 6 6 7 9 6 8 10 7 11 9 9 8 6 5 5 6 7 6 5 5 6 8 11 8 9 9 8 8 9 7 6 7 8 7 7 8 14 14 8 7 7 8 8 8 8 7 6 6 8 11
|
| 57 |
+
LJ049-0064 5 5 7 7 6 6 5 5 6 8 9 9 8 7 7 5 7 8 7 7 5 5 6 5 5 6 8 7 8 8 9 6 6 6 6 6 7 7 7 10 7 7 6 8
|
| 58 |
+
LJ049-0065 11 8 5 5 5 6 6 6 9 7 7 8 4 5 7 8 8 7 5 5 5 7 6 7 6 8 7 7 7 7 7 6 5 6 7 7 7 8 6 7 7 6 7 12 7 8 9 6 6 4 5 8 8 8 6 6 10 15 8 5 5 7 7 9 7 6 9 12 8 9
|
| 59 |
+
LJ049-0066 4 5 6 7 8 6 6 7 7 10 34 7 7 8 9 8 8 9 12 9 8 7 9 10 8 7 8 11 7 6 6 11 10 6 5 5 5 7 8 7 5 6 9 10 7 5 4 4 5 6 7 5 7 8 8 6 7 5 5 6 9 10 10 9 11 35 12 7 5 6 7 7 8 6 6 6 13 7 7 7 8 7 7 7 5 3 4 5 8 7 7 9 10 8 9 9
|
| 60 |
+
LJ049-0067 6 6 8 7 7 8 11 7 10 9 7 6 7 9 10 7 6 6 7 7 8 9 12 8 16 9 7 8 8 9 7 6 7 8 3 6 7 8 7 6 6 6 5 8 6 5 5 5 6 6 7 6 6 5 4 3 5 6 6 8 7 5 5 6 6 5 5 5 4 5 6 8 9 8 7 6 7 5 8 7 8 8 7 7 8 7 7 6 5 6 9 12 10 8 12 10
|
| 61 |
+
LJ049-0068 11 8 8 7 7 11 9 7 5 5 5 6 6 7 7 8 11 12 7 9
|
| 62 |
+
LJ049-0069 7 7 10 7 9 7 7 6 7 6 7 12 13 7 5 5 5 6 7 7 6 5 6 6 6 7 7 7 8 6 6 10 5 5 7 8 7 9 7 6 8 11 11 17 8 9 12 14 11 8 9
|
| 63 |
+
LJ049-0070 12 7 9 9 9 9 13 6 6 4 5 5 7 7 7 6 10 9 9 8 6 5 5 6 7 6 5 5 6 7 8 7 7 8 10 9 7 7 8 8 7 7 8 5 7 6 6 8 6 6 5 7 9 10 8 5 5 5 6 9 7 7 7 6 5 5 5 6 6 5 5 6 5 5 7 7 6 8 12 14 10 5 7 7 9 7 5 5 6 7 6 6 6 7 7 7 8 9 9 11 11 10
|
| 64 |
+
LJ049-0071 8 8 6 6 5 5 7 10 8 7 5 5 6 8 9 8 8 7 8 10 11 10 14 12 27 6 5 4 5 5 4 4 6 6 6 6 6 6 6 5 6 7 11 10 9 7 4 6 7 7 6 9 7 9 8 6 6 6 8 10 9 7 7 10 13 14 11 7 9
|
| 65 |
+
LJ049-0072 11 14 8 6 6 6 6 7 9 6 6 6 6 5 6 8 9 7 5 5 5 6 8 8 6 6 5 4 4 6 7 7 7 7 7 7 8 6 7 8 6 7 7 6 6 6 6 5 6 6 6 9 7 6 9 13 11 37 8 5 5 5 6 6 7 7 10 9 12 9 7 7 7 6 6 6 7 7 6 5 5 5 7 8 7 5 6 9 10 7 6 4 4 5 5 5 5 6 6 7 7 8 10 14 16
|
| 66 |
+
LJ049-0073 4 6 6 7 9 8 7 5 5 5 5 5 7 6 6 6 7 6 7 5 5 5 4 6 4 4 4 4 5 5 5 6 6 5 5 4 4 4 14 8 8 8 7 11 8 9 9 7 6 5 7 6 6 5 5 6 5 7 16 6 6 5 6 9 10 7 7 10 8 9 7 5 7 11 11 12 11 8 9
|
| 67 |
+
LJ049-0074 6 6 8 10 8 6 8 7 10 7 6 5 5 6 6 6 7 9 10 10 6 5 5 5 5 5 6 6 5 5 7 6 8 10 10 7 5 6 6 8 9 7 15 8 5 5 5 6 7 6 6 6 11 9 10 9 11 7 8 7 5 6 7 9 9 8 7 6 9 8 7 8 7 7 4 4 5 5 8 11 7 6 4 4 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 68 |
+
LJ049-0075 6 5 5 4 4 5 5 8 9 9 9 12 10 10
|
| 69 |
+
LJ049-0076 5 5 5 5 6 6 7 7 7 8 8 6 7 9 9 5 5 5 5 4 5 5 6 7 6 8 6 7 4 5 6 5 5 5 5 5 5 5 5 6 5 4 4 4 7 6 7 7 7 8 5 5 6 5 6 9 8 7 5 6 5 5 5 5 5 5 5 5 5 6 5 5 5 7 7 8 6 5 7 6 5 8 9 9 7 8 21 7 5 5 5 6 6 6 6 7 7 4 7 7 6 12 5 7 7 7 7 5 5 7 7 7 7 6 5 5 5 5 6 7 7 6 6 6 5 6 11 10
|
| 70 |
+
LJ049-0077 10 6 6 6 6 7 8 8 8 8 6 7 8 10 9 11 29 7 7 7 6 5 5 5 7 8 8 7 8 9 6 8 6 5 3 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 9 12 15
|
| 71 |
+
LJ049-0078 5 7 7 6 6 7 7 7 8 9 8 7 6 4 3 6 6 7 6 7 7 7 8 7 6 6 7 8 8 6 6 8 8 5 4 5 5 17 7 10 11 11 11 11 6 6 5 5 5 5 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 72 |
+
LJ049-0079 7 5 6 9 9 8 6 6 9 6 6 5 6 8 6 6 6 6 7 5 4 6 8 15 6 5 7 8 9 9 9 10 6 8 7 7 6 5 5 5 6 7 8 8 7 9 28 14 8 6 5 5 6 7 17 6 5 6 7 7 8 8 9 9 6 5 4 5 5 6 5 6 7 7 6 7 6 6 6 7 6 8 7 6 12 9 6 7 7 8 7 6 5 6 12 5 5 4 8 8 8 7 9 11 10 10
|
| 73 |
+
LJ049-0080 14 12 9 8 6 6 8 8 7 9 8 7 5 6 6 7 6 10 11 16 15
|
| 74 |
+
LJ049-0081 5 4 4 5 6 6 8 9 12 5 6 6 7 5 6 5 4 4 4 5 6 7 6 7 6 6 6 6 5 6 7 7 9 9 6 6 6 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 5 5 5 5 5 5 7 6 5 6 7 6 8
|
| 75 |
+
LJ049-0082 6 6 6 5 5 7 5 5 8 8 8 7 6 5 5 5 5 6 8 10 6 5 5 5 6 6 8 8 8 8 6 7 8 8 7 8 25 6 5 5 5 6 6 7 7 7 4 5 5 5 5 5 7 7 6 7 8 8 11 12 32 12 5 6 7 6 6 7 7 6 6 6 6 5 6 7 8 23 7 5 3 4 4 5 5 5 6 10 10 9 7 7 6 8
|
| 76 |
+
LJ049-0083 8 4 5 7 8 7 10 9 5 5 5 9 7 5 6 6 7 5 9 10 11 14 7 6 5 5 7 8 10 12 14 8 7 9 7 9 8 8 7 9 8 5 7 8 7 9 9 9 9 10 12 30 8 6 7 8 8 8 8 10 11 28 15 17 9 7 6 7 8 7 6 6 7 9 13 11 9 11 12 34 7 8 8 9 8 6 7 6 7 8 8 8 8 6 5 5 5 8 8 7 8 8 7 6 5 5 6 9 13 10 11 14 10
|
| 77 |
+
LJ049-0084 10 9 9 7 5 5 5 5 5 5 6 6 5 5 5 4 5 7 7 7 6 5 6 8 6 6 7 8 6 6 7 6 7 10 10 5 6 6 7 9 12 9 9 7 7 7 6 12 28 11 6 5 7 6 7 6 7 6 5 5 5 6 8 8 9 8 7 6 4 4 5 5 6 7 9 8 10 10 8 6 5 5 6 7 6 7 6 6 5 7 6 6 9 10 8 6 5 5 8 7 6 5 5 6 5 9 9 10 8 7 8
|
| 78 |
+
LJ049-0085 5 6 7 7 8 7 5 7 7 7 7 7 8 5 5 4 5 5 6 8 6 6 7 8 6 7 7 6 6 6 7 7 6 6 7 7 6 8
|
| 79 |
+
LJ049-0086 7 6 7 7 8 6 7 5 6 7 8 10 9 7 9 8 9 6 5 6 8 6 7 6 7 7 7 6 7 7 7 6 6 6 6 7 7 8 7 7 9 12 9 8 8 7 6 6 6 7 6 6 6 8 8 8 8 7 7 5 5 5 7 9 9 8 7 7 7 6 5 5 5 7 8 6 8 8 9 8 9 8 6 6 8 6 8 9 9 8 11 13 10 9
|
| 80 |
+
LJ049-0087 5 5 7 9 10 7 6 7 6 8 5 6 8 9 11 10 7 5 5 5 5 5 6 6 5 5 5 5 6 7 10 11 13 13 25 11 7 6 5 4 5 5 6 8 8 7 6 6 5 5 5 5 5 6 6 5 5 5 5 9 10 7 10 7 5 6 6 5 6 9 9 10 10 12 10
|
| 81 |
+
LJ049-0088 10 8 7 10 13 6 5 6 7 9 10 7 5 5 5 9 9 7 8 8 8 10 6 5 5 5 6 5 6 7 5 6 6 5 6 7 7 7 7 8 8 8 6 7 7 7 6 5 5 7 7 7 6 5 4 5 5 8 10 9 8 11 14 11 10
|
| 82 |
+
LJ049-0089 6 7 7 8 10 7 7 9 7 5 6 7 8 8 8 11 9 11 13 18 7 3 8 5 8 16 9 9 9 8 11 8 5 7 9 7 7 8 8 10 8 9 17 9 11 7 8 6 5 5 5 6 6 7 8 7 8 7 5 7 7 6 6 6 6 7 7 7 7 6 6 5 6 8 7 7 14
|
| 83 |
+
LJ049-0090 14 5 6 7 7 7 12 7 8 7 8 7 8 7 5 3 4 6 9 8 8 9 13 47 11 8 10 7 5 6 6 7 6 10 12 13 10 9
|
| 84 |
+
LJ049-0091 14 7 5 6 6 9 8 7 7 6 6 8 8 8 6 8 8 8 10 7 7 6 6 7 9 5 5 6 6 7 7 9 7 9 13 6 8 7 5 7 6 6 6 6 6 7 10 13 10 10 10 10 5 7 7 6 5 5 5 7 8 8 6 7 12 7 6 4 4 7 6 7 6 2 5 5 5 5 6 6 5 6 7 7 6 8
|
| 85 |
+
LJ049-0092 5 5 7 9 10 12 11 10 9 8 6 6 6 5 7 14 9 9 8 8 8 5 7 7 7 6 7 7 8 8 7 7 5 7 6 10 37 8 8 8 8 8 5 5 6 6 6 6 8 9 16 11 10 10 10 9 8 8 9 8 6 7 4 8 13 9 10 10 6 5 5 6 10 10 7 7 7 6 7 12
|
| 86 |
+
LJ049-0093 11 7 8 6 6 6 6 7 5 5 5 6 9 10 7 5 5 6 8 7 7 8 8 10 9 12 8 8 7 6 5 5 5 7 7 7 7 6 8 7 6 7 7 6 12 8 9 9 7 6 6 7 8 9 10 8 5 6 6 7 8 5 6 5 9 9 5 6 7 7 6 10 9 9 9 7 8 7 6 6 6 5 6 6 10 12 13 10
|
| 87 |
+
LJ049-0094 5 5 5 5 6 6 7 6 8 8 6 8 13 8 5 5 5 5 7 7 6 5 5 5 7 8 9 7 8 10 28 6 5 5 5 6 6 5 6 9 11 10 11 10 10 20 7 6 7 7 10 9 10 11 8 6 6 5 6 10 10 7 7 7 6 7 13
|
| 88 |
+
LJ049-0095 6 6 9 13 9 8 7 7 8 7 7 6 5 5 5 6 5 5 6 6 7 6 6 6 7 6 6 6 7 9 10 25 12 8 7 7 12 14 14 7 6 5 4 7 12 7 9 7 5 6 7 5 8 6 7 9 8 11 9 12 10
|
| 89 |
+
LJ049-0096 6 4 6 6 7 6 5 5 7 9 7 5 5 5 5 7 9 9 9 8 7 7 7 5 6 6 6 7 8 8 8 6 7 7 8 7 14 9 7 5 6 6 7 6 10 11 14 14 26 5 4 7 6 6 4 5 5 6 5 7 9 9 7 6 6 6 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 6 5 5 5 5 11 5 8 7 6 7 8 16
|
| 90 |
+
LJ049-0097 4 6 8 6 6 8 7 7 6 5 5 4 5 5 6 8 9 8 6 7 4 5 5 5 5 5 5 4 4 7 7 7 6 9 8 6 5 6 6 5 6 8 7 5 9 9 10 8 5 6 7 9 10 9
|
| 91 |
+
LJ049-0098 8 7 6 8 7 8 8 9 14 7 9 7 10 10 11 9 11 9 9 7 9 9 9 9 8 8 8 6 6 8 9 6 5 7 9 10 7 7 12 8 9 7 6 5 4 6 7 9 8 6 5 5 5 19 9 5 5 5 7 7 6 5 6 7 6 7 8 8 10 9 15 7 6 6 6 7 6 7 5 5 5 7 8 6 6 5 4 6 6 6 7 9 9 10 10 9
|
| 92 |
+
LJ049-0099 11 8 5 5 5 6 7 8 7 11 8 7 7 8 7 4 5 5 6 7 7 7 6 7 7 6 5 5 5 5 6 11 9 7 6 6 5 5 5 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 6 5 5 5 5 5 6 6 5 6 7 6 8
|
| 93 |
+
LJ049-0100 5 5 5 5 5 6 7 6 7 6 6 5 6 7 7 6 6 7 6 6 5 5 7 9 6 5 7 9 11 19 7 8 7 6 9 8 7 7 6 5 8 6 7 8 8 8 8 8 8 16 6 10 9 11 8 9
|
| 94 |
+
LJ049-0101 7 6 6 8 8 6 7 7 9 13 9 9 10 8 12 10 10 8 8 8 10 12 10 7 9 7 7 6 5 6 9 6 5 5 7 7 6 6 8 8 9 10 8 10 7 9 11 11 29 9 6 6 6 8 7 6 9 12 6 5 6 6 9 9 8 8 6 7 9 16 20
|
| 95 |
+
LJ049-0102 5 5 5 5 6 6 5 5 6 5 7 27 8 10 9 5 5 5 5 5 6 7 6 10 15 11 9 8 7 9 11 8 7 8 9 8 7 7 6 6 4 4 5 8 8 6 8 9 6 6 6 5 7 8 8 8 8 21 6 5 6 9 9 9 8 6 6 6 4 5 5 6 5 6 7 6 9 17 6 5 5 5 6 6 5 5 5 4 5 7 7 8 8 9 12 9 5 5 6 7 9 8 5 5 5 5 5 5 5 4 5 7 8 9 10 8 8
|
| 96 |
+
LJ049-0103 7 5 7 7 6 6 6 7 7 7 9 11 10 18 7 7 6 7 6 6 6 6 13 9 6 5 6 6 5 5 7 6 7 10 9 7 5 5 5 5 6 8 8 6 6 5 5 6 5 5 5 5 5 6 8 6 6 7 6 7 8 7 8 10 18 10 7 7 6 7 8 7 7 6 5 7 7 7 9 7 7 8 8 7 5 7 9 13 10
|
| 97 |
+
LJ049-0104 12 8 7 7 7 7 11 11 8 15 5 5 6 7 6 5 6 5 5 5 5 6 6 5 5 6 5 5 5 6 6 7 10 9 5 5 5 5 5 5 5 6 15 15 14 24 9 5 6 8 9 8 7 6 7 5 5 5 7 7 10 9 5 5 5 6 5 6 7 7 9 23 6 6 8 8 8 6 7 7 7 7 5 6 8 10 7 6 8 7 6 6 6 5 7 8 8 9 7 6 8
|
| 98 |
+
LJ049-0105 7 6 6 5 4 4 6 7 7 9 7 5 5 5 5 6 6 6 6 9 6 6 6 7 8 7 7 13 9 6 7 9 6 8 9 9 10 9 11 29 6 5 5 5 6 6 5 5 5 4 5 7 7 7 8 7 7 7 6 7 10 9 5 5 5 5 5 5 5 4 4 7 7 7 6 7 6 4 6 8 8 7 6 6 8 9 8 7 8 7 8
|
| 99 |
+
LJ049-0106 7 6 7 5 4 5 6 5 5 4 5 6 7 7 6 7 7 6 6 6 5 7 8 12 31 13 5 6 8 8 10 7 6 6 6 6 7 8 7 7 8 7 7 7 5 5 5 5 7 9 10 10 8 6 5 6 6 6 7 7 7 8 8 9 18 10 6 7 6 8 7 7 7 7 6 7 6 7 7 7 7 8 7 6 8 7 7 9 12 6 8 7 6 4 5 8 8 6 8 8 6 6 6 5 7 8 8 9 10 15
|
| 100 |
+
LJ049-0107 22 18 7 9 8 7 7 6 6 7 7 6 6 5 5 5 4 6 5 6 7 6 5 5 5 5 6 9 14 14 9 11 10
|
| 101 |
+
LJ049-0108 11 6 8 6 6 8 9 7 10 6 5 6 7 7 5 5 5 6 6 7 5 4 5 5 6 6 6 5 5 5 6 5 8 6 5 5 5 6 8 5 12 8 14 5 5 4 4 5 4 5 6 7 7 7 8 5 6 6 20 6 6 5 5 9 6 5 6 9 15 11 9 8 7 6 6 7 8 7 7 8 7 6 5 5 6 7 7 7 7 9 9 9 8 7 8 7 5 6 6 6 6 12 17 11 9
|
| 102 |
+
LJ049-0109 4 4 5 5 5 6 8 8 6 8 8 9 8 5 6 13 26 13 6 5 6 6 6 5 7 6 6 7 7 7 6 7 8 13 12 10 11 16 10 10 8 5 7 6 6 5 4 5 5 6 7 7 6 6 5 5 6 5 5 6 9 6 6 7 6 7 9 8 9 13 12 10
|
| 103 |
+
LJ049-0110 5 5 6 5 6 7 6 5 5 5 5 5 7 7 6 5 5 5 5 5 6 6 7 6 6 5 8 8 8 8 6 7 7 7 7 12 7 7 6 4 4 4 4 5 6 6 6 6 7 8 8 7 6 8 7 6 7 7 6 9 7 5 6 5 3 4 5 6 8 6 6 6 7 8 10 7 8 8 7 5 5 6 10 17 22 8 10 12 9 8 6 5 6 6 5 7 9 8 5 4 7 7 7 7 6 6 6 8
|
| 104 |
+
LJ049-0111 7 7 7 7 6 5 5 6 7 8 7 5 5 5 6 5 5 7 5 7 7 5 6 5 6 9 11 12 7 6 8 8 8 8 6 7 7 8 8 11 9 11 19 12 23 9 7 5 6 6 8 7 7 19 6 5 5 6 6 7 8 8 8 6 7 8 10 10 16
|
| 105 |
+
LJ049-0112 9 7 9 8 10 5 6 7 7 8 6 8 7 8 15 6 5 5 7 7 6 9 9 7 7 5 5 3 4 5 6 8 6 6 6 8 9 12 7 6 9 7 11 8 7 8 8 7 5 4 4 5 6 10 17 14 9 9
|
| 106 |
+
LJ049-0113 8 7 8 9 9 6 5 5 6 9 12 8 7 8 7 11 5 5 4 5 6 6 7 7 8 8 9 16 8 6 5 7 7 12 14 8 11 11 6 5 6 10 22 8 8 9 11 9 8 7 10 12 14 30 6 7 7 7 7 8 9 7 5 5 5 7 7 6 5 6 9 6 9 12 6 8 7 8 9 8
|
| 107 |
+
LJ049-0114 6 5 6 6 8 9 6 8 9 7 11 10 10 7 7 7 6 6 8 9 7 6 5 6 8 8 9 8 6 7 7 5 8 7 7 7 6 6 5 5 4 5 5 6 5 6 7 5 5 5 5 6 5 4 3 4 5 6 8 6 6 6 8 9 13 8 8 35 9 7 5 5 5 6 6 5 5 6 6 5 5 6 6 5 5 5 5 6 6 6 6 5 5 6 8 14 10 10 8 6 6 7 6 6 8 8 7 6 6 7 10 12 21
|
| 108 |
+
LJ049-0115 7 6 5 6 8 8 9 7 5 5 5 8 10 10 6 5 5 5 4 6 6 5 4 6 8 7 7 7 9 9 10 8 5 5 5 5 6 6 7 6 5 5 5 6 6 8 12 17 13 28 18 13
|
| 109 |
+
LJ049-0116 10 7 8 8 10 9 8 8 7 6 10 7 5 5 5 5 5 5 7 7 7 7 7 6 7 8 8 7 5 5 5 6 7 6 7 8 8 8 8 6 6 7 5 5 5 8 10 6 6 4 6 7 6 5 7 9 10 8 10 8 9 10 12 8 8 9 12 12 5 6 8 7 8 6 6 9 12 11 45 10 6 6 6 6 7 7 6 6 6 5 6 4 6 6 6 7 7 7 6 6 7 9 14
|
| 110 |
+
LJ049-0117 4 4 5 6 6 6 5 7 8 7 7 6 6 8 9 8 7 7 7 7 7 7 7 7 8 10 13 5 6 5 4 12 17 15 17 10 8 7 7 8 8 9 7
|
| 111 |
+
LJ049-0118 9 8 10 7 5 5 4 5 4 5 5 5 7 7 5 7 11 11 8 17 6 5 6 7 13 11 18 6 6 6 5 5 5 6 6 6 5 5 7 9 7 5 5 5 7 7 7 6 5 4 5 9 10 9 6 9 7 7 7 8 10 14 18 6 5 5 5 5 7 8 8 6 5 6 6 6 9 5 5 6 8 7 8 8 6 9 8 7 6 6 12
|
| 112 |
+
LJ049-0119 5 5 5 6 7 6 5 5 6 6 6 5 5 5 7 10 11 5 6 6 7 8 13 7 7 7 8 7 7 7 5 3 4 5 8 6 7 8 9 7 8 20 6 6 6 8 7 6 5 5 6 8 11 24 8 57 9 11 6 5 5 6 7 8 7 7 7 6 6 6 6 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 113 |
+
LJ049-0120 11 7 5 5 6 6 6 5 6 15 13 5 6 7 6 8 10 6 6 7 11 9 11 5 4 7 7 6 6 6 8 10 13 7 9 7 6 7 7 6 6 7 7 7 6 7 5 4 5 7 6 6 5 5 6 7 8 6 5 5 8 7 6 7 7 10 13 13 10
|
| 114 |
+
LJ049-0121 8 4 5 6 6 6 5 7 8 10 9 9 7 6 5 5 7 8 9 7 8 8 8 6 6 7 7 8 9 8 5 4 5 11 13 9 8 8 7 5 5 4 4 5 6 8 9 9 8 7 6 8 9 8 7 5 6 6 6 7 6 5 6 6 7 8 8 8 8 8 6 5 6 8 7 9 8 9 7 7 7 7 7 5 5 5 7 7 7 7 6 7 7 8
|
| 115 |
+
LJ049-0122 6 8 10 8 7 6 7 19 11 14 11 10 7 8 10 7 10 9 14 35 7 8 8 6 6 6 7 7 8 7 6 7 8 7 6 18 8 8 11 7 7 6 7 8 8 8 9 9 10 31 9 7 6 6 7 8 11 7 6 6 5 6 9 10 8 7 6 7 5 5 7 7 7 11 11 8 5 6 7 7 6 7 7 6 6 6 6 6 6 7 10 15
|
| 116 |
+
LJ049-0123 12 7 6 6 7 8 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 5 5 5 6 11 8 9 10 7 7 6 7 6 7 7 7 6 5 6 5 6 8 6 6 6 6 7 7 7 7 8 7 12 5 5 5 8 6 6 7 5 5 5 5 5 4 4 6 5 5 6 6 5 5 7 9 8 8 6 8 10 11 5 6 7 6 6 6 7 7 7 8 6 5 5 8 7 6 7 7 10 13 13 10
|
| 117 |
+
LJ049-0124 11 6 9 6 7 8 9 6 14 6 5 4 4 4 9 12 10 7 6 7 6 7 8 8 7 8 8 11 7 6 6 7 8 11 13 17
|
| 118 |
+
LJ049-0125 5 6 7 8 7 8 8 8 9 7 7 6 6 13 15 6 6 13 9 8 8 6 6 8 5 4 4 7 10 8 5 6 6 6 5 6 6 6 6 6 8 8 13 20 12 7 7 7 6 6 4 7 10 8 5 6 6 6 5 5 4 7 6 6 6 6 6 6 5 4 5 6 5 7 6 7 8 10 7 7 10 8 7 8 7 8 8 6 6 7 7 7 8 10 9 10 12 13 10
|
| 119 |
+
LJ049-0126 6 7 5 6 7 6 5 7 9 8 6 5 4 6 8 9 9 8 11 13 11 16 11 12 14 12 7 5 6 7 5 6 5 9 15 6 6 5 6 7 7 8 6 8 8 7 6 7 7 8 9 10 11 9
|
| 120 |
+
LJ049-0127 12 7 6 8 10 10 10 8 8 6 6 4 6 5 7 7 6 5 5 5 7 8 8 6 7 10 13 5 5 5 6 7 7 7 7 7 7 9 6 5 7 6 10 9 11 8 9 6 6 5 5 10 11 7 12 15 7 8 7 7 6 5 6 5 6 6 5 4 6 6 6 6 5 6 9 10 9 8 8 8 6 5 6 6 7 8 9 13 12 10
|
| 121 |
+
LJ049-0128 6 51 12 14 9 6 5 5 7 9 7 6 5 6 6 6 7 6 5 7 6 6 6 7 7 5 5 6 7 6 6 9 12 7 6 8 10 8 5 5 4 3 4 5 6 8 6 6 6 8 10 13 9 9 10
|
| 122 |
+
LJ049-0129 8 4 5 5 8 10 13 23 17 11 7 6 5 5 6 7 6 7 6 7 8 6 6 6 7 6 7 5 4 5 6 6 6 9 6 6 5 7 6 5 6 6 7 5 6 5 5 7 6 6 6 6 6 7 7 10 9 8 10 10 13 17 7 8 6 7 6 4 4 5 5 6 6 6 5 7 5 7 9 8 7 10 8 7 8 8 9 5 6 7 6 4 4 5 6 6 6 5 5 4 4 4 5 6 10 9 13 10 9
|
| 123 |
+
LJ049-0130 6 7 8 6 5 5 5 7 8 7 6 7 7 6 6 6 4 5 7 14 10 8 8 7 8 8 9 7 8 11 13
|
| 124 |
+
LJ049-0132 5 5 5 5 6 5 7 6 5 5 6 8 11 8 10 14 7 10 6 6 7 7 6 6 7 8 7 7 9 13 8 6 5 6 6 8 9 7 6 5 7 8 9 9 9 8 8 7 5 6 6 5 8 7 10 11 11 7 6 8 12 13 17
|
| 125 |
+
LJ049-0133 5 7 11 9 10 7 6 6 8 10 13 9 4 5 5 5 7 9 12 9 6 5 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 6 6 6 7 8 11 8 8 6 7 6 6 9 6 8 9 14 8 7 7 6 5 5 5 5 6 10 11 13 9 8
|
| 126 |
+
LJ049-0134 5 5 5 5 6 6 7 6 6 6 5 4 8 11 12 8 8 3 6 6 5 5 7 6 5 8 9 8 8 7 7 7 7 7 6 6 6 5 5 7 6 7 7 9 10 9 6 5 6 6 6 8 8 8 6 5 4 6 7 6 6 5 6 5 6 7 5 6 9 29 11 6 5 6 6 7 7 6 6 6 6 5 6 6 6 5 5 5 7 8 8 8 9 9 10 12
|
| 127 |
+
LJ049-0135 5 7 6 7 8 6 6 6 7 6 7 5 5 5 4 6 6 5 5 6 6 8 11 9 6 5 4 5 5 5 6 6 5 5 5 4 5 5 6 7 6 6 6 5 5 4 5 6 7 5 6 5 5 5 7 8 10 10 16
|
| 128 |
+
LJ049-0136 8 6 5 6 6 6 6 7 6 5 5 6 10 8 8 7 8 9 11 7 6 6 7 6 6 6 8 6 6 6 7 7 7 7 7 6 6 6 5 6 6 6 5 5 6 9 10 33 5 6 6 6 7 6 7 5 4 5 6 6 7 11 14 8 7 7 7 7 12 12 8 10 10 11 8 9 10 8 8 5 4 4 6 6 6 6 5 5 5 7 7 6 6 5 6 8 8 8 6 5 5 7 7 6 5 5 5 7 9 9 7 9 18
|
| 129 |
+
LJ049-0137 4 5 6 6 7 7 7 9 8 5 6 6 6 9 10 6 6 8 12 9 7 7 6 7 8 8 7 7 6 7 9 9 8 9 7 5 5 5 4 4 4 5 5 5 6 6 5 6 7 8 10 29 7 5 6 6 5 5 7 8 9 7 6 6 7 7 6 6 7 6 7 6 6 5 4 6 6 7 7 6 5 4 5 5 6 6 7 9 9 9 7 6 5 5 6 7 8 7 7 9 8 6 6 7 7 5 7
|
| 130 |
+
LJ049-0138 5 8 5 5 8 7 5 6 8 8 8 8 6 7 8 9 11 14 9 9 19 13 8 7 7 5 5 4 5 4 7 6 4 5 6 7 12 5 5 5 5 6 6 8 7 6 6 5 6 6 6 5 5 5 7 6 6 6 4 6 6 7 7 6 5 4 5 5 6 6 7 9 12 15
|
| 131 |
+
LJ049-0139 4 6 7 6 6 7 8 7 8 9 8 6 6 6 6 11 9 6 7 7 9 7 7 7 7 9 9 6 6 6 6 7 6 6 9 9 10 6 5 4 5 7 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 132 |
+
LJ049-0140 5 6 8 8 7 7 6 7 8 6 7 6 7 7 6 7 8 8 15 9 10 9 7 6 8 6 6 7 7 6 5 5 5 5 5 5 6 6 5 5 6 6 5 7 7 7 7 10 9 5 5 5 6 5 6 7 7 10 22 7 7 7 6 5 4 6 6 7 8 7 7 5 6 9 8 12 9 8 8 5 5 8 7 9 9 13 6 6 5 6 6 6 7 7 7 6 5 6 7 6 5 6 9 10 15 11 7
|
| 133 |
+
LJ049-0141 6 7 7 7 7 6 8 7 6 8 8 9 6 5 5 4 4 5 4 4 5 5 6 5 6 7 6 5 5 4 6 31 9 9 6 8 10 11 10 6 6 5 5 6 6 8 8 5 5 5 6 6 7 9 9 6 8 7 7 9 5 5 5 8 6 5 6 6 7 6 7 5 6 7 5 6 6 13 8 8 10 7 7 8 7 8 8 14 11 9 5 5 5 7 9 10 8 6 6 5 4 6 6 6 6 5 5 5 4 5 5 5 6 6 5 6 7 7 6 8
|
| 134 |
+
LJ049-0142 8 7 7 8 7 8 6 5 5 5 6 6 7 6 7 5 4 5 6 6 6 8 10 10 8 7 8 8 8 8 7 11 9 11 8 10 11 8 6 5 5 5 7 6 6 6 6 6 7 7 8 7 6 8
|
| 135 |
+
LJ049-0143 9 10 8 9 8 11 6 6 6 7 8 8 8 6 8 12 17 9 9 8 7 8 7 10 9 6 8 7 8 7 7 7 6 6 7 6 8 8 6 7 12 11 7 7 9 10 8 6 6 5 7 7 6 5 4 4 4 5 7 9 9 8 9 17 6 5 6 6 5 5 6 6 7 6 5 5 5 6 10 19 18 7 9
|
| 136 |
+
LJ049-0144 5 5 6 6 5 5 7 6 4 4 4 6 8 9 7 5 5 5 6 4 5 5 8 9 10 9 10 6 6 7 9 7 5 5 8 7 8 6 4 5 5 6 7 6 6 6 5 5 7 9 10 9 6 5 5 5 6 7 8 7 7 8 8 12 8 9 10 6 6 8 9 9 7 8 9 16 7 6 5 5 5 5 5 5 5 6 6 5 6 5 6 6 6 5 6 6 8 7 8
|
| 137 |
+
LJ049-0145 11 8 6 6 6 7 7 5 5 5 4 4 5 6 7 6 6 5 6 6 5 5 5 5 5 5 5 5 5 5 5 5 8 10 8 6 5 6 6 7 6 5 5 5 5 6 5 8 7 6 6 6 5 7 7 7 8 7 7 6 5 5 5 5 5 6 6 5 6 7 6 9 28 8 6 5 5 9 12 9 8 8 8 10 7 8 6 6 6 7 7 6 7 10 7 11 8 7 7
|
| 138 |
+
LJ049-0146 8 7 8 7 7 6 6 7 8 7 7 7 6 7 7 6 6 7 6 7 9 9 7 8 8 8 8 10 7 6 5 5 5 5 6 6 9 11 10 8 9 9 4 5 4 6 7 5 4 7 6 5 4 5 5 6 6 7 6 6 8 8 10 7 5 5 6 7 6 6 5 7 7 8 6 6 7 7 8 9 7 23 9 7 7 7 7 6 6 6 6 7 9 9 9 8 8
|
| 139 |
+
LJ049-0147 7 7 11 10 7 7 7 5 5 5 6 4 5 5 5 5 5 7 9 6 5 5 5 6 7 6 3 6 6 6 6 8 11 8 6 5 6 6 7 8 7 7 8 7 6 5 5 9 11 7 6 8 12 8 7 4 8 6 7 10 5 7 12 8 6 5 5 7 6 7 6 6 6 6 7 6 6 6 5 5 7 5 8 6 5 4 5 4 6 6 5 4 7 6 6 6 6 6 7 8 10 11 13 12 10
|
| 140 |
+
LJ049-0148 5 6 8 6 8 8 8 7 10 9 7 8 7 7 13 9 7 7 6 5 5 5 7 8 8 6 6 8 3 7 8 6 5 5 6 6 6 9 8 8 8 8 10 9 9 8 6 5 4 5 7 8 7 6 5 5 6 7 6 5 4 5 5 5 7 7 8 8 8 8 10 7 9 15 13 15 5 5 5 5 4 6 7 8 8 9 16
|
| 141 |
+
LJ049-0149 4 5 5 5 4 5 8 8 6 5 8 10 7 9 7 9 9 17 8 7 7 6 6 6 6 5 6 5 4 6 7 11 8 11 27 10 9 8 7 7 9 9 9 11 8 7 6 5 6 6 7 7 6 6 7 6 6 6 8 9 8 6 5 5 6 6 6 6 6 5 5 5 6 6 5 5 5 8 10 9 8 6 7 8 6 10 10 9 7 7 6 5 5 5 5 5 5 5 6 6 5 6 7 7 7 6 5 6 11 11 12 10
|
| 142 |
+
LJ049-0150 12 9 7 6 5 4 6 7 6 6 5 5 5 6 7 6 6 8 11 6 6 7 7 10 10 8 6 6 6 6 6 5 6 8 9 10 6 6 7 5 6 6 8 8 7 7 7 6 5 6 7 5 4 4 4 6 8 9 7 5 5 6 5 6 7 6 7 7 8 8 8 7 7 10 8 9 10 7 6 6 10 7 6 6 5 6 6 7 8 5 5 5 5 5 5 5 6 6 5 6 6 8 6 9
|
| 143 |
+
LJ049-0151 6 6 7 7 6 7 5 5 5 6 6 7 9 11 7 5 5 6 5 5 5 5 5 5 5 6 6 5 4 4 4 7 6 7 7 7 10 15
|
| 144 |
+
LJ049-0152 6 7 8 8 10 6 5 5 4 4 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 8 13 6 5 4 4 6 6 6 6 5 5 5 4 5 5 5 6 6 5 5 5 5 6 8 6 5 8 11 7 6 8 9 9 5 5 5 3 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 8 9 12 8 6 7 7 7 8 9 8 6 7 6 6 7 8 6 6 6 6 6 5 8 9 8 6 5 6 6 8 7 9
|
| 145 |
+
LJ049-0153 6 7 8 6 5 5 5 7 8 8 6 7 7 7 5 9 8 5 5 6 10 10 9 6 6 6 7 5 5 6 6 6 6 5 5 5 5 3 4 5 6 6 6 5 4 4 5 5 5 5 5 5 6 6 7 8 9 16 8 9 14 11 10 7 6 6 6 6 6 10 10 16 13 32 9 8 8 9 5 4 5 7 8 7 7 6 8 7 7 7 7 6 8
|
| 146 |
+
LJ049-0154 9 10 13 12 9 8 6 7 6 7 10 10 14 12 9
|
| 147 |
+
LJ049-0155 10 8 8 6 5 6 7 7 5 5 6 7 6 6 5 7 8 8 9 11 7 7 9 9 8 8 6 6 7 4 5 5 5 5 5 5 4 4 7 7 8 10 7 7 6 8 12 16 7 8 9 7 5 6 7 9 8 9 7 6 10 9 6 8 7 5 4 7 6 6 6 6 6 10 7 12 9 9 7 8 8 12 7 6 4 5 9 6 5 5 6 8 9 8 11 12 16 10
|
| 148 |
+
LJ049-0156 12 6 5 5 5 7 8 7 5 7 9 8 7 17 7 6 6 6 4 5 5 4 5 5 5 5 5 6 6 5 5 7 6 7 14 10 5 6 7 8 7 8 6 6 7 14 7 5 5 6 7 7 9 8 9 8 7 7 17 14 9 6 5 6 9 8 8 9 10 8 8 8 17
|
| 149 |
+
LJ049-0157 4 7 7 7 6 6 7 6 5 6 10 9 9 8 6 5 4 6 6 6 5 5 5 5 6 6 9 7 7 9 8 8 6 7 7 8 7 11 9 8 8 7 10 8 10 10 6 8 6 8 7 7 6 7 7 6 8 6 5 5 6 6 9 12 10 10
|
| 150 |
+
LJ049-0158 11 15 8 7 7 8 7 6 5 7 7 8 6 6 6 6 7 7 9 12 12 10 7 6 7 5 5 4 4 8 8 8 10 6 7 7 7 9 8 7 6 5 5 6 8 6 5 6 4 4 7 6 7 6 6 7 7 7 7 6 7 7 8 7 9 16 15 16 31 13 8 6 6 6 7 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 6 5 5 5 5 9 5 8 7 6 7 8 9 9
|
| 151 |
+
LJ049-0159 6 7 8 6 5 5 5 7 8 9 7 9 13 17 9 14 10 6 8 6 7 7 9 8 5 6 6 6 6 6 4 6 7 7 5 5 6 7 6 7 10 11 7 6 9 14 13 27 11 13 10 8 8 7 8 7 10 11 7 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 6 7 8 8 8 7 7 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 152 |
+
LJ049-0160 11 7 6 6 6 9 8 11 16 16 35 7 7 7 6 5 5 5 7 8 8 6 7 8 3 6 6 8 13 6 5 8 9 7 7 7 9 9 9 7 6 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 6 13
|
| 153 |
+
LJ049-0161 6 7 8 9 8 8 17 8 4 3 4 5 4 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 8 7 5 5 5 6 6 8 7 6 6 6 5 7 7 7 8 8 8 7 6 6 6 5 2 4 5 5 5 6 6 5 6 6 8 6 9
|
| 154 |
+
LJ049-0162 8 6 9 8 8 7 7 6 4 5 5 7 8 8 6 7 9 4 7 5 6 7 5 5 5 6 7 7 7 6 7 7 6 6 6 7 6 7 5 4 5 6 6 6 10 5 6 4 4 4 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 6 7 6 8
|
| 155 |
+
LJ049-0163 6 56 12 16 27 5 6 6 6 6 6 7 6 8 8 6 5 6 5 5 5 5 5 5 5 5 4 4 5 6 6 6 5 4 4 5 5 6 8 7 7 7 7 8 6 7 6 6 7 7 8 8 9 15 16 13 12 9 6 6 7 7 5 6 7 7 8 8 5 6 6 8 9 9 10 7 6 6 7 8 7 7 8 6 7 7 5 5 7 7 6 6 5 5 6 8 7 11 10 7 8 9 13 10 9
|
| 156 |
+
LJ049-0164 7 5 5 5 6 6 6 7 6 7 8 9 8 6 6 5 5 4 6 31 10 10 7 5 5 7 8 8 7 6 9 13 8 10 5 5 5 6 6 8 6 8 11 7 7 13 5 5 5 6 6 4 6 7 7 7 2 5 4 7 6 6 6 5 5 5 5 4 5 6 8 7 8 6 7 5 4 4 5 5 5 6 6 5 5 5 5 4 5 5 4 3 4 5 6 8 6 6 6 8 9 13 8 10 7 8 8 7 7 7 8 9 8
|
| 157 |
+
LJ049-0165 5 6 9 8 9 8 5 6 9 8 6 5 4 4 5 5 6 6 6 7 6 7 8 8 8 5 5 5 5 5 3 4 5 6 6 6 5 4 4 4 5 6 8 7 7 7 7 8 6 7 5 6 7 7 7 7 8 8 10 8 6 5 4 5 5 6 7 7 6 6 6 4 7 7 8 9 25 6 6 7 7 8 7 6 7 8 9 11 8 8 8 7 7 10 8 8 8 7 7 8 8 9 7
|
| 158 |
+
LJ049-0166 8 6 9 9 9 31 13 12 8 6 6 8 7 9 10 8 9 6 4 5 5 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 6 10 30 9 6 6 5 6 6 7 5 5 6 8 8 9 7 5 6 6 7 9 10 10 9 9 8 7 8 7 5 6 7 5 5 4 6 11 7 6 8 13 13 10 9
|
| 159 |
+
LJ049-0167 8 6 6 9 7 8 7 5 5 6 5 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 9 8 7 6 7 6 6 7 7 7 7 5 5 5 5 5 5 5 6 6 5 5 5 4 4 12 6 6 6 6 5 6 6 8 7 8
|
| 160 |
+
LJ049-0168 14 8 6 7 6 6 7 7 6 6 9 13 14 26 6 6 5 6 8 8 9 9 6 6 8 7 6 6 4 5 5 5 5 8 6 7 6 6 5 6 6 6 6 7 6 7 5 4 5 6 6 6 9 10 9 6 7 5 8 9 8 9 9 6 7 10 11 6 6 6 6 8 14 14 8 5 5 5 5 7 8 6 7 7 12 15
|
| 161 |
+
LJ049-0169 5 5 5 6 8 9 10 9 11 9 6 6 6 6 6 10 12 7 6 4 4 7 5 7 9 13 8 7 8 10 8 7 6 5 6 6 7 6 6 6 5 5 5 5 6 7 5 7 9 8 6 4 6 8 10 11 8 10 13 7 7 8 9 8
|
| 162 |
+
LJ049-0170 7 5 5 6 8 7 5 5 4 4 6 7 8 8 6 5 6 6 6 8 6 6 4 4 5 8 7 7 6 4 9 6 6 6 7 5 3 4 6 8 8 9 7 9 9 10 9 8 8 7 7 5 4 4 5 5 5 6 6 5 6 7 6 7 10 8 8 7 7 7 8 9 15 9 8 7 7 6 4 4 6 6 6 5 5 5 6 9 10 10 9 10 10 10
|
| 163 |
+
LJ049-0171 5 5 9 9 13 9 5 6 6 6 8 10 6 6 7 10 10 9 9 9 9 7 6 6 6 5 5 5 8 9 10 8 8 6 5 6 6 9 8 8 7 6 5 6 5 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 9 9 8 7 8
|
| 164 |
+
LJ049-0172 2 5 6 7 6 7 6 6 7 7 7 5 4 4 5 5 7 14 10 18 18 9 6 6 5 5 7 6 7 9 8 7 9 13 11 11 7 6 8 12 10 11 11 7 6 7 7 9 7 4 7 5 5 6 6 7 6 7 5 4 5 6 6 7 9 7 8
|
| 165 |
+
LJ049-0173 11 7 6 7 7 7 7 5 5 6 5 7 7 6 8 12 14 10 10 7 9 10 6 10 8 8 6 8 8 8 6 8 5 6 7 9 17 12 7 6 7 6 6 7 7 8 9 11 11 10 21 9 63 9 16 7 5 5 6 7 7 5 6 7 10 6 10 5 6 6 5 6 10 8 6 6 5 5 6 6 6 6 8 7 8 9 5 6 5 7 8 11 21
|
| 166 |
+
LJ049-0174 8 12 7 7 7 7 9 9 7 7 8 6 5 5 5 7 8 7 6 7 7 6 5 5 6 6 5 6 7 7 7 7 6 5 5 6 6 8 5 7 9 9 7 5 7 6 9 8 7 9 7 7 7 7 10 11 8 8 9 8 8 25 7 9 8 27 11 11 8 5 5 7 7 6 5 5 5 6 7 7 7 7 14
|
| 167 |
+
LJ049-0175 5 6 7 7 5 5 5 5 5 6 6 5 5 5 5 6 7 8 9 13 10 11 10 10 14 10
|
| 168 |
+
LJ049-0176 7 6 6 6 6 6 6 7 13 9 11 32 8 7 7 6 5 5 5 7 8 8 6 6 6 3 7 7 7 10 8 9 7 6 9 5 6 8 8 7 7 6 7 8 8 9 10 10 9 20 7 7 9 7 9 8 5 8 11 9 9 25 13 8 6 6 5 6 6 7 6 7 6 5 5 10 6 6 5 7 6 6 6 5 5 8 8 7 7 7 8 10 9 8
|
| 169 |
+
LJ049-0177 5 7 8 8 7 6 9 12 17 15 8 13 13 14 5 5 5 6 6 6 6 8 9 7 5 5 4 4 5 4 4 6 6 6 6 5 6 8 7 10 8 9 9 10 7 6 5 5 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 13 25 6 6 8 10 7 9 8 6 6 6 6 7 7 6 5 4 5 5 7 7 8 5 5 5 5 5 8 7 9 8 7 10 9 10 9 6 6 6 41 15 23
|
| 170 |
+
LJ049-0178 10 5 4 4 5 7 7 7 7 6 6 6 8 7 5 6 13 10 7 8 6 6 7 6 5 6 6 10 9 9 10 8 9 7 7 6 5 6 7 7 6 5 5 5 7 8 7 6 7 8 6 5 5 9 9 8 8 8 7 9 8 11 11 8 8
|
| 171 |
+
LJ049-0179 5 5 5 5 6 5 7 6 5 5 6 8 11 7 8 17 7 10 7 9 9 7 37 13 13 5 6 6 5 7 7 6 5 5 5 7 8 8 6 7 10 13 7 6 8 9 7 8 7 7 7 6 5 5 6 6 7 7 8 7 5 5 5 6 7 6 6 6 6 5 5 6 6 6 6 7 6 7 5 4 5 6 6 7 10 12 12 10
|
| 172 |
+
LJ049-0180 5 5 5 5 6 6 7 5 5 5 8 6 6 5 5 6 6 6 7 9 10 14
|
| 173 |
+
LJ049-0181 5 7 6 7 9 6 8 10 7 7 6 7 8 9 11 8 7 7 7 8 7 6 8 12 8 10 9 6 5 5 6 5 4 5 7 6 5 5 7 8 7 6 7 5 6 7 5 7 7 6 6 6 7 9 8 9 7 6 8 11 7 6 4 5 6 6 5 5 6 7 6 6 13 9 8 7 8 6 7 7 9 8 7 8 7 8 7 8 9 6 5 4 4 6 7 7 10 10 8 10 9 11 11 10
|
| 174 |
+
LJ049-0182 11 6 8 6 7 8 9 7 27 7 5 5 5 6 6 7 5 4 5 6 6 5 5 7 7 7 7 6 6 7 11 7 6 6 7 7 6 5 5 5 7 8 7 6 6 8 5 4 6 6 5 4 5 5 6 6 7 9 9 8 7 6 17 7 5 6 8 6 6 8 9 9 6 7 5 7 7 6 7 6 5 4 5 5 6 6 6 7 5 7 9 10 10 16
|
| 175 |
+
LJ049-0183 6 7 6 6 5 4 6 8 8 8 12 11 8 10 8 8 13 9 9 5 6 6 5 5 6 5 4 6 6 6 6 5 6 8 7 10 9 9 9 10 7 6 6 8 6 7 7 7 9 9 9 6 6 5 4 5 6 6 8 8 9 12 8 10 7 6 7 7 6 8 9 10
|
| 176 |
+
LJ049-0184 7 7 7 6 7 6 6 7 7 5 7 8 10 8 7 7 5 6 7 7 5 5 6 7 7 6 9 10 10 13 17 8 7 7 7 6 6 7 8 8 9 8 6 6 11 18 7 4 5 6 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 177 |
+
LJ049-0185 7 6 6 5 4 4 6 7 7 9 7 5 5 4 5 5 5 6 6 7 6 6 4 6 7 6 5 5 6 10 10 11 10 6 6 6 7 7 6 7 10 10 9 10 8 7 9
|
| 178 |
+
LJ049-0186 5 5 5 5 5 6 7 6 4 6 7 9 7 6 5 5 6 5 5 5 6 6 5 5 5 4 8 11 8 7 8 9 6 8 9 13 10 13 8 6 5 6 6 7 10 9 10 8 6 5 10 9 7 6 7 5 5 4 4 4 4 4 5 4 4 7 6 6 6 5 5 5 4 5 5 5 6 6 5 6 6 8 6 9
|
| 179 |
+
LJ049-0187 11 11 9 5 5 5 8 11 8 7 7 6 6 8 7 6 6 6 6 10 7 5 4 7 7 8 6 8 7 5 7 7 7 8 10 7 5 6 6 8 6 6 7 8 6 6 6 6 6 7 6 7 5 4 5 6 6 6 10 6 6 5 7 14 9 10 11 8 9 6 6 5 5 7 9 11 15
|
| 180 |
+
LJ049-0188 8 8 7 6 11 9 5 7 5 5 7 8 7 5 6 7 5 5 4 7 11 7 6 9 13 12 22 8 7 7 5 6 6 7 8 10 8 4 5 5 6 6 6 6 6 5 6 8 7 6 7 6 8 7 5 5 6 6 5 5 6 8 11 7 6 7 10 9 7 7 7 9 14 10 7 11 20 7 8 9 6 5 5 7 9 7 6 8 5 6 6 7 6 7 5 6 7 8 7 7 6 8 12
|
| 181 |
+
LJ049-0189 5 5 7 7 6 8 7 10 6 9 6 10 8 10 7 6 7 9 7 5 4 5 5 8 9 9 9 8 8 7 6 6 5 5 5 5 6 6 7 6 7 6 6 10 12 7 6 8 13 10 8 5 4 4 7 6 6 4 5 5 6 6 7 7 10 12 9 5 4 5 7 7 6 5 5 5 7 8 8 7 8 8 5 8 7 6 57 15 13 10
|
| 182 |
+
LJ049-0190 8 4 6 9 7 6 6 6 8 7 7 5 4 8 14 15 26 7 5 6 51 12 15 16 10 9 11 10 11 10 10 8 5 6 7 7 5 5 6 7 6 7 10 11 7 6 9 13 10 9
|
| 183 |
+
LJ049-0191 9 8 6 6 7 7 7 8 8 8 13 8 6 4 5 5 6 6 6 7 8 19 7 6 8 8 9 6 8 5 5 6 8 6 7 7 5 5 6 5 5 7 5 7 8 9 7 10 9 9 8 6 6 8 7 5 5 6 6 8 7 9 6 5 5 5 5 5 4 4 5 6 8 6 6 6 9 10 12 11 11 10
|
| 184 |
+
LJ049-0192 8 5 5 5 7 8 8 8 9 8 5 4 7 8 8 6 6 7 9 7 5 5 5 4 5 6 7 7 6 6 6 6 5 7 7 7 7 6 5 6 7 7 9 7 5 4 6 7 11 12 9 5 4 4 6 7 7 6 5 5 5 7 8 8 6 7 13 6 6 5 6 7 6 6 6 6 5 5 6 8 6 7 8 7 6 7 6 8 7 8 9 7 8 9 8 7 6 7 8 7 7 9 6 7 7 6 7 5 4 5 7 7 8 6 5 6 6 8
|
| 185 |
+
LJ049-0193 7 7 6 5 5 5 5 4 5 5 6 10 17 6 6 5 4 7 7 7 6 7 6 6 7 6 5 5 6 8 17 9 13 6 6 5 6 6 6 7 10 13 10 10 27 7 7 6 11 8 6 5 5 6 6 6 6 6 6 6 5 6 7 7 5 5 6 7 6 6 9 6 5 5 6 6 7 6 5 6 6 7 6 6 6 8 6 5 5 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 7 6 13
|
| 186 |
+
LJ049-0194 10 6 5 5 6 7 6 8 8 7 7 7 9 6 6 7 43 16 15 10
|
| 187 |
+
LJ049-0195 10 8 7 6 5 4 5 5 5 7 7 6 4 5 5 7 8 7 6 7 19 8 8 7 4 5 4 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 7 8 34 6 8 11 12 12 3 6 6 6 6 5 4 6 7 7 5 5 5 5 5 6 8 10 13 10 9
|
| 188 |
+
LJ049-0196 6 6 6 6 6 6 6 7 12 9 10 16 8 6 7 11 19 12 13 21 9 7 11 8 11 11 9 9 9 6 4 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 5 7 6 7 14 9 5 6 7 8 7 8 6 5 6 9 8 6 4 5 6 6 7 5 6 6 6 8 7 6 5 6 8 8 10 8 8 10 8 7 6 5 6 7 5 6 5 5 5 6 9 11 9 9
|
| 189 |
+
LJ049-0197 7 5 7 7 9 8 7 8 5 5 6 7 7 6 6 7 5 8 6 5 6 8 12 7 6 4 4 5 7 6 5 5 7 7 6 5 4 4 5 5 6 8 9 9 11 9 7 7 7 6 5 5 5 5 5 7 8 9 8 6 5 4 5 5 5 5 5 5 5 5 6 6 5 10 7 8 6 6 8 18
|
| 190 |
+
LJ049-0198 10 10 13 8 7 5 7 8 7 7 6 6 6 8 9 8 8 7 6 5 6 6 9 9 9 15 8 8 9 7 6 8 8 8 7 7 11 6 7 6 7 8 7 5 9 8 9 9 8 8 8
|
| 191 |
+
LJ049-0199 4 8 9 7 6 7 8 7 7 7 7 8 7 7 6 6 6 5 8 7 6 8 9 8 7 6 7 7 8 8 7 7 7 7 8 6 6 5 4 7 9 10 8 6 5 4 5 5 5 6 7 7 6 6 5 5 6 5 5 5 5 5 6 5 5 5 6 5 5 6 7 9 6 6 6 7 6 7 5 4 6 6 6 7 9 7 8
|
| 192 |
+
LJ049-0200 6 6 7 7 6 5 4 7 8 8 6 6 7 10 8 4 5 8 9 6 5 5 5 7 8 8 8 7 6 11 8 6 5 6 5 5 5 8 11 17 21 9 7 6 5 6 7 7 5 5 7 8 9 13 6 5 5 7 8 8 6 5 6 6 6 8 9 8 6 7 7 9 7 7 8 6 5 5 5 5 5 6 6 5 6 7 7 13
|
| 193 |
+
LJ049-0201 4 8 8 5 6 7 6 6 6 7 6 8 8 7 5 5 4 6 7 7 6 7 6 5 4 5 5 6 6 6 7 5 7 8 9 7 10 9 10 7 7 5 7 12 11 8 11 7 5 4 4 5 6 7 6 9 6 5 5 6 6 6 7 9 8 9 8 9 9 10 12 9 6 6 6 7 6 6 5 6 5 5 5 7 7 7 7 6 8 8 6 5 7 8 7 7 5 6 5 7 11 12 12 23
|
| 194 |
+
LJ049-0202 8 8 7 5 5 5 5 7 8 8 6 4 7 6 6 6 7 6 7 5 4 5 6 6 7 10 12 13 10
|
| 195 |
+
LJ049-0203 14 13 10 8 5 5 6 7 8 9 13 9 11 11 8 6 5 6 5 6 8 8 6 7 7 9 7 5 7 6 7 7 6 8 9 9 12 21 13 14 9 5 5 6 6 6 7 8 7 7 7 6 6 7 7 7 6 8 8 5 5 5 6 6 6 5 6 7 11 9 7 7 7 9 8 7 6 5 6 5 5 5 6 6 7 6 7 5 5 6 6 6 7 9 7 8
|
| 196 |
+
LJ049-0204 10 8 9 5 5 7 7 6 5 4 6 7 5 5 5 5 5 6 6 6 8 7 10 16 19
|
| 197 |
+
LJ049-0205 7 7 8 6 6 5 6 7 5 4 6 7 7 6 7 7 8 8 6 6 6 6 6 6 7 8 9 6 5 6 9 6 6 5 5 5 7 6 5 5 5 6 5 5 5 6 4 5 5 6 6 8 8 7 7 11 8 5 6 6 6 9 10 6 6 7 11 10 17 5 6 8 9 10 6 5 4 5 5 5 5 7 6 8 7 5 6 6 6 6 7 6 7 5 5 5 5 5 5 4 4 5 6 6 5 5 8 11 7 8 10 10 9 10 21
|
| 198 |
+
LJ049-0206 4 6 9 8 9 8 6 5 5 8 13 11 12 7 6 7 7 8 7 6 5 6 5 5 6 7 7 8 8 8 7 11 7 5 5 7 6 6 6 7 9 7 8 8 14 17 29 11 7 9 10 9 10 9 9 13 18
|
| 199 |
+
LJ049-0207 4 4 5 5 5 5 6 6 7 8 7 4 5 5 5 5 4 5 5 5 5 6 7 6 6 7 6 6 6 7 6 7 5 4 5 6 6 7 10 11 8 6 6 7 7 7 5 7 6 6 6 6 7 6 6 5 4 4 4 5 7 9 9 8 7 9 9 7 6 5 5 7 8 9 9 9 9 7 6 7 5 5 5 5 6 8 8 7 6 6 5 4 4 5 5 5 6 6 5 4 4 4 4 5 5 4 6 7 7 7 6 8 7 8
|
| 200 |
+
LJ049-0208 10 7 8 7 7 6 6 7 6 5 7 7 7 8 7 7 7 11 7 7 6 7 5 10 8 6 6 5 6 5 6 4 4 5 5 5 5 6 7 8 7 5 5 6 8 8 6 7 5 4 4 5 5 5 6 6 5 5 4 4 3 5 8 8 8 8 6 7 7 9 8 13 6 6 5 6 8 9 10 11 9 9
|
| 201 |
+
LJ049-0209 5 5 5 5 6 6 7 5 5 5 6 6 7 9 9 13 9 9 16 8 6 7 8 10 10 9 7 8 9 8 6 9 8 6 4 5 5 6 7 7 8 9 7 6 5 6 7 7 5 7 7 6 7 7 8 7 7 9 6 6 6 9 16 12 11 9
|
| 202 |
+
LJ049-0210 2 3 4 6 7 8 8 6 6 6 7 5 7 7 5 5 7 8 7 5 5 6 8 8 6 6 5 4 5 8 8 7 5 5 5 6 6 6 5 5 5 5 6 8 8 8 6 8 5 4 4 5 5 5 6 6 5 4 4 4 4 5 5 4 6 7 7 7 6 7 6 8
|
| 203 |
+
LJ049-0211 8 6 5 4 3 4 4 5 5 5 6 6 6 7 8 7 7 8 6 6 4 4 4 5 5 5 5 6 7 7 5 6 6 7 7 7 5 5 7 7 5 5 6 5 5 5 6 7 8 7 5 5 6 6 7 10 37 8 8 8 6 5 6 6 6 6 7 6 7 6 6 7 7 5 6 5 6 8 7 9 7 6 5 5 5 5 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 5 5 6 6 5 6 8 8 9
|
| 204 |
+
LJ049-0212 7 10 7 5 6 7 11 7 5 6 6 7 7 6 7 7 7 7 7 8 10 12 8 7 6 6 6 5 5 6 7 7 8 8 9 7 10 6 6 5 6 8 7 7 8 7 10 12 8 9
|
| 205 |
+
LJ049-0213 6 5 5 8 8 9 8 7 8 9 8 8 7 7 8 8 6 5 5 6 7 9 6 5 5 6 6 6 7 6 7 5 4 5 6 6 7 10 8 8
|
| 206 |
+
LJ049-0214 6 5 6 6 5 6 8 10 9 8 7 13 5 5 6 6 6 7 7 7 6 6 6 7 5 5 8 7 6 7 6 8 10 10 5 7 7 7 6 4 5 6 6 7 8 12 7 6 8 11 5 7 9 11 9 9 7 7 6 6 5 5 5 8 9 10 10 11 12 12 10 27 5 5 3 6 5 6 7 6 6 6 7 13 9 14
|
| 207 |
+
LJ049-0215 5 5 4 4 6 6 6 6 5 5 5 5 4 4 5 5 5 5 6 5 5 6 6 19 10 6 6 5 8 8 11 10 9 7 7 5 7 6 5 6 7 5 5 6 21 9 8 8 5 6 6 7 6 7 5 4 5 6 6 6 10 11 14 7 8 7 6 7 6 6 7 6 7 8 6 7 6 6 8 10 10 12 8 8 6 7 6 4 4 5 6 6 6 5 5 4 4 4 5 6 10 9 14 9 8
|
| 208 |
+
LJ049-0216 6 56 11 17 10 6 8 10 16 24 23 26 6 5 6 5 6 5 6 6 6 7 7 8 8 6 7 6 6 6 6 6 6 6 8 11 6 6 8 11 9 7 8 8 7 6 8 7 6 6 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 209 |
+
LJ049-0217 8 9 9 11 8 12 22 10 7 5 5 5 6 6 6 6 7 7 6 5 6 5 5 5 5 6 6 5 6 7 6 7 6 5 9 7 7 6 5 5 8 9 13 8 6 8 7 7 9 10 10 8 5 5 4 5 7 6 6 5 5 4 5 5 8 9 11 9 8 8 9 8 10 11 11 10 8 9 9 7 6 7 9 14
|
| 210 |
+
LJ049-0218 5 5 6 6 6 5 5 7 7 6 5 5 5 7 8 8 7 8 9 7 8 5 5 5 5 5 6 6 5 5 7 6 7 13 11 5 6 7 8 7 8 6 6 7 18 9 8 7 7 6 5 5 5 6 7 8 7 7 5 7
|
| 211 |
+
LJ049-0219 13 11 8 5 5 7 6 5 8 9 8 8 7 6 5 5 5 5 6 7 8 9 7 4 5 5 5 6 6 6 8 8 6 6 5 5 6 8 15 13
|
| 212 |
+
LJ049-0220 4 4 5 5 4 5 8 8 7 5 4 6 8 8 6 5 5 5 6 6 7 8 6 5 6 7 7 5 5 7 8 6 5 6 6 9 6 7 8 8 10 9 7 6 6 6 5 5 6 6 7 6 7 5 4 5 6 6 7 9 11 8 7 6 8 6 5 5 6 6 8 6 7 7 6 5 5 8 7 7 7 11 6 5 5 5 6 6 7 6 5 5 6 8 8 7 8 9 5 5 6 7 10 7 5 7 10 12 10
|
| 213 |
+
LJ049-0221 6 5 7 9 9 6 7 7 8 7 6 6 5 4 4 4 4 7 9 8 7 6 6 7 11 7 6 10 10 10 8 8 8 8 7 7 7 10 34 8 7 6 6 8 8 5 5 4 6 7 6 6 5 5 5 6 7 5 5 7 11 7 6 5 6 6 5 5 5 5 5 8 8 13 13 10 6 6 6 6 7 7 6 6 6 6 5 6 6 6 5 5 5 7 8 8 8 9 8 6 6 8
|
| 214 |
+
LJ049-0222 7 6 7 5 5 7 7 7 9 8 7 8 6 7 8 7 5 5 6 8 9 6 5 4 7 6 7 6 11 20 8 7 8 6 6 6 6 7 7 7 7 7 5 6 5 5 6 7 6 5 5 4 5 5 5 7 7 7 7 7 9 7 6 9 6 7 8 9 8 7 6 5 5 5 7 8 10 10 15
|
| 215 |
+
LJ049-0223 4 6 6 10 8 6 6 6 5 5 5 4 5 7 6 7 6 5 5 6 8 8 7 6 7 11 6 7 7 7 6 7 6 6 10 12 7 6 7 10 8 7 6 6 6 5 6 7 5 6 6 22 7 6 6 8 8 7 5 6 6 7 7 6 7 8 8 10 10 4 7 7 14 18 28 11 7 6 7 6 6 5 7 5 7 9 10 10 16
|
| 216 |
+
LJ049-0224 4 7 10 10 7 5 7 8 10 7 6 6 7 7 7 6 6 6 5 5 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 217 |
+
LJ049-0225 5 5 5 5 5 6 7 6 10 9 9 11 12 8 7 7 6 5 5 5 7 8 8 7 8 8 6 8 7 6 55 14 18 23 9 9 7 6 8 7 6 6 7 6 7 8 5 5 6 6 6 6 5 5 5 6 8 9 9 7 5 8 10 15 28 13 8 4 4 4 5 5 5 6 6 5 5 4 5 4 6 6 8 6 6 6 7 7 6 12 12 9 8
|
| 218 |
+
LJ049-0226 7 5 5 5 6 7 8 11 8 6 5 6 6 6 7 6 7 5 6 6 5 6 8 8 9 7 10 10 9 6 8 6 5 6 8 8 10 10 12 23 14 13 9 6 7 7 7 7 8 7 17 10 6 9 9 8 6 6 6 6 5 7 8 8 8 6 7 8 10 7 6 8
|
| 219 |
+
LJ049-0227 5 6 8 14 15 4 5 7 7 10 10 10 9 8 6 5 5 6 6 8 8 8 6 6 6 6 5 6 7 7 8 7 7 6 7 6 6 6 6 6 6 5 5 5 5 7 8 10 9 9 9 7 6 7 5 5 5 5 7 8 8 7 6 6 5 4 4 5 5 5 6 6 5 5 5 4 4 11 6 6 6 6 5 6 6 9 8 9
|
| 220 |
+
LJ049-0228 5 5 5 5 5 5 7 6 5 5 6 8 11 8 9 22 6 6 6 8 7 9 7 6 5 5 5 5 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 5 5 5 6 8 8 9 8 9 7 10 9 7 5 5 4 7 8 9 9 11 9 11 11 9
|
| 221 |
+
LJ049-0229 9 8 7 8 12 8 6 6 7 7 14 8 8 6 6 5 5 7 6 7 8 8 10 9 7 6 5 6 5 5 6 6 7 6 7 5 4 5 6 6 7 10 12 11 9
|
| 222 |
+
LJ049-0230 10 12 7 7 5 6 7 6 5 5 8 7 6 6 6 6 6 5 5 5 5 6 8 7 7 7 6 6 6 6 8 8 10 6 5 5 7 6 5 6 6 7 7 5 5 5 5 6 6 6 4 9 7 6 7 6 5 2 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 223 |
+
LJ050-0001 10 7 8 6 7 8 7 6 8 5 7 10 10 9 6 4 13 11 7 7 7 7 6 5 7 10 13 14 40 9 8 13 11 6 6 6 7 5 5 6 6 5 5 7 10 9 9 8 10 11 12 13 10 25 7 7 8 7 6 5 5 5 4 5 5 5 6 6 5 4 5 4 3 4 5 6 6 6 8 8 8 6 5 6 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 6 5 5 5 5 5 6 6 5 5 7 6 8
|
| 224 |
+
LJ050-0002 5 7 6 6 6 5 5 5 6 5 6 7 6 6 5 6 8 9 10 10 9
|
| 225 |
+
LJ050-0003 7 8 12 5 5 5 6 6 5 4 5 4 3 4 5 5 6 6 7 7 7 5 4 6 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 6 5 5 8 10 31 11 8 8 9 14 18 15 29 7 5 4 4 6 6 7 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 7 9 12 8 6 7 11 11 18 11 7
|
| 226 |
+
LJ050-0004 8 6 6 6 6 8 7 7 7 7 7 6 6 6 5 5 5 5 7 7 6 5 5 5 7 8 9 7 9 18
|
| 227 |
+
LJ050-0005 7 7 7 6 5 5 5 7 9 7 5 5 5 7 7 6 5 5 5 7 8 7 5 7 8 8 7 6 6 7 7 8 6 7 8 9 22 6 5 5 6 7 9 8 11 12 22
|
| 228 |
+
LJ050-0006 4 6 7 5 6 6 7 8 5 5 6 8 8 7 6 8 9 6 5 5 4 6 5 4 6 6 6 6 5 6 9 6 5 5 6 6 8 7 8 7 7 7 8 7 7 7 7 6 6 5 6 9 8 7 6 5 5 3 4 5 6 6 6 5 4 4 5 5 5 5 5 5 6 6 7 9 12 15
|
| 229 |
+
LJ050-0007 5 6 7 7 8 5 6 6 5 6 6 6 6 6 6 6 4 4 4 5 5 6 6 7 8 9 11 8 7 6 8 7 10 9 9 8 8 10 10 6 5 7 5 5 6 7 6 3 5 6 6 8 9 8 7 7 7 8 7 7 6 8
|
| 230 |
+
LJ050-0008 5 5 5 5 5 5 7 5 5 5 6 8 11 8 9 22 6 5 5 5 6 5 5 6 6 5 5 5 5 6 9 7 5 5 4 6 7 6 6 5 5 5 6 7 5 6 8 7 5 6 6 6 6 7 7 6 6 9 8 6 7 5 6 5 6 5 5 5 5 6 6 10 8 8 6 8 7 6 7 6 7 10 8 7 6 7 8 7 7 7 7 7 7 10 14
|
| 231 |
+
LJ050-0009 6 6 8 7 8 11 8 7 6 6 6 5 5 5 5 6 7 7 7 7 6 4 5 6 6 5 5 7 9 9 9 11 52 12 8 8 7 9 9 11 12 8 6 4 5 8 8 8 6 6 12 8 9 7 8 13 8 5 5 6 6 8 10 7 6 9 10 7 7 11 8 9 9 9 8 8 6 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 232 |
+
LJ050-0010 9 11 9 9 9 7 7 7 7 6 6 6 5 5 4 5 8 9 9 9 8 8 7 6 7 5 5 5 7 7 6 5 5 5 7 8 8 6 8 13 11 5 5 5 5 5 4 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 9 10 7 9 6 5 6 6 6 6 8 10 15 9 8 8
|
| 233 |
+
LJ050-0011 6 6 9 9 8 5 5 5 8 9 8 6 7 10 9 12 7 7 8 8 8 7 14 6 5 5 7 7 6 6 4 6 6 8 8 6 5 4 5 5 6 6 8 10 12 15
|
| 234 |
+
LJ050-0012 8 7 15 7 6 8 7 7 7 6 5 5 6 6 6 6 4 6 6 7 9 10 14 9 8 8 7 8 9 8 11 9 8 10 7 7 7 6 6 5 5 5 8 8 7 7 8 11 7 7 7 7 6 6 6 5 5 5 5 6 5 6 7 9 11 7 5 5 7 6 7 8 16
|
| 235 |
+
LJ050-0013 4 5 6 5 3 4 5 6 6 6 5 4 4 4 4 5 5 6 7 6 6 6 4 5 13 8 7 7 8 8 8 7 7 9 5 8 11 11 23 7 7 6 9 8 11 8 8 7 7 6 5 5 5 6 6 5 5 6 7 7 7 6 6 4 6 6 7 7 6 5 4 5 5 6 6 7 8 10 8 7 8 7 8 8 7 6 7 6 7 5 6 8 9 7 8 7 8 7 9 10 11 13 10
|
| 236 |
+
LJ050-0014 6 6 6 7 7 6 6 5 5 4 7 9 8 8 7 11 8 6 6 4 5 5 5 6 6 5 6 6 5 5 6 8 9 8 8 7 10 6 4 6 9 10 9 8 7 6 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 237 |
+
LJ050-0015 5 5 5 5 5 6 7 6 7 6 6 5 7 7 9 9 6 6 4 6 6 6 7 7 6 6 4 6 6 8 8 6 5 4 5 5 6 6 7 9 9 17 9 8 7 8 6 8 7 6 6 6 6 7 9 7 7 7 6 6 5 10 6 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 9 17 7 8 9 8 6 6 9 11 8 6 6 5 8 9 9 7 9 14 10
|
| 238 |
+
LJ050-0016 7 6 10 7 6 6 6 6 6 8 7 7 7 6 5 4 4 6 6 5 5 6 6 6 7 8 10 10 24 6 5 5 5 6 7 8 6 6 7 5 5 6 9 8 11 12 11 19 9 5 5 6 8 6 7 8 7 6 6 8 7 9 11 8 7 6 8 7 8 7 5 6 6 9 27 7 8 7 7 6 6 7 7 7 8 13 13 13 10 10 11 9 11 10 10 9 13
|
| 239 |
+
LJ050-0017 5 4 7 8 8 7 5 5 5 6 4 5 5 7 8 7 10 7 7 7 6 6 7 5 5 6 7 6 3 5 6 6 8 9 8 7 7 7 7 7 10 14
|
| 240 |
+
LJ050-0018 5 7 13 6 7 7 8 7 6 7 7 6 5 4 6 6 6 8 8 10 7 8 7 7 7 8 8 6 5 5 9 6 6 6 5 4 5 8 14 8 8 6 5 5 6 5 6 5 5 5 2 5 6 5 6 5 5 5 4 5 4 4 6 6 6 7 6 6 6 6 6 6 5 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 241 |
+
LJ050-0019 7 6 8 6 7 8 8 7 7 4 7 6 7 6 6 5 6 8 8 7 7 5 4 7 9 9 9 7 6 5 5 4 6 7 7 7 9 8 7 17
|
| 242 |
+
LJ050-0020 8 6 6 7 7 5 5 5 5 5 6 7 6 5 5 6 8 12 8 9 23 6 6 6 7 7 6 5 5 5 7 8 8 6 7 10 5 6 9 11 10 9 7 6 6 5 4 5 6 6 7 7 7 6 6 7 5 5 7 6 7 8 6 6 11 8 6 5 5 5 8 8 11 8 9
|
| 243 |
+
LJ050-0021 11 9 7 7 7 6 7 7 7 8 9 8 6 7 23 10 5 6 6 6 7 7 8 8 8 6 7 7 9 8 24 8 5 5 6 8 7 6 6 6 7 8 7 7 6 6 8 6 6 5 5 5 5 7 7 7 7 7 10 8 7 7 8 8 5 5 7 7 8 7 6 5 5 5 5 6 7 6 5 5 5 5 23 7 5 5 5 5 6 6 5 5 5 6 6 6 7 7 9 7 6 8
|
| 244 |
+
LJ050-0022 9 11 7 6 5 7 7 3 7 8 9 9 10 6 5 7 4 5 5 6 5 6 6 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 8 9 8 6 5 5 5 5 5 8 11 8 12 11 7 6 6 7 8 7 13 21 8 6 5 6 6 8 9 8 8 7 7 6 7 7 8 9 9 7 9 14 10
|
| 245 |
+
LJ050-0023 9 10 8 7 7 8 6 6 8 10 8 8 7 8 13
|
| 246 |
+
LJ050-0024 5 4 4 4 5 4 4 5 5 5 6 8 9 8 8 6 6 6 8 7 7 6 5 7 7 7 7 7 10 7 7 8 6 6 6 7 6 6 5 5 4 6 6 6 6 6 6 5 6 9 8 13 8 8 5 5 5 7 7 6 5 5 5 7 8 8 7 8 8 5 7 8 8 8 11 10 6 6 8 11 10 8 6 5 6 6 7 6 5 6 6 6 8 8 8 8 9 7 18
|
| 247 |
+
LJ050-0025 19 9 7 5 4 4 5 8 6 6 6 10 10 10 9 11 8 7 7 7 6 7 7 7 14 6 6 6 7 8 8 10 10 9
|
| 248 |
+
LJ050-0026 5 5 5 5 5 6 7 9 10 11 10 10 7 13 6 6 6 9 9 8 6 6 28 6 5 6 7 6 4 4 5 7 7 10 9 9 9 8 7 9 9 7 7 9 12 9 7 6 5 5 8 8 12 8 7 5 6 5 6 8 9 11 10 13 13 10
|
| 249 |
+
LJ050-0027 8 7 8 7 8 6 6 6 7 6 5 7 7 6 6 6 10 8 9 9 10 12 9 8 8 9 6 6 6 5 5 9 8 6 8 10 7 6 7 5 6 8 10 9 9 9 9 10 10 10 9
|
| 250 |
+
LJ050-0028 11 18 15 11 8 5 5 6 6 7 6 5 5 5 7 8 9 9 8 15 8 7 6 8 11 11 9 9 7 6 7 6 8 11 9 14 5 6 8 6 5 6 7 7 7 6 6 8 6 7 9 8 8 9 10 8 17 8 7 7 9 6 6 8 9 8 8 6 6 6 5 6 6 6 7 6 7 5 4 5 6 6 7 11 16
|
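Each line of the durations file above is plain Kaldi/ESPnet-style text: an utterance ID followed by one integer per input token, here presumably the number of acoustic frames the model assigned to that token during decoding. A minimal parsing sketch (the path simply mirrors the file shown above; adjust it to wherever the repository is checked out):

```python
# Minimal sketch: read an ESPnet-style durations file, where each line is
# "<utt_id> <dur_1> <dur_2> ...", into a dict mapping utterance IDs to lists of ints.
from pathlib import Path

def read_durations(path):
    durations = {}
    for line in Path(path).read_text().splitlines():
        if not line.strip():
            continue
        utt_id, *values = line.split()
        durations[utt_id] = [int(v) for v in values]
    return durations

durs = read_durations("imdanboy/jets/decode_train.loss.ave/dev/durations")
# Total number of frames predicted for one dev utterance, e.g. LJ049-0008.
print(sum(durs["LJ049-0008"]))
```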
imdanboy/jets/decode_train.loss.ave/dev/feats_type
ADDED
@@ -0,0 +1 @@
+raw
imdanboy/jets/decode_train.loss.ave/dev/log/keys.1.scp
ADDED
@@ -0,0 +1,32 @@
| 1 |
+
LJ049-0008 and detailing police in civilian clothes to be scattered throughout the sizable crowd.
|
| 2 |
+
LJ049-0009 When President and Mrs. Kennedy shook hands with members of the public along the fences surrounding the reception area, they were closely guarded by Secret Service agents
|
| 3 |
+
LJ049-0010 who responded to the unplanned event with dispatch.
|
| 4 |
+
LJ049-0011 As described in chapter two, the President directed that his car stop on two occasions during the motorcade so that he could greet members of the public.
|
| 5 |
+
LJ049-0012 At these stops, agents from the Presidential follow-up car stood between the President and the public,
|
| 6 |
+
LJ049-0013 and on one occasion Agent Kellerman left the front seat of the President's car to take a similar position.
|
| 7 |
+
LJ049-0014 The Commission regards such impromptu stops as presenting an unnecessary danger,
|
| 8 |
+
LJ049-0015 but finds that the Secret Service agents did all that could have been done to take protective measures.
|
| 9 |
+
LJ049-0016 The Presidential limousine.
|
| 10 |
+
LJ049-0017 The limousine used by President Kennedy in Dallas was a convertible with a detachable, rigid plastic "bubble" top
|
| 11 |
+
LJ049-0018 which was neither bulletproof nor bullet resistant.
|
| 12 |
+
LJ049-0019 The last Presidential vehicle with any protection against small-arms fire left the White House in nineteen fifty-three.
|
| 13 |
+
LJ049-0020 It was not then replaced because the state of the art did not permit the development of a bulletproof top of sufficiently light weight
|
| 14 |
+
LJ049-0021 to permit its removal on those occasions when the President wished to ride in an open car.
|
| 15 |
+
LJ049-0022 The Secret Service believed that it was very doubtful that any President would ride regularly in a vehicle with a fixed top, even though transparent.
|
| 16 |
+
LJ049-0023 Since the assassination, the Secret Service, with the assistance of other Federal agencies and of private industry,
|
| 17 |
+
LJ049-0024 has developed a vehicle for the better protection of the President.
|
| 18 |
+
LJ049-0025 Access to passenger compartment of Presidential car.
|
| 19 |
+
LJ049-0026 On occasion the Secret Service has been permitted to have an agent riding in the passenger compartment with the President.
|
| 20 |
+
LJ049-0027 Presidents have made it clear, however, that they did not favor this or any other arrangement which interferes with the privacy of the President and his guests.
|
| 21 |
+
LJ049-0028 The Secret Service has therefore suggested this practice only on extraordinary occasions.
|
| 22 |
+
LJ049-0029 Without attempting to prescribe or recommend specific measures which should be employed for the future protection of Presidents,
|
| 23 |
+
LJ049-0030 the Commission does believe that there are aspects of the protective measures employed in the motorcade at Dallas which deserve special comment.
|
| 24 |
+
LJ049-0031 The Presidential vehicle in use in Dallas, described in chapter two,
|
| 25 |
+
LJ049-0032 had no special design or equipment which would have permitted the Secret Service agent riding in the driver's compartment
|
| 26 |
+
LJ049-0033 to move into the passenger section without hindrance or delay. Had the vehicle been so designed it is possible that an agent riding in the front seat
|
| 27 |
+
LJ049-0034 could have reached the President in time to protect him from the second and fatal shot to hit the President.
|
| 28 |
+
LJ049-0035 However, such access to the President was interfered with both by the metal bar some fifteen inches above the back of the front seat
|
| 29 |
+
LJ049-0036 and by the passengers in the jump seats.
|
| 30 |
+
LJ049-0037 In contrast, the Vice Presidential vehicle, although not specially designed for that purpose,
|
| 31 |
+
LJ049-0038 had no passenger in a jump seat between Agent Youngblood and Vice President Johnson to interfere with Agent Youngblood's ability
|
| 32 |
+
LJ049-0039 to take a protective position in the passenger compartment before the third shot was fired.
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.2.scp
ADDED
@@ -0,0 +1,32 @@
| 1 |
+
LJ049-0040 The assassination suggests that it would have been of prime importance
|
| 2 |
+
LJ049-0041 in the protection of the President if the Presidential car permitted immediate access to the President by a Secret Service agent at the first sign of danger.
|
| 3 |
+
LJ049-0042 At that time the agents on the framing boards of the follow-up car were expected to perform such a function.
|
| 4 |
+
LJ049-0043 However, these agents could not reach the President's car when it was traveling at an appreciable rate of speed.
|
| 5 |
+
LJ049-0044 Even if the car is traveling more slowly, the delay involved in reaching the President may be crucial.
|
| 6 |
+
LJ049-0045 It is clear that at the time of the shots in Dallas, Agent Clinton J. Hill leaped to the President's rescue as quickly as humanly possible.
|
| 7 |
+
LJ049-0046 Even so, analysis of the motion picture films taken by amateur photographer Zapruder
|
| 8 |
+
LJ049-0047 reveals that Hill first placed his hand on the Presidential car at frame three forty-three, thirty frames
|
| 9 |
+
LJ049-0048 and therefore approximately one point six seconds after the President was shot in the head.
|
| 10 |
+
LJ049-0049 About three point seven seconds after the President received this wound,
|
| 11 |
+
LJ049-0050 Hill had both feet on the car and was climbing aboard to assist President and Mrs. Kennedy.
|
| 12 |
+
LJ049-0051 Planning for motorcade contingencies.
|
| 13 |
+
LJ049-0052 In response to inquiry by the Commission regarding the instructions to agents in a motorcade
|
| 14 |
+
LJ049-0053 of emergency procedures to be taken in a contingency such as that which actually occurred, the Secret Service responded, quote,
|
| 15 |
+
LJ049-0054 The Secret Service has consistently followed two general principles in emergencies involving the President.
|
| 16 |
+
LJ049-0055 All agents are so instructed.
|
| 17 |
+
LJ049-0056 The first duty of the agents in the motorcade is to attempt to cover the President as closely as possible and practicable
|
| 18 |
+
LJ049-0057 and to shield him by attempting to place themselves between the President and any source of danger.
|
| 19 |
+
LJ049-0058 Secondly, agents are instructed to remove the President as quickly as possible from known or impending danger.
|
| 20 |
+
LJ049-0059 Agents are instructed that it is not their responsibility to investigate or evaluate a present danger,
|
| 21 |
+
LJ049-0060 but to consider any untoward circumstances as serious and to afford the President maximum protection at all times.
|
| 22 |
+
LJ049-0061 No responsibility rests upon those agents near the President for the identification or arrest of any assassin or an attacker.
|
| 23 |
+
LJ049-0062 Their primary responsibility is to stay with and protect the President.
|
| 24 |
+
LJ049-0063 Beyond these two principles the Secret Service believes a detailed contingency or emergency plan is not feasible
|
| 25 |
+
LJ049-0064 because the variations possible preclude effective planning.
|
| 26 |
+
LJ049-0065 A number of steps are taken, however, to permit appropriate steps to be taken in an emergency.
|
| 27 |
+
LJ049-0066 For instance, the lead car always is manned by Secret Service agents familiar with the area and with local law enforcement officials;
|
| 28 |
+
LJ049-0067 the radio net in use in motorcades is elaborate and permits a number of different means of communication with various local points.
|
| 29 |
+
LJ049-0068 A doctor is in the motorcade.
|
| 30 |
+
LJ049-0069 This basic approach to the problem of planning for emergencies is sound.
|
| 31 |
+
LJ049-0070 Any effort to prepare detailed contingency plans might well have the undesirable effect of inhibiting quick and imaginative responses.
|
| 32 |
+
LJ049-0071 If the advance preparation is thorough, and the protective devices and techniques employed are sound,
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.3.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0072 those in command should be able to direct the response appropriate to the emergency. The Commission finds that the Secret Service agents in the motorcade
|
| 2 |
+
LJ049-0073 who were immediately responsible for the President's safety reacted promptly at the time the shots were fired.
|
| 3 |
+
LJ049-0074 Their actions demonstrate that the President and the Nation can expect courage and devotion to duty from the agents of the Secret Service.
|
| 4 |
+
LJ049-0075 Recommendations.
|
| 5 |
+
LJ049-0076 The Commission's review of the provisions for Presidential protection at the time of President Kennedy's trip to Dallas demonstrates the need for substantial improvements.
|
| 6 |
+
LJ049-0077 Since the assassination, the Secret Service and the Department of the Treasury
|
| 7 |
+
LJ049-0078 have properly taken the initiative in reexamining major aspects of Presidential protection.
|
| 8 |
+
LJ049-0079 Many changes have already been made and others are contemplated, some of them in response to the Commission's questions and informal suggestions.
|
| 9 |
+
LJ049-0080 Assassination a Federal Crime
|
| 10 |
+
LJ049-0081 There was no Federal criminal jurisdiction over the assassination of President Kennedy.
|
| 11 |
+
LJ049-0082 Had there been reason to believe that the assassination was the result of a conspiracy, Federal jurisdiction could have been asserted;
|
| 12 |
+
LJ049-0083 it has long been a Federal crime to conspire to injure any Federal officer, on account of, or while he is engaged in, the lawful discharge of the duties of his office.
|
| 13 |
+
LJ049-0084 Murder of the President has never been covered by Federal law, however, so that once it became reasonably clear that the killing was the act of a single person,
|
| 14 |
+
LJ049-0085 the State of Texas had exclusive jurisdiction.
|
| 15 |
+
LJ049-0086 It is anomalous that Congress has legislated in other ways touching upon the safety of the Chief Executive or other Federal officers,
|
| 16 |
+
LJ049-0087 without making an attack on the President a crime. Threatening harm to the President is a Federal offense,
|
| 17 |
+
LJ049-0088 as is advocacy of the overthrow of the Government by the assassination of any of its officers.
|
| 18 |
+
LJ049-0089 The murder of Federal judges, U.S. attorneys and marshals, and a number of other specifically designated
|
| 19 |
+
LJ049-0090 Federal law enforcement officers is a Federal crime.
|
| 20 |
+
LJ049-0091 Equally anomalous are statutory provisions which specifically authorize the Secret Service to protect the President,
|
| 21 |
+
LJ049-0092 without authorizing it to arrest anyone who harms him. The same provisions authorize the Service to arrest without warrant
|
| 22 |
+
LJ049-0093 persons committing certain offenses, including counterfeiting and certain frauds involving Federal checks or securities.
|
| 23 |
+
LJ049-0094 The Commission agrees with the Secret Service that it should be authorized to make arrests without warrant
|
| 24 |
+
LJ049-0095 for all offenses within its jurisdiction, as are FBI agents and Federal marshals.
|
| 25 |
+
LJ049-0096 There have been a number of efforts to make assassination a Federal crime, particularly after the assassination of President McKinley
|
| 26 |
+
LJ049-0097 and the attempt on the life of President-elect Franklin D. Roosevelt.
|
| 27 |
+
LJ049-0098 In nineteen oh two bills passed both Houses of Congress but failed of enactment when the Senate refused to accept the conference report.
|
| 28 |
+
LJ049-0099 A number of bills were introduced immediately following the assassination of President Kennedy.
|
| 29 |
+
LJ049-0100 The Commission recommends to the Congress that it adopt legislation which would:
|
| 30 |
+
LJ049-0101 Punish the murder or manslaughter of, attempt or conspiracy to murder, kidnaping of and assault upon
|
| 31 |
+
LJ049-0102 the President, Vice President, or other officer next in the order of succession to the Office of President, the President-elect and the Vice-President-elect,
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.4.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0103 whether or not the act is committed while the victim is in the performance of his official duties or on account of such performance.
|
| 2 |
+
LJ049-0104 Such a statute would cover the President and Vice President or, in the absence of a Vice President, the person next in order of succession.
|
| 3 |
+
LJ049-0105 During the period between election and inauguration, the President-elect and Vice-President-elect would also be covered.
|
| 4 |
+
LJ049-0106 Restricting the coverage in this way would avoid unnecessary controversy over the inclusion or exclusion of other officials who are in the order of succession
|
| 5 |
+
LJ049-0107 or who hold important governmental posts.
|
| 6 |
+
LJ049-0108 In addition, the restriction would probably eliminate a need for the requirement which has been urged as necessary for the exercise of Federal power,
|
| 7 |
+
LJ049-0109 that the hostile act occur while the victim is engaged in or because of the performance of official duties.
|
| 8 |
+
LJ049-0110 The governmental consequences of assassination of one of the specified officials give the United States ample power to act for its own protection.
|
| 9 |
+
LJ049-0111 The activities of the victim at the time an assassination occurs and the motive for the assassination
|
| 10 |
+
LJ049-0112 bear no relationship to the injury to the United States which follows from the act.
|
| 11 |
+
LJ049-0113 This point was ably made in the nineteen oh two debate by Senator George F. Hoar, the sponsor of the Senate bill, quote,
|
| 12 |
+
LJ049-0114 what this bill means to punish is the crime of interruption of the Government of the United States and the destruction of its security by striking down the life
|
| 13 |
+
LJ049-0115 of the person who is actually in the exercise of the executive power, or
|
| 14 |
+
LJ049-0116 of such persons as have been constitutionally and lawfully provided to succeed thereto in case of a vacancy. It is important to this country
|
| 15 |
+
LJ049-0117 that the interruption shall not take place for an hour, end quote.
|
| 16 |
+
LJ049-0118 Enactment of this statute would mean that the investigation of any of the acts covered and of the possibility of a further attempt
|
| 17 |
+
LJ049-0119 would be conducted by Federal law enforcement officials, in particular, the FBI with the assistance of the Secret Service.
|
| 18 |
+
LJ049-0120 At present, Federal agencies participate only upon the sufferance of the local authorities.
|
| 19 |
+
LJ049-0121 While the police work of the Dallas authorities in the early identification and apprehension of Oswald was both efficient and prompt,
|
| 20 |
+
LJ049-0122 FBI Director J. Edgar Hoover, who strongly supports such legislation, testified that the absence of clear Federal jurisdiction
|
| 21 |
+
LJ049-0123 over the assassination of President Kennedy led to embarrassment and confusion in the subsequent investigation by Federal and local authorities.
|
| 22 |
+
LJ049-0124 In addition, the proposed legislation will insure
|
| 23 |
+
LJ049-0125 that any suspects who are arrested will be Federal prisoners, subject to Federal protection from vigilante justice and other threats.
|
| 24 |
+
LJ049-0126 Committee of Cabinet Officers. As our Government has become more complex,
|
| 25 |
+
LJ049-0127 agencies other than the Secret Service have become involved in phases of the overall problem of protecting our national leaders.
|
| 26 |
+
LJ049-0128 The FBI is the major domestic investigating agency of the United States,
|
| 27 |
+
LJ049-0129 while the CIA has the primary responsibility for collecting intelligence overseas to supplement information acquired by the Department of State.
|
| 28 |
+
LJ049-0130 The Secret Service must rely in large part
|
| 29 |
+
LJ049-0132 The Commission believes that it is necessary to improve the cooperation among these agencies
|
| 30 |
+
LJ049-0133 and to emphasize that the task of Presidential protection is one of broad national concern.
|
| 31 |
+
LJ049-0134 The Commission suggests that consideration might be given to assigning to a Cabinet-level committee or the National Security Council
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.5.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0135 (which is responsible for advising the President respecting the coordination
|
| 2 |
+
LJ049-0136 of departmental policies relating to the national security) the responsibility to review and oversee the protective activities of the Secret Service
|
| 3 |
+
LJ049-0137 and the other Federal agencies that assist in safeguarding the President. The Committee should include the Secretary of the Treasury and the Attorney General,
|
| 4 |
+
LJ049-0138 and, if the Council is used, arrangements should be made for the attendance of the Secretary of the Treasury
|
| 5 |
+
LJ049-0139 and the Attorney General at any meetings which are concerned with Presidential protection.
|
| 6 |
+
LJ049-0140 The Council already includes, in addition to the President and Vice President, the Secretaries of State and Defense and has a competent staff.
|
| 7 |
+
LJ049-0141 The foremost assignment of the Committee would be to insure that the maximum resources of the Federal Government are fully engaged in the job of protecting the President,
|
| 8 |
+
LJ049-0142 by defining responsibilities clearly and overseeing their execution.
|
| 9 |
+
LJ049-0143 Major needs of personnel or other resources might be met more easily on its recommendation than they have been in the past.
|
| 10 |
+
LJ049-0144 The Committee would be able to provide guidance in defining the general nature of domestic and foreign dangers to Presidential security.
|
| 11 |
+
LJ049-0145 As improvements are recommended for the advance detection of potential threats to the President, it could act as a final review board.
|
| 12 |
+
LJ049-0146 The expert assistance and resources which it could draw upon would be particularly desirable in this complex and sensitive area.
|
| 13 |
+
LJ049-0147 This arrangement would provide a continuing high-level contact for agencies that may wish to consult respecting particular protective measures.
|
| 14 |
+
LJ049-0148 For various reasons the Secret Service has functioned largely as an informal part of the White House staff, with the result
|
| 15 |
+
LJ049-0149 that it has been unable, as a practical matter, to exercise sufficient influence over the security precautions which surround Presidential activities.
|
| 16 |
+
LJ049-0150 A Cabinet-level committee which is actively concerned with these problems would be able to discuss these matters more effectively with the President.
|
| 17 |
+
LJ049-0151 Responsibilities for Presidential Protection
|
| 18 |
+
LJ049-0152 The assignment of the responsibility of protecting the President to an agency of the Department of the Treasury was largely an historical accident.
|
| 19 |
+
LJ049-0153 The Secret Service was organized as a division of the Department of the Treasury in eighteen sixty-five, to deal with counterfeiting.
|
| 20 |
+
LJ049-0154 In eighteen ninety-four,
|
| 21 |
+
LJ049-0155 while investigating a plot to assassinate President Cleveland, the Service assigned a small protective detail of agents to the White House.
|
| 22 |
+
LJ049-0156 Secret Service men accompanied the President and his family to their vacation home in Massachusetts
|
| 23 |
+
LJ049-0157 and special details protected him in Washington, on trips, and at special functions.
|
| 24 |
+
LJ049-0158 These informal and part-time arrangements led to more systematic protection in nineteen oh two, after the assassination of President McKinley;
|
| 25 |
+
LJ049-0159 the Secret Service, then the only Federal investigative agency, assumed full-time responsibility for the safety of the President.
|
| 26 |
+
LJ049-0160 Since that time, the Secret Service has had and exercised responsibility for the physical protection of the President
|
| 27 |
+
LJ049-0161 and also for the preventive investigation of potential threats against the President.
|
| 28 |
+
LJ049-0162 Although the Secret Service has had the primary responsibility for the protection of the President,
|
| 29 |
+
LJ049-0163 the FBI, which was established within the Department of Justice in nineteen oh eight, has had in recent years an increasingly important role to play.
|
| 30 |
+
LJ049-0164 In the appropriations of the FBI there has recurred annually an item for the, quote, protection of the person of the President of the United States, end quote,
|
| 31 |
+
LJ049-0165 which first appeared in the appropriation of the Department of Justice in nineteen ten under the heading, quote, Miscellaneous Objects, end quote.
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.6.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0166 Although the FBI is not charged with the physical protection of the President, it does have an assignment, as do other Government agencies,
|
| 2 |
+
LJ049-0167 in the field of preventive investigation in regard to the President's security.
|
| 3 |
+
LJ049-0168 As discussed above, the Bureau has attempted to meet its responsibilities in this field by spelling out in its Handbook
|
| 4 |
+
LJ049-0169 the procedures which its agents are to follow in connection with information received, quote,
|
| 5 |
+
LJ049-0170 indicating the possibility of an attempt against the person or safety of the President, end quote, or other protected persons.
|
| 6 |
+
LJ049-0171 With two Federal agencies operating in the same general field of preventive investigation,
|
| 7 |
+
LJ049-0172 questions inevitably arise as to the scope of each agency's authority and responsibility.
|
| 8 |
+
LJ049-0173 As the testimony of J. Edgar Hoover and other Bureau officials revealed, the FBI did not believe that its directive required the Bureau
|
| 9 |
+
LJ049-0174 to notify the Secret Service of the substantial information about Lee Harvey Oswald which the FBI had accumulated
|
| 10 |
+
LJ049-0175 before the President reached Dallas.
|
| 11 |
+
LJ049-0176 On the other hand, the Secret Service had no knowledge whatever of Oswald, his background, or his employment at the Book Depository,
|
| 12 |
+
LJ049-0177 and Robert I. Bouck, who was in charge of the Protective Research Section of the Secret Service, believed that the accumulation of the facts known to the FBI
|
| 13 |
+
LJ049-0178 should have constituted a sufficient basis to warn the Secret Service of the Oswald risk.
|
| 14 |
+
LJ049-0179 The Commission believes that both the FBI and the Secret Service have too narrowly construed their respective responsibilities.
|
| 15 |
+
LJ049-0180 The Commission has the impression
|
| 16 |
+
LJ049-0181 that too much emphasis is placed by both on the investigation of specific threats by individuals and not enough on dangers from other sources.
|
| 17 |
+
LJ049-0182 In addition, the Commission has concluded that the Secret Service particularly tends to be the passive recipient of information
|
| 18 |
+
LJ049-0183 regarding such threats and that its Protective Research Section is not adequately staffed or equipped
|
| 19 |
+
LJ049-0184 to conduct the wider investigative work that is required today for the security of the President.
|
| 20 |
+
LJ049-0185 During the period the Commission was giving thought to this situation,
|
| 21 |
+
LJ049-0186 the Commission received a number of proposals designed to improve current arrangements for protecting the President.
|
| 22 |
+
LJ049-0187 These proposals included suggestions to locate exclusive responsibility for all phases of the work
|
| 23 |
+
LJ049-0188 in one or another Government agency, to clarify the division of authority between the agencies involved, and to retain the existing system
|
| 24 |
+
LJ049-0189 but expand both the scope and the operations of the existing agencies, particularly those of the Secret Service and the FBI.
|
| 25 |
+
LJ049-0190 It has been pointed out that the FBI, as our chief investigative agency,
|
| 26 |
+
LJ049-0191 is properly manned and equipped to carry on extensive information gathering functions within the United States.
|
| 27 |
+
LJ049-0192 It was also suggested that it would take a substantial period of time for the Secret Service to build up the experience and skills necessary to meet the problem.
|
| 28 |
+
LJ049-0193 Consequently the suggestion has been made, on the one hand, that all preventive investigative functions relating to the security of the President
|
| 29 |
+
LJ049-0194 should be transferred to the FBI,
|
| 30 |
+
LJ049-0195 leaving with the Secret Service only the responsibility for the physical protection of the President, that is, the guarding function alone.
|
| 31 |
+
LJ049-0196 On the other hand, it is urged that all features of the protection of the President and his family should be committed to an elite and independent corps.
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.7.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0197 It is also contended that the agents should be intimately associated with the life of the Presidential family
|
| 2 |
+
LJ049-0198 in all its ramifications and alert to every danger that might befall it,
|
| 3 |
+
LJ049-0199 and ready at any instant to hazard great danger to themselves in the performance of their tremendous responsibility.
|
| 4 |
+
LJ049-0200 It is suggested that an organization shorn of its power to investigate all the possibilities of danger to the President
|
| 5 |
+
LJ049-0201 and becoming merely the recipient of information gathered by others would become limited solely to acts of physical alertness and personal courage
|
| 6 |
+
LJ049-0202 incident to its responsibilities.
|
| 7 |
+
LJ049-0203 So circumscribed, it could not maintain the esprit de corps or the necessary alertness for this unique and challenging responsibility.
|
| 8 |
+
LJ049-0204 While in accordance with its mandate
|
| 9 |
+
LJ049-0205 this Commission has necessarily examined into the functioning of the various Federal agencies concerned with the tragic trip of President Kennedy to Dallas
|
| 10 |
+
LJ049-0206 and while it has arrived at certain conclusions in respect thereto, it seems clear
|
| 11 |
+
LJ049-0207 that it was not within the Commission's responsibility to make specific recommendations as to the long-range organization of the President's protection,
|
| 12 |
+
LJ049-0208 except as conclusions flowing directly from its examination of the President's assassination can be drawn.
|
| 13 |
+
LJ049-0209 The Commission was not asked to apply itself as did the Hoover Commission in nineteen forty-nine,
|
| 14 |
+
LJ049-0210 for examples to a determination of the optimum organization of the President's protection.
|
| 15 |
+
LJ049-0211 It would have been necessary for the Commission to take considerable testimony, much of it extraneous to the facts of the assassination of President Kennedy,
|
| 16 |
+
LJ049-0212 to put it in a position to reach final conclusions in this respect.
|
| 17 |
+
LJ049-0213 There are always dangers of divided responsibility,
|
| 18 |
+
LJ049-0214 duplication, and confusion of authority where more than one agency is operating in the same field; but on the other hand
|
| 19 |
+
LJ049-0215 the protection of the President is in a real sense a Government-wide responsibility which must necessarily assumed by the Department of State,
|
| 20 |
+
LJ049-0216 the FBI, the CIA, and the military intelligence agencies as well as the Secret Service.
|
| 21 |
+
LJ049-0217 Moreover, a number of imponderable questions have to be weighed if any change in the intimate association now established
|
| 22 |
+
LJ049-0218 between the Secret Service and the President and his family is contemplated.
|
| 23 |
+
LJ049-0219 These considerations have induced the Commission to believe
|
| 24 |
+
LJ049-0220 that the determination of whether or not there should be a relocation of responsibilities and functions should be left to the Executive and the Congress,
|
| 25 |
+
LJ049-0221 perhaps upon recommendations based on further studies by the Cabinet-level committee recommended above or the National Security Council.
|
| 26 |
+
LJ049-0222 Pending any such determination, however, this Commission is convinced of the necessity of better coordination
|
| 27 |
+
LJ049-0223 and direction of the activities of all existing agencies of Government which are in a position to and do, furnish information
|
| 28 |
+
LJ049-0224 and services related to the security of the President.
|
| 29 |
+
LJ049-0225 The Commission feels the Secret Service and the FBI, as well as the State Department and the CIA when the President travels abroad,
|
| 30 |
+
LJ049-0226 could improve their existing capacities and procedures so as to lessen the chances of assassination.
|
| 31 |
+
LJ049-0227 Without, therefore, coming to final conclusions respecting the long-range organization of the President's security,
|
imdanboy/jets/decode_train.loss.ave/dev/log/keys.8.scp
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0228 the Commission believes that the facts of the assassination of President Kennedy point to certain measures which,
|
| 2 |
+
LJ049-0229 while assuming no radical relocation of responsibilities,
|
| 3 |
+
LJ049-0230 can and should be recommended by this Commission in the interest of the more efficient protection of the President.
|
| 4 |
+
LJ050-0001 For more information, or to volunteer, please visit librivox dot org. Report of the President's Commission on the Assassination of President Kennedy.
|
| 5 |
+
LJ050-0002 The Warren Commission Report.
|
| 6 |
+
LJ050-0003 By The President's Commission on the Assassination of President Kennedy. Chapter eight. The Protection of the President. Part five.
|
| 7 |
+
LJ050-0004 General Supervision of the Secret Service
|
| 8 |
+
LJ050-0005 The intimacy of the Secret Service's relationship to the White House
|
| 9 |
+
LJ050-0006 and the dissimilarity of its protective functions to most activities of the Department of the Treasury
|
| 10 |
+
LJ050-0007 have made it difficult for the Treasury to maintain close and continuing supervision.
|
| 11 |
+
LJ050-0008 The Commission believes that the recommended Cabinet-level committee will help to correct many of the major deficiencies of supervision
|
| 12 |
+
LJ050-0009 disclosed by the Commission's investigation. Other measures should be taken as well to improve the overall operation of the Secret Service.
|
| 13 |
+
LJ050-0010 Daily supervision of the operations of the Secret Service within the Department of the Treasury should be improved.
|
| 14 |
+
LJ050-0011 The Chief of the Service now reports to the Secretary of the Treasury
|
| 15 |
+
LJ050-0012 through an Assistant Secretary whose duties also include the direct supervision of the Bureau of the Mint
|
| 16 |
+
LJ050-0013 and the Department's Employment Policy Program, and who also represents the Secretary of the Treasury on various committees and groups.
|
| 17 |
+
LJ050-0014 The incumbent has no technical qualifications in the area of Presidential protection.
|
| 18 |
+
LJ050-0015 The Commission recommends that the Secretary of the Treasury appoint a special assistant with the responsibility of supervising the Service.
|
| 19 |
+
LJ050-0016 This special assistant should be required to have sufficient stature and experience in law enforcement, intelligence, or allied fields
|
| 20 |
+
LJ050-0017 to be able to provide effective continuing supervision
|
| 21 |
+
LJ050-0018 and to keep the Secretary fully informed regarding all significant developments relating to Presidential protection.
|
| 22 |
+
LJ050-0019 This report has already pointed out several respects
|
| 23 |
+
LJ050-0020 in which the Commission believes that the Secret Service has operated with insufficient planning or control.
|
| 24 |
+
LJ050-0021 Actions by the Service since the assassination indicate its awareness of the necessity for substantial improvement in its administration.
|
| 25 |
+
LJ050-0022 A formal and thorough description of the responsibilities of the advance agent is now in preparation by the Service.
|
| 26 |
+
LJ050-0023 Work is going forward
|
| 27 |
+
LJ050-0024 toward the preparation of formal understandings of the respective roles of the Secret Service and other agencies with which it collaborates
|
| 28 |
+
LJ050-0025 or from which it derives assistance and support.
|
| 29 |
+
LJ050-0026 The Commission urges that the Service continue this effort to overhaul and define its procedures.
|
| 30 |
+
LJ050-0027 While manuals and memoranda are no guarantee of effective operations,
|
| 31 |
+
LJ050-0028 no sizable organization can achieve efficiency without the careful analysis and demarcation of responsibility
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.1/durations/durations
ADDED
|
@@ -0,0 +1,32 @@
| 1 |
+
LJ049-0008 5 7 9 6 10 9 8 7 6 7 6 6 7 9 11 8 7 7 6 6 6 7 6 6 5 8 9 18 9 8 6 5 6 7 8 8 9 7 7 11 8 7 9 8 8 6 9 9 9 8 5 5 5 6 9 10 16 10 9
|
| 2 |
+
LJ049-0009 5 5 6 5 5 5 6 5 5 6 5 4 4 5 5 5 5 6 5 7 7 5 5 5 7 8 9 7 7 9 10 7 6 5 7 6 7 6 5 5 5 7 8 6 4 4 5 6 6 6 5 6 6 6 6 7 7 6 7 6 6 7 7 6 12 13 9 7 7 5 5 4 5 4 6 7 7 7 6 6 6 8 9 10 10 11 30 9 6 6 7 7 7 8 9 7 7 7 6 5 4 5 5 7 11 10 6 5 5 5 7 8 7 5 6 9 10 7 6 5 6 17
|
| 3 |
+
LJ049-0010 3 7 6 8 7 7 7 6 5 6 5 6 5 6 7 8 8 7 7 7 6 5 6 7 8 7 8 6 5 6 6 8 8 9 19 18 10
|
| 4 |
+
LJ049-0011 13 8 6 6 7 7 8 10 7 6 6 7 8 8 8 6 9 16 21 20 5 5 5 5 6 6 5 5 5 5 3 9 8 7 6 6 6 5 7 6 5 5 6 7 7 7 8 8 8 10 13 14 5 6 6 8 9 11 8 7 7 14 7 6 6 5 4 5 6 6 7 7 7 10 13 12 9 8 5 5 5 5 5 5 5 7 7 7 8 7 5 5 5 5 7 8 6 4 5 6 6 6 6 6 6 8 6
|
| 5 |
+
LJ049-0012 8 8 7 9 9 8 8 12 11 10 46 13 7 6 4 4 6 5 4 4 4 4 5 5 5 5 5 5 5 5 6 6 5 10 8 6 7 8 6 7 8 10 9 13 7 6 6 5 5 6 6 5 5 5 5 5 5 6 6 5 5 7 6 8 8 9 6 4 6 6 7 6 6 6 6 7
|
| 6 |
+
LJ049-0013 5 9 9 8 9 6 5 5 7 9 10 8 7 8 11 7 5 5 6 7 7 7 6 6 7 4 7 6 6 5 5 5 6 5 5 6 7 9 9 7 6 4 4 5 5 5 6 6 5 4 4 4 3 7 9 12 12 9 7 7 6 6 8 10 5 5 6 6 7 6 7 7 7 8 7 6 8
|
| 7 |
+
LJ049-0014 5 5 5 5 5 6 7 6 5 6 6 7 6 7 7 24 9 8 8 6 7 7 6 5 6 6 7 7 7 8 10 9 9 8 7 5 5 5 6 6 6 6 6 5 5 11 10 7 6 6 7 8 7 7 8 9 7 8 7 9 9 9
|
| 8 |
+
LJ049-0015 5 7 22 9 12 8 8 5 5 5 6 6 7 7 6 4 5 5 7 8 7 5 6 9 10 7 6 4 5 7 6 7 8 9 6 5 5 7 7 5 3 4 4 5 5 5 7 8 9 16 7 6 7 6 6 4 4 7 6 6 6 6 6 7 8 10 11 14 12 10
|
| 9 |
+
LJ049-0016 5 5 5 5 6 5 5 5 5 6 6 5 4 5 5 5 8 8 12 9 9
|
| 10 |
+
LJ049-0017 9 7 5 6 5 7 8 10 9 8 9 7 5 7 10 6 5 5 5 5 5 4 4 5 6 6 5 5 6 7 7 8 9 10 8 8 9 8 5 6 6 6 6 6 6 5 5 5 6 8 6 9 5 5 6 5 8 9 10 8 6 6 8 10 35 9 7 6 5 5 6 7 9 9 6 6 8 5 5 5 5 6 10 14 13
|
| 11 |
+
LJ049-0018 5 6 6 5 6 6 7 9 8 10 6 6 6 6 5 6 7 9 23 11 9 11 6 5 6 5 5 6 7 7 7 8 6 7 8 7 9
|
| 12 |
+
LJ049-0019 5 7 8 11 9 7 5 5 5 5 5 5 5 6 6 6 6 6 3 5 6 6 6 4 6 5 7 7 6 7 5 4 7 6 6 6 6 7 4 6 6 6 7 7 8 8 7 10 12 9 7 8 11 9 10 11 7 6 6 5 5 6 7 8 8 8 9 11 8 6 6 7 7 7 7 7 8 7 7 7 8 9 12 10 9
|
| 13 |
+
LJ049-0020 8 5 5 6 6 6 7 7 6 5 7 6 7 7 7 9 7 4 7 5 6 7 6 6 7 6 7 8 8 5 5 5 9 9 8 6 6 4 5 6 7 7 7 6 5 5 7 5 5 5 6 5 6 6 5 5 5 5 5 5 5 5 8 6 5 5 5 6 6 7 7 8 8 8 7 5 6 7 7 6 6 6 5 5 4 6 8 8 9 10 9 13 17
|
| 14 |
+
LJ049-0021 8 8 7 7 6 6 5 6 5 5 6 6 7 9 6 6 6 6 7 7 9 6 6 9 11 8 8 8 10 7 4 5 4 5 5 5 6 6 5 5 5 5 5 6 6 7 5 7 8 10 16 15 10 5 6 8 10 7 6 8 10 13 10 9
|
| 15 |
+
LJ049-0022 6 7 7 6 5 5 5 7 8 7 6 7 9 6 6 8 11 7 10 4 6 5 5 5 5 6 7 6 7 7 6 7 9 8 7 7 8 13 7 6 7 6 7 6 6 5 6 5 5 5 5 6 4 5 7 9 11 11 7 5 4 5 5 6 5 8 16 6 5 7 6 5 5 6 6 6 5 7 5 6 9 6 6 6 6 8 10 13 17 27 12 6 4 5 6 9 8 7 7 8 7 7 8 7 6 7 7 9
|
| 16 |
+
LJ049-0023 10 6 6 6 6 7 8 8 8 8 6 7 8 10 9 11 27 7 7 7 6 5 5 5 7 8 9 7 9 12 24 5 5 5 6 7 8 7 7 7 6 6 6 5 5 6 6 7 9 8 5 6 6 6 9 10 6 6 8 11 10 7 6 11 5 5 7 7 7 7 7 5 9 7 5 5 7 6 7 8 9 8
|
| 17 |
+
LJ049-0024 5 8 6 6 5 5 6 6 5 5 5 7 6 4 4 6 6 6 5 4 4 4 5 6 6 6 7 8 4 4 6 6 6 6 5 6 7 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 18 |
+
LJ049-0025 12 9 7 8 8 6 8 8 8 8 7 5 7 7 5 5 6 6 6 6 5 4 4 5 5 5 6 4 5 5 5 5 5 5 6 6 5 6 10 12 10 10
|
| 19 |
+
LJ049-0026 10 6 7 9 11 7 9 10 13 7 7 6 4 5 5 7 8 8 6 7 9 6 7 6 6 5 6 7 7 6 6 6 6 8 6 5 6 6 5 5 8 12 8 7 8 8 8 9 6 5 3 4 5 4 5 8 8 8 7 4 8 7 6 5 6 6 6 6 5 5 5 5 9 6 5 5 5 5 5 5 6 6 5 6 6 8 6 9
|
| 20 |
+
LJ049-0027 3 5 5 6 5 5 5 4 4 6 13 6 6 6 7 5 5 8 8 8 9 8 7 7 7 7 7 11 20 6 6 4 6 6 5 5 5 6 8 8 9 8 7 11 8 10 19 7 7 8 7 7 8 7 10 10 7 6 7 5 5 5 11 5 6 7 6 5 6 10 9 10 11 8 8 5 5 5 5 6 7 7 6 8 9 8 5 4 4 5 5 5 6 6 5 5 6 6 6 8 9 5 6 6 8 9 12 14 8 10 10
|
| 21 |
+
LJ049-0028 6 7 8 6 5 5 5 7 8 8 6 7 7 3 7 6 6 6 7 8 7 9 10 5 4 7 7 7 5 5 5 6 7 8 5 6 7 7 6 9 12 8 8 8 7 8 5 5 6 8 6 6 4 5 5 4 5 6 8 7 8 9 11 9 9 11 10 10
|
| 22 |
+
LJ049-0029 5 5 7 8 7 8 7 6 6 4 5 5 6 5 6 5 5 6 7 8 9 13 11 8 6 6 5 5 6 6 6 5 6 7 5 7 6 6 6 6 8 8 8 9 10 10 24 6 12 7 6 5 6 6 6 6 8 9 9 5 5 4 4 5 7 7 6 8 8 7 5 4 6 6 6 6 5 5 5 5 5 4 5 5 6 5 6 6 5 6 8 9
|
| 23 |
+
LJ049-0030 5 5 5 5 6 6 6 6 4 6 6 6 5 6 8 11 7 6 5 5 5 4 7 9 8 11 10 6 6 5 5 4 5 4 4 5 4 4 6 6 6 6 6 6 7 7 7 8 7 6 5 6 8 9 8 5 5 4 5 6 6 7 7 8 10 11 9 7 7 9 10 8 8 11 44 6 8 7 8 9 12 9 10 6 6 7 6 6 8 9 7 7 8 7 8
|
| 24 |
+
LJ049-0031 5 5 5 5 6 5 5 5 5 6 6 6 7 6 3 4 6 6 5 5 7 7 7 9 9 7 7 9 11 9 9 11 18 6 6 7 8 8 10 7 7 6 7 9 8 8 7 10 17 14 10
|
| 25 |
+
LJ049-0032 6 6 7 7 11 12 6 6 7 6 5 6 7 10 12 12 13 6 7 6 5 6 6 5 5 5 19 6 6 7 5 4 3 3 5 6 7 7 6 6 5 5 4 5 7 7 6 5 5 5 7 8 7 5 6 9 10 7 6 7 8 8 8 6 5 4 4 5 5 5 6 8 9 8 8 8 5 5 6 6 6 6 5 5 6 7 13
|
| 26 |
+
LJ049-0033 7 10 8 11 7 7 6 5 5 5 6 8 8 8 7 4 8 9 8 7 6 6 7 5 7 5 6 7 7 6 5 5 5 5 5 7 14 8 6 6 8 15 16 18 10 6 6 5 7 6 5 5 6 5 5 5 5 6 9 9 10 7 7 9 12 7 17 8 5 7 7 7 8 7 7 6 7 4 20 7 5 6 9 12 7 6 6 8 8 8 6 5 4 4 5 5 5 6 5 5 6 7 11 13 16
|
| 27 |
+
LJ049-0034 7 5 4 3 4 7 7 9 8 7 5 5 5 5 6 6 5 5 5 5 7 6 7 10 13 10 6 6 4 4 7 6 7 6 5 4 5 6 4 4 4 5 5 7 7 7 7 7 5 4 5 7 9 8 7 6 6 11 9 11 49 7 7 6 5 6 4 5 5 5 6 6 5 6 6 8 6 9
|
| 28 |
+
LJ049-0035 10 7 7 6 11 15 9 8 12 11 9 7 8 6 6 5 5 5 5 5 6 6 5 5 5 4 4 5 6 7 6 5 6 10 9 8 6 5 5 6 9 7 11 8 7 5 5 6 6 6 5 5 11 7 7 10 9 6 8 7 8 8 9 9 8 7 6 7 7 5 6 7 7 6 5 6 7 10 8 4 6 5 5 6 5 5 6 7 11 13 16
|
| 29 |
+
LJ049-0036 4 9 9 7 9 7 6 8 8 8 7 5 9 9 7 5 5 5 6 7 7 7 8 11 12 11 12 10
|
| 30 |
+
LJ049-0037 8 8 7 7 6 7 8 13 12 11 33 8 6 7 10 9 5 5 5 5 5 5 5 6 6 6 6 7 4 5 6 6 8 7 6 7 6 7 10 8 8 7 8 6 6 7 6 6 6 5 8 9 12 8 5 5 4 5 7 7 7 9 10 7 10 13 10
|
| 31 |
+
LJ049-0038 6 6 7 8 11 10 9 8 7 6 8 9 7 6 9 8 6 5 7 12 9 8 10 5 6 6 7 8 11 7 5 5 5 6 6 10 7 7 8 9 7 10 7 7 10 9 5 5 5 5 5 5 5 5 5 9 8 8 7 8 31 7 7 7 5 6 10 10 11 8 7 5 7 11 7 5 5 5 6 6 9 6 6 7 6 8 6 5 6 6 7 11 16
|
| 32 |
+
LJ049-0039 8 8 7 7 6 6 4 4 6 6 6 6 5 5 7 6 7 7 6 8 7 9 5 5 4 5 8 8 8 7 4 8 7 6 5 6 6 6 6 5 5 5 5 5 5 6 7 6 5 6 10 8 12 6 13 10 12 6 8 9 10 12 11 8 9
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.1/speech_shape/speech_shape
ADDED
|
File without changes
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.2/durations/durations
ADDED
|
@@ -0,0 +1,32 @@
| 1 |
+
LJ049-0040 7 7 8 8 8 8 6 6 7 8 7 7 6 5 4 9 11 12 8 9 3 6 5 5 6 4 4 3 4 4 6 6 6 4 5 7 7 8 8 8 6 7 7 6 6 6 7 8 17
|
| 2 |
+
LJ049-0041 8 5 5 5 4 4 6 6 6 6 5 5 5 5 4 4 5 5 5 5 6 5 5 6 5 9 6 5 5 5 5 5 5 5 5 5 5 6 6 5 6 6 6 7 7 7 6 6 6 6 7 8 7 7 6 6 7 8 10 8 7 8 8 6 5 5 5 5 5 5 6 5 5 5 5 8 6 8 9 9 6 5 5 5 7 8 7 5 6 9 10 7 6 5 7 6 5 5 7 8 9 9 7 9 8 7 6 7 8 9 7 8 8 8
|
| 3 |
+
LJ049-0042 10 7 7 9 7 10 15 13 7 9 12 7 6 5 4 6 5 5 5 5 6 7 7 6 6 9 6 9 7 6 8 5 5 6 8 7 6 7 8 6 7 9 18 46 13 5 7 7 8 6 6 6 5 5 6 5 6 6 8 8 8 6 7 7 8 9 9 7 5 5 6 6 7 6 8
|
| 4 |
+
LJ049-0043 10 7 7 7 11 35 9 10 10 12 7 6 4 4 6 5 6 5 6 8 9 7 10 10 5 5 5 5 6 6 5 4 4 4 3 6 9 13 11 10 5 4 5 5 5 6 6 7 7 7 5 4 5 5 9 9 8 5 5 6 6 7 6 6 5 5 5 6 7 8 6 5 7 11 9 15 8 9
|
| 5 |
+
LJ049-0044 12 7 4 4 5 5 5 6 8 13 6 8 6 7 7 7 5 4 5 5 8 6 8 9 10 8 9 9 11 18 8 6 5 7 10 11 7 7 7 10 6 5 5 6 7 7 8 7 6 5 4 5 5 5 6 6 5 5 5 5 20 6 7 7 7 8 7 7 9 7 5 8
|
| 6 |
+
LJ049-0045 6 6 7 9 7 8 10 10 8 8 6 6 5 5 6 9 8 8 5 4 6 9 8 9 8 7 6 8 9 11 9 9 14 52 14 7 5 5 6 7 5 5 5 6 7 7 12 10 13 9 8 14 7 6 7 9 6 5 5 5 5 5 6 6 5 4 4 4 4 8 7 7 8 6 6 7 7 8 6 5 7 7 8 7 7 7 6 5 5 5 4 5 6 8 7 9 8 7 6 6 5 7
|
| 7 |
+
LJ049-0046 10 7 7 10 13 20 23 13 8 8 7 7 7 8 7 5 5 5 6 7 8 9 8 8 6 6 6 8 13 10 8 9 7 8 8 7 6 5 5 7 9 10 7 7 9 7 6 7 9 9 6 5 7 9 10 10 10 7 7 9 12 16
|
| 8 |
+
LJ049-0047 8 8 8 11 10 7 6 5 5 5 7 8 8 9 7 6 6 7 8 7 5 4 5 6 8 11 7 6 4 5 5 5 5 5 5 5 5 5 5 6 6 5 6 8 14 14 7 7 8 8 9 10 9 8 10 9 6 6 7 8 8 10 19 19 9 7 7 8 10 10 12 13 20
|
| 9 |
+
LJ049-0048 4 5 6 5 6 6 8 6 6 6 6 6 7 7 4 5 4 5 4 6 7 7 7 8 8 8 8 6 10 7 7 10 9 7 6 6 5 5 13 11 8 5 5 4 5 5 5 6 6 5 5 5 4 4 5 6 7 11 13 15 7 5 6 7 10 10 8 8
|
| 10 |
+
LJ049-0049 11 6 8 8 8 8 9 8 8 8 6 9 6 5 6 8 8 7 6 6 5 5 12 11 8 5 5 4 5 5 5 6 6 5 5 5 4 5 5 7 9 7 7 7 6 7 10 10 12 10 7 9
|
| 11 |
+
LJ049-0050 5 7 6 6 5 7 6 10 11 9 10 18 5 5 5 6 9 15 11 12 8 5 5 5 8 8 8 9 6 6 5 9 6 10 8 15 7 7 8 8 7 7 7 4 5 5 6 5 5 6 6 5 3 6 5 5 5 7 6 7 7 6 5 5 7 6 8
|
| 12 |
+
LJ049-0051 8 7 9 6 5 6 6 5 10 7 7 7 7 9 9 8 5 5 6 7 6 5 6 6 8 13 13 13 10
|
| 13 |
+
LJ049-0052 8 6 6 7 7 8 9 8 10 8 7 5 7 6 5 7 10 12 11 6 5 5 5 5 5 6 7 6 5 6 6 6 6 5 4 4 5 4 6 5 6 6 5 5 6 6 6 7 7 7 9 9 11 7 6 4 4 7 6 5 7 7 7 7 8 10 14 16
|
| 14 |
+
LJ049-0053 10 6 6 7 8 6 6 7 7 7 5 6 7 8 9 9 9 10 6 4 5 8 8 8 6 6 9 7 5 6 5 5 6 7 6 5 5 5 6 10 9 8 7 8 7 6 6 9 10 5 7 10 10 7 5 5 5 4 6 7 8 14 16 12 35 7 7 7 6 5 5 5 7 8 7 5 6 10 7 6 7 7 8 7 6 7 6 11 7 7 8 9 8
|
| 15 |
+
LJ049-0054 6 7 8 6 5 5 5 7 8 8 6 7 8 6 6 6 5 5 7 6 6 6 5 4 5 4 6 12 9 7 7 6 14 7 9 8 6 6 6 6 5 6 5 4 7 5 5 5 8 7 12 7 5 7 7 8 6 6 8 11 9 9 7 6 7 8 5 5 5 5 4 5 5 5 6 6 5 6 6 8 6 9
|
| 16 |
+
LJ049-0055 18 11 12 7 6 5 4 5 6 10 10 12 8 7 7 5 5 6 7 6 7 7 7
|
| 17 |
+
LJ049-0056 6 7 7 9 9 7 9 7 8 7 6 4 6 8 11 7 6 4 4 5 5 5 5 6 6 7 7 8 10 13 22 9 7 7 7 8 7 7 6 6 12 7 6 7 6 5 6 5 5 5 5 5 6 5 5 5 6 9 10 8 7 7 8 9 8 6 7 8 7 8 7 7 6 7 6 10 7 6 5 6 6 6 5 5 5 5 6 8 12
|
| 18 |
+
LJ049-0057 5 7 7 7 11 11 8 8 5 4 6 6 5 9 9 7 6 6 4 5 5 6 5 6 6 7 8 8 4 5 7 9 9 8 7 8 5 5 6 6 5 5 5 5 5 5 6 6 5 5 6 6 6 7 11 9 7 7 9 11 7 8 8 6 7 8 9 7 8 8 8
|
| 19 |
+
LJ049-0058 13 7 5 4 5 6 8 12 41 16 7 6 5 4 5 6 7 7 7 6 5 5 5 5 5 5 6 6 6 6 7 7 10 7 5 5 5 5 6 6 5 5 5 5 10 9 8 6 5 7 7 8 7 8 8 7 8 7 7 5 4 5 6 4 5 7 9 16 19 13 7 7 8 7 6 7 5 6 7 8 9 7 8 8 8
|
| 20 |
+
LJ049-0059 13 7 6 5 4 5 6 7 7 7 6 5 5 6 6 6 6 7 15 8 12 6 6 6 6 6 7 6 5 5 5 5 6 6 7 6 7 5 4 5 6 6 6 10 16 8 9 7 5 6 7 7 5 5 7 10 24 10 7 8 7 7 7 7 9 7 5 6 5 5 5 6 5 5 5 7 8 7 9 9 9
|
| 21 |
+
LJ049-0060 6 6 5 5 6 5 5 7 6 5 7 10 6 6 6 5 5 7 10 9 7 9 9 7 5 5 6 5 7 8 8 7 9 23 11 9 8 8 8 8 8 9 13 12 9 7 7 8 9 7 6 5 4 5 5 5 6 6 5 5 5 5 6 6 8 8 5 5 4 5 7 4 4 7 6 7 6 7 8 22 7 7 11 9 13 16 14 10 10
|
| 22 |
+
LJ049-0061 11 12 7 7 6 7 6 7 5 5 5 6 6 6 8 10 8 10 10 8 7 6 7 7 7 8 10 10 11 7 6 4 5 7 6 7 6 4 5 5 5 6 6 5 5 5 4 5 5 5 5 6 7 8 6 5 5 4 4 5 6 8 9 8 8 11 8 14 10 9 8 5 5 7 6 7 7 8 10 11 10 10 14 11 7 5 6 8 9 11 10 8 9
|
| 23 |
+
LJ049-0062 5 5 7 7 7 6 7 6 7 8 6 6 6 7 6 7 5 4 5 6 6 6 10 20 9 7 7 7 7 9 10 9 8 7 7 6 5 4 4 7 6 7 6 2 5 5 5 5 6 6 5 6 6 8 6 9
|
| 24 |
+
LJ049-0063 7 8 8 7 7 6 10 8 7 6 5 5 4 7 5 5 5 8 7 16 8 7 7 6 5 5 5 7 8 7 6 7 15 6 6 7 9 6 8 10 7 11 9 9 8 6 5 5 6 7 6 5 5 6 8 11 8 9 9 8 8 9 7 6 7 8 7 7 8 14 14 8 7 7 8 8 8 8 7 6 6 8 11
|
| 25 |
+
LJ049-0064 5 5 7 7 6 6 5 5 6 8 9 9 8 7 7 5 7 8 7 7 5 5 6 5 5 6 8 7 8 8 9 6 6 6 6 6 7 7 7 10 7 7 6 8
|
| 26 |
+
LJ049-0065 11 8 5 5 5 6 6 6 9 7 7 8 4 5 7 8 8 7 5 5 5 7 6 7 6 8 7 7 7 7 7 6 5 6 7 7 7 8 6 7 7 6 7 12 7 8 9 6 6 4 5 8 8 8 6 6 10 15 8 5 5 7 7 9 7 6 9 12 8 9
|
| 27 |
+
LJ049-0066 4 5 6 7 8 6 6 7 7 10 34 7 7 8 9 8 8 9 12 9 8 7 9 10 8 7 8 11 7 6 6 11 10 6 5 5 5 7 8 7 5 6 9 10 7 5 4 4 5 6 7 5 7 8 8 6 7 5 5 6 9 10 10 9 11 35 12 7 5 6 7 7 8 6 6 6 13 7 7 7 8 7 7 7 5 3 4 5 8 7 7 9 10 8 9 9
|
| 28 |
+
LJ049-0067 6 6 8 7 7 8 11 7 10 9 7 6 7 9 10 7 6 6 7 7 8 9 12 8 16 9 7 8 8 9 7 6 7 8 3 6 7 8 7 6 6 6 5 8 6 5 5 5 6 6 7 6 6 5 4 3 5 6 6 8 7 5 5 6 6 5 5 5 4 5 6 8 9 8 7 6 7 5 8 7 8 8 7 7 8 7 7 6 5 6 9 12 10 8 12 10
|
| 29 |
+
LJ049-0068 11 8 8 7 7 11 9 7 5 5 5 6 6 7 7 8 11 12 7 9
|
| 30 |
+
LJ049-0069 7 7 10 7 9 7 7 6 7 6 7 12 13 7 5 5 5 6 7 7 6 5 6 6 6 7 7 7 8 6 6 10 5 5 7 8 7 9 7 6 8 11 11 17 8 9 12 14 11 8 9
|
| 31 |
+
LJ049-0070 12 7 9 9 9 9 13 6 6 4 5 5 7 7 7 6 10 9 9 8 6 5 5 6 7 6 5 5 6 7 8 7 7 8 10 9 7 7 8 8 7 7 8 5 7 6 6 8 6 6 5 7 9 10 8 5 5 5 6 9 7 7 7 6 5 5 5 6 6 5 5 6 5 5 7 7 6 8 12 14 10 5 7 7 9 7 5 5 6 7 6 6 6 7 7 7 8 9 9 11 11 10
|
| 32 |
+
LJ049-0071 8 8 6 6 5 5 7 10 8 7 5 5 6 8 9 8 8 7 8 10 11 10 14 12 27 6 5 4 5 5 4 4 6 6 6 6 6 6 6 5 6 7 11 10 9 7 4 6 7 7 6 9 7 9 8 6 6 6 8 10 9 7 7 10 13 14 11 7 9
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.2/speech_shape/speech_shape
ADDED
|
File without changes
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.3/durations/durations
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0072 11 14 8 6 6 6 6 7 9 6 6 6 6 5 6 8 9 7 5 5 5 6 8 8 6 6 5 4 4 6 7 7 7 7 7 7 8 6 7 8 6 7 7 6 6 6 6 5 6 6 6 9 7 6 9 13 11 37 8 5 5 5 6 6 7 7 10 9 12 9 7 7 7 6 6 6 7 7 6 5 5 5 7 8 7 5 6 9 10 7 6 4 4 5 5 5 5 6 6 7 7 8 10 14 16
|
| 2 |
+
LJ049-0073 4 6 6 7 9 8 7 5 5 5 5 5 7 6 6 6 7 6 7 5 5 5 4 6 4 4 4 4 5 5 5 6 6 5 5 4 4 4 14 8 8 8 7 11 8 9 9 7 6 5 7 6 6 5 5 6 5 7 16 6 6 5 6 9 10 7 7 10 8 9 7 5 7 11 11 12 11 8 9
|
| 3 |
+
LJ049-0074 6 6 8 10 8 6 8 7 10 7 6 5 5 6 6 6 7 9 10 10 6 5 5 5 5 5 6 6 5 5 7 6 8 10 10 7 5 6 6 8 9 7 15 8 5 5 5 6 7 6 6 6 11 9 10 9 11 7 8 7 5 6 7 9 9 8 7 6 9 8 7 8 7 7 4 4 5 5 8 11 7 6 4 4 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 4 |
+
LJ049-0075 6 5 5 4 4 5 5 8 9 9 9 12 10 10
|
| 5 |
+
LJ049-0076 5 5 5 5 6 6 7 7 7 8 8 6 7 9 9 5 5 5 5 4 5 5 6 7 6 8 6 7 4 5 6 5 5 5 5 5 5 5 5 6 5 4 4 4 7 6 7 7 7 8 5 5 6 5 6 9 8 7 5 6 5 5 5 5 5 5 5 5 5 6 5 5 5 7 7 8 6 5 7 6 5 8 9 9 7 8 21 7 5 5 5 6 6 6 6 7 7 4 7 7 6 12 5 7 7 7 7 5 5 7 7 7 7 6 5 5 5 5 6 7 7 6 6 6 5 6 11 10
|
| 6 |
+
LJ049-0077 10 6 6 6 6 7 8 8 8 8 6 7 8 10 9 11 29 7 7 7 6 5 5 5 7 8 8 7 8 9 6 8 6 5 3 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 9 12 15
|
| 7 |
+
LJ049-0078 5 7 7 6 6 7 7 7 8 9 8 7 6 4 3 6 6 7 6 7 7 7 8 7 6 6 7 8 8 6 6 8 8 5 4 5 5 17 7 10 11 11 11 11 6 6 5 5 5 5 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 8 |
+
LJ049-0079 7 5 6 9 9 8 6 6 9 6 6 5 6 8 6 6 6 6 7 5 4 6 8 15 6 5 7 8 9 9 9 10 6 8 7 7 6 5 5 5 6 7 8 8 7 9 28 14 8 6 5 5 6 7 17 6 5 6 7 7 8 8 9 9 6 5 4 5 5 6 5 6 7 7 6 7 6 6 6 7 6 8 7 6 12 9 6 7 7 8 7 6 5 6 12 5 5 4 8 8 8 7 9 11 10 10
|
| 9 |
+
LJ049-0080 14 12 9 8 6 6 8 8 7 9 8 7 5 6 6 7 6 10 11 16 15
|
| 10 |
+
LJ049-0081 5 4 4 5 6 6 8 9 12 5 6 6 7 5 6 5 4 4 4 5 6 7 6 7 6 6 6 6 5 6 7 7 9 9 6 6 6 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 5 5 5 5 5 5 7 6 5 6 7 6 8
|
| 11 |
+
LJ049-0082 6 6 6 5 5 7 5 5 8 8 8 7 6 5 5 5 5 6 8 10 6 5 5 5 6 6 8 8 8 8 6 7 8 8 7 8 25 6 5 5 5 6 6 7 7 7 4 5 5 5 5 5 7 7 6 7 8 8 11 12 32 12 5 6 7 6 6 7 7 6 6 6 6 5 6 7 8 23 7 5 3 4 4 5 5 5 6 10 10 9 7 7 6 8
|
| 12 |
+
LJ049-0083 8 4 5 7 8 7 10 9 5 5 5 9 7 5 6 6 7 5 9 10 11 14 7 6 5 5 7 8 10 12 14 8 7 9 7 9 8 8 7 9 8 5 7 8 7 9 9 9 9 10 12 30 8 6 7 8 8 8 8 10 11 28 15 17 9 7 6 7 8 7 6 6 7 9 13 11 9 11 12 34 7 8 8 9 8 6 7 6 7 8 8 8 8 6 5 5 5 8 8 7 8 8 7 6 5 5 6 9 13 10 11 14 10
|
| 13 |
+
LJ049-0084 10 9 9 7 5 5 5 5 5 5 6 6 5 5 5 4 5 7 7 7 6 5 6 8 6 6 7 8 6 6 7 6 7 10 10 5 6 6 7 9 12 9 9 7 7 7 6 12 28 11 6 5 7 6 7 6 7 6 5 5 5 6 8 8 9 8 7 6 4 4 5 5 6 7 9 8 10 10 8 6 5 5 6 7 6 7 6 6 5 7 6 6 9 10 8 6 5 5 8 7 6 5 5 6 5 9 9 10 8 7 8
|
| 14 |
+
LJ049-0085 5 6 7 7 8 7 5 7 7 7 7 7 8 5 5 4 5 5 6 8 6 6 7 8 6 7 7 6 6 6 7 7 6 6 7 7 6 8
|
| 15 |
+
LJ049-0086 7 6 7 7 8 6 7 5 6 7 8 10 9 7 9 8 9 6 5 6 8 6 7 6 7 7 7 6 7 7 7 6 6 6 6 7 7 8 7 7 9 12 9 8 8 7 6 6 6 7 6 6 6 8 8 8 8 7 7 5 5 5 7 9 9 8 7 7 7 6 5 5 5 7 8 6 8 8 9 8 9 8 6 6 8 6 8 9 9 8 11 13 10 9
|
| 16 |
+
LJ049-0087 5 5 7 9 10 7 6 7 6 8 5 6 8 9 11 10 7 5 5 5 5 5 6 6 5 5 5 5 6 7 10 11 13 13 25 11 7 6 5 4 5 5 6 8 8 7 6 6 5 5 5 5 5 6 6 5 5 5 5 9 10 7 10 7 5 6 6 5 6 9 9 10 10 12 10
|
| 17 |
+
LJ049-0088 10 8 7 10 13 6 5 6 7 9 10 7 5 5 5 9 9 7 8 8 8 10 6 5 5 5 6 5 6 7 5 6 6 5 6 7 7 7 7 8 8 8 6 7 7 7 6 5 5 7 7 7 6 5 4 5 5 8 10 9 8 11 14 11 10
|
| 18 |
+
LJ049-0089 6 7 7 8 10 7 7 9 7 5 6 7 8 8 8 11 9 11 13 18 7 3 8 5 8 16 9 9 9 8 11 8 5 7 9 7 7 8 8 10 8 9 17 9 11 7 8 6 5 5 5 6 6 7 8 7 8 7 5 7 7 6 6 6 6 7 7 7 7 6 6 5 6 8 7 7 14
|
| 19 |
+
LJ049-0090 14 5 6 7 7 7 12 7 8 7 8 7 8 7 5 3 4 6 9 8 8 9 13 47 11 8 10 7 5 6 6 7 6 10 12 13 10 9
|
| 20 |
+
LJ049-0091 14 7 5 6 6 9 8 7 7 6 6 8 8 8 6 8 8 8 10 7 7 6 6 7 9 5 5 6 6 7 7 9 7 9 13 6 8 7 5 7 6 6 6 6 6 7 10 13 10 10 10 10 5 7 7 6 5 5 5 7 8 8 6 7 12 7 6 4 4 7 6 7 6 2 5 5 5 5 6 6 5 6 7 7 6 8
|
| 21 |
+
LJ049-0092 5 5 7 9 10 12 11 10 9 8 6 6 6 5 7 14 9 9 8 8 8 5 7 7 7 6 7 7 8 8 7 7 5 7 6 10 37 8 8 8 8 8 5 5 6 6 6 6 8 9 16 11 10 10 10 9 8 8 9 8 6 7 4 8 13 9 10 10 6 5 5 6 10 10 7 7 7 6 7 12
|
| 22 |
+
LJ049-0093 11 7 8 6 6 6 6 7 5 5 5 6 9 10 7 5 5 6 8 7 7 8 8 10 9 12 8 8 7 6 5 5 5 7 7 7 7 6 8 7 6 7 7 6 12 8 9 9 7 6 6 7 8 9 10 8 5 6 6 7 8 5 6 5 9 9 5 6 7 7 6 10 9 9 9 7 8 7 6 6 6 5 6 6 10 12 13 10
|
| 23 |
+
LJ049-0094 5 5 5 5 6 6 7 6 8 8 6 8 13 8 5 5 5 5 7 7 6 5 5 5 7 8 9 7 8 10 28 6 5 5 5 6 6 5 6 9 11 10 11 10 10 20 7 6 7 7 10 9 10 11 8 6 6 5 6 10 10 7 7 7 6 7 13
|
| 24 |
+
LJ049-0095 6 6 9 13 9 8 7 7 8 7 7 6 5 5 5 6 5 5 6 6 7 6 6 6 7 6 6 6 7 9 10 25 12 8 7 7 12 14 14 7 6 5 4 7 12 7 9 7 5 6 7 5 8 6 7 9 8 11 9 12 10
|
| 25 |
+
LJ049-0096 6 4 6 6 7 6 5 5 7 9 7 5 5 5 5 7 9 9 9 8 7 7 7 5 6 6 6 7 8 8 8 6 7 7 8 7 14 9 7 5 6 6 7 6 10 11 14 14 26 5 4 7 6 6 4 5 5 6 5 7 9 9 7 6 6 6 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 6 5 5 5 5 11 5 8 7 6 7 8 16
|
| 26 |
+
LJ049-0097 4 6 8 6 6 8 7 7 6 5 5 4 5 5 6 8 9 8 6 7 4 5 5 5 5 5 5 4 4 7 7 7 6 9 8 6 5 6 6 5 6 8 7 5 9 9 10 8 5 6 7 9 10 9
|
| 27 |
+
LJ049-0098 8 7 6 8 7 8 8 9 14 7 9 7 10 10 11 9 11 9 9 7 9 9 9 9 8 8 8 6 6 8 9 6 5 7 9 10 7 7 12 8 9 7 6 5 4 6 7 9 8 6 5 5 5 19 9 5 5 5 7 7 6 5 6 7 6 7 8 8 10 9 15 7 6 6 6 7 6 7 5 5 5 7 8 6 6 5 4 6 6 6 7 9 9 10 10 9
|
| 28 |
+
LJ049-0099 11 8 5 5 5 6 7 8 7 11 8 7 7 8 7 4 5 5 6 7 7 7 6 7 7 6 5 5 5 5 6 11 9 7 6 6 5 5 5 7 7 8 8 8 6 7 7 7 5 4 5 5 4 5 5 6 5 5 5 5 5 6 6 5 6 7 6 8
|
| 29 |
+
LJ049-0100 5 5 5 5 5 6 7 6 7 6 6 5 6 7 7 6 6 7 6 6 5 5 7 9 6 5 7 9 11 19 7 8 7 6 9 8 7 7 6 5 8 6 7 8 8 8 8 8 8 16 6 10 9 11 8 9
|
| 30 |
+
LJ049-0101 7 6 6 8 8 6 7 7 9 13 9 9 10 8 12 10 10 8 8 8 10 12 10 7 9 7 7 6 5 6 9 6 5 5 7 7 6 6 8 8 9 10 8 10 7 9 11 11 29 9 6 6 6 8 7 6 9 12 6 5 6 6 9 9 8 8 6 7 9 16 20
|
| 31 |
+
LJ049-0102 5 5 5 5 6 6 5 5 6 5 7 27 8 10 9 5 5 5 5 5 6 7 6 10 15 11 9 8 7 9 11 8 7 8 9 8 7 7 6 6 4 4 5 8 8 6 8 9 6 6 6 5 7 8 8 8 8 21 6 5 6 9 9 9 8 6 6 6 4 5 5 6 5 6 7 6 9 17 6 5 5 5 6 6 5 5 5 4 5 7 7 8 8 9 12 9 5 5 6 7 9 8 5 5 5 5 5 5 5 4 5 7 8 9 10 8 8
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.3/speech_shape/speech_shape
ADDED
|
File without changes
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.4/durations/durations
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0103 7 5 7 7 6 6 6 7 7 7 9 11 10 18 7 7 6 7 6 6 6 6 13 9 6 5 6 6 5 5 7 6 7 10 9 7 5 5 5 5 6 8 8 6 6 5 5 6 5 5 5 5 5 6 8 6 6 7 6 7 8 7 8 10 18 10 7 7 6 7 8 7 7 6 5 7 7 7 9 7 7 8 8 7 5 7 9 13 10
|
| 2 |
+
LJ049-0104 12 8 7 7 7 7 11 11 8 15 5 5 6 7 6 5 6 5 5 5 5 6 6 5 5 6 5 5 5 6 6 7 10 9 5 5 5 5 5 5 5 6 15 15 14 24 9 5 6 8 9 8 7 6 7 5 5 5 7 7 10 9 5 5 5 6 5 6 7 7 9 23 6 6 8 8 8 6 7 7 7 7 5 6 8 10 7 6 8 7 6 6 6 5 7 8 8 9 7 6 8
|
| 3 |
+
LJ049-0105 7 6 6 5 4 4 6 7 7 9 7 5 5 5 5 6 6 6 6 9 6 6 6 7 8 7 7 13 9 6 7 9 6 8 9 9 10 9 11 29 6 5 5 5 6 6 5 5 5 4 5 7 7 7 8 7 7 7 6 7 10 9 5 5 5 5 5 5 5 4 4 7 7 7 6 7 6 4 6 8 8 7 6 6 8 9 8 7 8 7 8
|
| 4 |
+
LJ049-0106 7 6 7 5 4 5 6 5 5 4 5 6 7 7 6 7 7 6 6 6 5 7 8 12 31 13 5 6 8 8 10 7 6 6 6 6 7 8 7 7 8 7 7 7 5 5 5 5 7 9 10 10 8 6 5 6 6 6 7 7 7 8 8 9 18 10 6 7 6 8 7 7 7 7 6 7 6 7 7 7 7 8 7 6 8 7 7 9 12 6 8 7 6 4 5 8 8 6 8 8 6 6 6 5 7 8 8 9 10 15
|
| 5 |
+
LJ049-0107 22 18 7 9 8 7 7 6 6 7 7 6 6 5 5 5 4 6 5 6 7 6 5 5 5 5 6 9 14 14 9 11 10
|
| 6 |
+
LJ049-0108 11 6 8 6 6 8 9 7 10 6 5 6 7 7 5 5 5 6 6 7 5 4 5 5 6 6 6 5 5 5 6 5 8 6 5 5 5 6 8 5 12 8 14 5 5 4 4 5 4 5 6 7 7 7 8 5 6 6 20 6 6 5 5 9 6 5 6 9 15 11 9 8 7 6 6 7 8 7 7 8 7 6 5 5 6 7 7 7 7 9 9 9 8 7 8 7 5 6 6 6 6 12 17 11 9
|
| 7 |
+
LJ049-0109 4 4 5 5 5 6 8 8 6 8 8 9 8 5 6 13 26 13 6 5 6 6 6 5 7 6 6 7 7 7 6 7 8 13 12 10 11 16 10 10 8 5 7 6 6 5 4 5 5 6 7 7 6 6 5 5 6 5 5 6 9 6 6 7 6 7 9 8 9 13 12 10
|
| 8 |
+
LJ049-0110 5 5 6 5 6 7 6 5 5 5 5 5 7 7 6 5 5 5 5 5 6 6 7 6 6 5 8 8 8 8 6 7 7 7 7 12 7 7 6 4 4 4 4 5 6 6 6 6 7 8 8 7 6 8 7 6 7 7 6 9 7 5 6 5 3 4 5 6 8 6 6 6 7 8 10 7 8 8 7 5 5 6 10 17 22 8 10 12 9 8 6 5 6 6 5 7 9 8 5 4 7 7 7 7 6 6 6 8
|
| 9 |
+
LJ049-0111 7 7 7 7 6 5 5 6 7 8 7 5 5 5 6 5 5 7 5 7 7 5 6 5 6 9 11 12 7 6 8 8 8 8 6 7 7 8 8 11 9 11 19 12 23 9 7 5 6 6 8 7 7 19 6 5 5 6 6 7 8 8 8 6 7 8 10 10 16
|
| 10 |
+
LJ049-0112 9 7 9 8 10 5 6 7 7 8 6 8 7 8 15 6 5 5 7 7 6 9 9 7 7 5 5 3 4 5 6 8 6 6 6 8 9 12 7 6 9 7 11 8 7 8 8 7 5 4 4 5 6 10 17 14 9 9
|
| 11 |
+
LJ049-0113 8 7 8 9 9 6 5 5 6 9 12 8 7 8 7 11 5 5 4 5 6 6 7 7 8 8 9 16 8 6 5 7 7 12 14 8 11 11 6 5 6 10 22 8 8 9 11 9 8 7 10 12 14 30 6 7 7 7 7 8 9 7 5 5 5 7 7 6 5 6 9 6 9 12 6 8 7 8 9 8
|
| 12 |
+
LJ049-0114 6 5 6 6 8 9 6 8 9 7 11 10 10 7 7 7 6 6 8 9 7 6 5 6 8 8 9 8 6 7 7 5 8 7 7 7 6 6 5 5 4 5 5 6 5 6 7 5 5 5 5 6 5 4 3 4 5 6 8 6 6 6 8 9 13 8 8 35 9 7 5 5 5 6 6 5 5 6 6 5 5 6 6 5 5 5 5 6 6 6 6 5 5 6 8 14 10 10 8 6 6 7 6 6 8 8 7 6 6 7 10 12 21
|
| 13 |
+
LJ049-0115 7 6 5 6 8 8 9 7 5 5 5 8 10 10 6 5 5 5 4 6 6 5 4 6 8 7 7 7 9 9 10 8 5 5 5 5 6 6 7 6 5 5 5 6 6 8 12 17 13 28 18 13
|
| 14 |
+
LJ049-0116 10 7 8 8 10 9 8 8 7 6 10 7 5 5 5 5 5 5 7 7 7 7 7 6 7 8 8 7 5 5 5 6 7 6 7 8 8 8 8 6 6 7 5 5 5 8 10 6 6 4 6 7 6 5 7 9 10 8 10 8 9 10 12 8 8 9 12 12 5 6 8 7 8 6 6 9 12 11 45 10 6 6 6 6 7 7 6 6 6 5 6 4 6 6 6 7 7 7 6 6 7 9 14
|
| 15 |
+
LJ049-0117 4 4 5 6 6 6 5 7 8 7 7 6 6 8 9 8 7 7 7 7 7 7 7 7 8 10 13 5 6 5 4 12 17 15 17 10 8 7 7 8 8 9 7
|
| 16 |
+
LJ049-0118 9 8 10 7 5 5 4 5 4 5 5 5 7 7 5 7 11 11 8 17 6 5 6 7 13 11 18 6 6 6 5 5 5 6 6 6 5 5 7 9 7 5 5 5 7 7 7 6 5 4 5 9 10 9 6 9 7 7 7 8 10 14 18 6 5 5 5 5 7 8 8 6 5 6 6 6 9 5 5 6 8 7 8 8 6 9 8 7 6 6 12
|
| 17 |
+
LJ049-0119 5 5 5 6 7 6 5 5 6 6 6 5 5 5 7 10 11 5 6 6 7 8 13 7 7 7 8 7 7 7 5 3 4 5 8 6 7 8 9 7 8 20 6 6 6 8 7 6 5 5 6 8 11 24 8 57 9 11 6 5 5 6 7 8 7 7 7 6 6 6 6 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 18 |
+
LJ049-0120 11 7 5 5 6 6 6 5 6 15 13 5 6 7 6 8 10 6 6 7 11 9 11 5 4 7 7 6 6 6 8 10 13 7 9 7 6 7 7 6 6 7 7 7 6 7 5 4 5 7 6 6 5 5 6 7 8 6 5 5 8 7 6 7 7 10 13 13 10
|
| 19 |
+
LJ049-0121 8 4 5 6 6 6 5 7 8 10 9 9 7 6 5 5 7 8 9 7 8 8 8 6 6 7 7 8 9 8 5 4 5 11 13 9 8 8 7 5 5 4 4 5 6 8 9 9 8 7 6 8 9 8 7 5 6 6 6 7 6 5 6 6 7 8 8 8 8 8 6 5 6 8 7 9 8 9 7 7 7 7 7 5 5 5 7 7 7 7 6 7 7 8
|
| 20 |
+
LJ049-0122 6 8 10 8 7 6 7 19 11 14 11 10 7 8 10 7 10 9 14 35 7 8 8 6 6 6 7 7 8 7 6 7 8 7 6 18 8 8 11 7 7 6 7 8 8 8 9 9 10 31 9 7 6 6 7 8 11 7 6 6 5 6 9 10 8 7 6 7 5 5 7 7 7 11 11 8 5 6 7 7 6 7 7 6 6 6 6 6 6 7 10 15
|
| 21 |
+
LJ049-0123 12 7 6 6 7 8 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 5 5 5 6 11 8 9 10 7 7 6 7 6 7 7 7 6 5 6 5 6 8 6 6 6 6 7 7 7 7 8 7 12 5 5 5 8 6 6 7 5 5 5 5 5 4 4 6 5 5 6 6 5 5 7 9 8 8 6 8 10 11 5 6 7 6 6 6 7 7 7 8 6 5 5 8 7 6 7 7 10 13 13 10
|
| 22 |
+
LJ049-0124 11 6 9 6 7 8 9 6 14 6 5 4 4 4 9 12 10 7 6 7 6 7 8 8 7 8 8 11 7 6 6 7 8 11 13 17
|
| 23 |
+
LJ049-0125 5 6 7 8 7 8 8 8 9 7 7 6 6 13 15 6 6 13 9 8 8 6 6 8 5 4 4 7 10 8 5 6 6 6 5 6 6 6 6 6 8 8 13 20 12 7 7 7 6 6 4 7 10 8 5 6 6 6 5 5 4 7 6 6 6 6 6 6 5 4 5 6 5 7 6 7 8 10 7 7 10 8 7 8 7 8 8 6 6 7 7 7 8 10 9 10 12 13 10
|
| 24 |
+
LJ049-0126 6 7 5 6 7 6 5 7 9 8 6 5 4 6 8 9 9 8 11 13 11 16 11 12 14 12 7 5 6 7 5 6 5 9 15 6 6 5 6 7 7 8 6 8 8 7 6 7 7 8 9 10 11 9
|
| 25 |
+
LJ049-0127 12 7 6 8 10 10 10 8 8 6 6 4 6 5 7 7 6 5 5 5 7 8 8 6 7 10 13 5 5 5 6 7 7 7 7 7 7 9 6 5 7 6 10 9 11 8 9 6 6 5 5 10 11 7 12 15 7 8 7 7 6 5 6 5 6 6 5 4 6 6 6 6 5 6 9 10 9 8 8 8 6 5 6 6 7 8 9 13 12 10
|
| 26 |
+
LJ049-0128 6 51 12 14 9 6 5 5 7 9 7 6 5 6 6 6 7 6 5 7 6 6 6 7 7 5 5 6 7 6 6 9 12 7 6 8 10 8 5 5 4 3 4 5 6 8 6 6 6 8 10 13 9 9 10
|
| 27 |
+
LJ049-0129 8 4 5 5 8 10 13 23 17 11 7 6 5 5 6 7 6 7 6 7 8 6 6 6 7 6 7 5 4 5 6 6 6 9 6 6 5 7 6 5 6 6 7 5 6 5 5 7 6 6 6 6 6 7 7 10 9 8 10 10 13 17 7 8 6 7 6 4 4 5 5 6 6 6 5 7 5 7 9 8 7 10 8 7 8 8 9 5 6 7 6 4 4 5 6 6 6 5 5 4 4 4 5 6 10 9 13 10 9
|
| 28 |
+
LJ049-0130 6 7 8 6 5 5 5 7 8 7 6 7 7 6 6 6 4 5 7 14 10 8 8 7 8 8 9 7 8 11 13
|
| 29 |
+
LJ049-0132 5 5 5 5 6 5 7 6 5 5 6 8 11 8 10 14 7 10 6 6 7 7 6 6 7 8 7 7 9 13 8 6 5 6 6 8 9 7 6 5 7 8 9 9 9 8 8 7 5 6 6 5 8 7 10 11 11 7 6 8 12 13 17
|
| 30 |
+
LJ049-0133 5 7 11 9 10 7 6 6 8 10 13 9 4 5 5 5 7 9 12 9 6 5 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 6 6 6 7 8 11 8 8 6 7 6 6 9 6 8 9 14 8 7 7 6 5 5 5 5 6 10 11 13 9 8
|
| 31 |
+
LJ049-0134 5 5 5 5 6 6 7 6 6 6 5 4 8 11 12 8 8 3 6 6 5 5 7 6 5 8 9 8 8 7 7 7 7 7 6 6 6 5 5 7 6 7 7 9 10 9 6 5 6 6 6 8 8 8 6 5 4 6 7 6 6 5 6 5 6 7 5 6 9 29 11 6 5 6 6 7 7 6 6 6 6 5 6 6 6 5 5 5 7 8 8 8 9 9 10 12
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.4/speech_shape/speech_shape
ADDED
|
File without changes
|
imdanboy/jets/decode_train.loss.ave/dev/log/output.5/durations/durations
ADDED
|
@@ -0,0 +1,31 @@
| 1 |
+
LJ049-0135 5 7 6 7 8 6 6 6 7 6 7 5 5 5 4 6 6 5 5 6 6 8 11 9 6 5 4 5 5 5 6 6 5 5 5 4 5 5 6 7 6 6 6 5 5 4 5 6 7 5 6 5 5 5 7 8 10 10 16
|
| 2 |
+
LJ049-0136 8 6 5 6 6 6 6 7 6 5 5 6 10 8 8 7 8 9 11 7 6 6 7 6 6 6 8 6 6 6 7 7 7 7 7 6 6 6 5 6 6 6 5 5 6 9 10 33 5 6 6 6 7 6 7 5 4 5 6 6 7 11 14 8 7 7 7 7 12 12 8 10 10 11 8 9 10 8 8 5 4 4 6 6 6 6 5 5 5 7 7 6 6 5 6 8 8 8 6 5 5 7 7 6 5 5 5 7 9 9 7 9 18
|
| 3 |
+
LJ049-0137 4 5 6 6 7 7 7 9 8 5 6 6 6 9 10 6 6 8 12 9 7 7 6 7 8 8 7 7 6 7 9 9 8 9 7 5 5 5 4 4 4 5 5 5 6 6 5 6 7 8 10 29 7 5 6 6 5 5 7 8 9 7 6 6 7 7 6 6 7 6 7 6 6 5 4 6 6 7 7 6 5 4 5 5 6 6 7 9 9 9 7 6 5 5 6 7 8 7 7 9 8 6 6 7 7 5 7
|
| 4 |
+
LJ049-0138 5 8 5 5 8 7 5 6 8 8 8 8 6 7 8 9 11 14 9 9 19 13 8 7 7 5 5 4 5 4 7 6 4 5 6 7 12 5 5 5 5 6 6 8 7 6 6 5 6 6 6 5 5 5 7 6 6 6 4 6 6 7 7 6 5 4 5 5 6 6 7 9 12 15
|
| 5 |
+
LJ049-0139 4 6 7 6 6 7 8 7 8 9 8 6 6 6 6 11 9 6 7 7 9 7 7 7 7 9 9 6 6 6 6 7 6 6 9 9 10 6 5 4 5 7 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 6 |
+
LJ049-0140 5 6 8 8 7 7 6 7 8 6 7 6 7 7 6 7 8 8 15 9 10 9 7 6 8 6 6 7 7 6 5 5 5 5 5 5 6 6 5 5 6 6 5 7 7 7 7 10 9 5 5 5 6 5 6 7 7 10 22 7 7 7 6 5 4 6 6 7 8 7 7 5 6 9 8 12 9 8 8 5 5 8 7 9 9 13 6 6 5 6 6 6 7 7 7 6 5 6 7 6 5 6 9 10 15 11 7
|
| 7 |
+
LJ049-0141 6 7 7 7 7 6 8 7 6 8 8 9 6 5 5 4 4 5 4 4 5 5 6 5 6 7 6 5 5 4 6 31 9 9 6 8 10 11 10 6 6 5 5 6 6 8 8 5 5 5 6 6 7 9 9 6 8 7 7 9 5 5 5 8 6 5 6 6 7 6 7 5 6 7 5 6 6 13 8 8 10 7 7 8 7 8 8 14 11 9 5 5 5 7 9 10 8 6 6 5 4 6 6 6 6 5 5 5 4 5 5 5 6 6 5 6 7 7 6 8
|
| 8 |
+
LJ049-0142 8 7 7 8 7 8 6 5 5 5 6 6 7 6 7 5 4 5 6 6 6 8 10 10 8 7 8 8 8 8 7 11 9 11 8 10 11 8 6 5 5 5 7 6 6 6 6 6 7 7 8 7 6 8
|
| 9 |
+
LJ049-0143 9 10 8 9 8 11 6 6 6 7 8 8 8 6 8 12 17 9 9 8 7 8 7 10 9 6 8 7 8 7 7 7 6 6 7 6 8 8 6 7 12 11 7 7 9 10 8 6 6 5 7 7 6 5 4 4 4 5 7 9 9 8 9 17 6 5 6 6 5 5 6 6 7 6 5 5 5 6 10 19 18 7 9
|
| 10 |
+
LJ049-0144 5 5 6 6 5 5 7 6 4 4 4 6 8 9 7 5 5 5 6 4 5 5 8 9 10 9 10 6 6 7 9 7 5 5 8 7 8 6 4 5 5 6 7 6 6 6 5 5 7 9 10 9 6 5 5 5 6 7 8 7 7 8 8 12 8 9 10 6 6 8 9 9 7 8 9 16 7 6 5 5 5 5 5 5 5 6 6 5 6 5 6 6 6 5 6 6 8 7 8
|
| 11 |
+
LJ049-0145 11 8 6 6 6 7 7 5 5 5 4 4 5 6 7 6 6 5 6 6 5 5 5 5 5 5 5 5 5 5 5 5 8 10 8 6 5 6 6 7 6 5 5 5 5 6 5 8 7 6 6 6 5 7 7 7 8 7 7 6 5 5 5 5 5 6 6 5 6 7 6 9 28 8 6 5 5 9 12 9 8 8 8 10 7 8 6 6 6 7 7 6 7 10 7 11 8 7 7
|
| 12 |
+
LJ049-0146 8 7 8 7 7 6 6 7 8 7 7 7 6 7 7 6 6 7 6 7 9 9 7 8 8 8 8 10 7 6 5 5 5 5 6 6 9 11 10 8 9 9 4 5 4 6 7 5 4 7 6 5 4 5 5 6 6 7 6 6 8 8 10 7 5 5 6 7 6 6 5 7 7 8 6 6 7 7 8 9 7 23 9 7 7 7 7 6 6 6 6 7 9 9 9 8 8
|
| 13 |
+
LJ049-0147 7 7 11 10 7 7 7 5 5 5 6 4 5 5 5 5 5 7 9 6 5 5 5 6 7 6 3 6 6 6 6 8 11 8 6 5 6 6 7 8 7 7 8 7 6 5 5 9 11 7 6 8 12 8 7 4 8 6 7 10 5 7 12 8 6 5 5 7 6 7 6 6 6 6 7 6 6 6 5 5 7 5 8 6 5 4 5 4 6 6 5 4 7 6 6 6 6 6 7 8 10 11 13 12 10
|
| 14 |
+
LJ049-0148 5 6 8 6 8 8 8 7 10 9 7 8 7 7 13 9 7 7 6 5 5 5 7 8 8 6 6 8 3 7 8 6 5 5 6 6 6 9 8 8 8 8 10 9 9 8 6 5 4 5 7 8 7 6 5 5 6 7 6 5 4 5 5 5 7 7 8 8 8 8 10 7 9 15 13 15 5 5 5 5 4 6 7 8 8 9 16
|
| 15 |
+
LJ049-0149 4 5 5 5 4 5 8 8 6 5 8 10 7 9 7 9 9 17 8 7 7 6 6 6 6 5 6 5 4 6 7 11 8 11 27 10 9 8 7 7 9 9 9 11 8 7 6 5 6 6 7 7 6 6 7 6 6 6 8 9 8 6 5 5 6 6 6 6 6 5 5 5 6 6 5 5 5 8 10 9 8 6 7 8 6 10 10 9 7 7 6 5 5 5 5 5 5 5 6 6 5 6 7 7 7 6 5 6 11 11 12 10
|
| 16 |
+
LJ049-0150 12 9 7 6 5 4 6 7 6 6 5 5 5 6 7 6 6 8 11 6 6 7 7 10 10 8 6 6 6 6 6 5 6 8 9 10 6 6 7 5 6 6 8 8 7 7 7 6 5 6 7 5 4 4 4 6 8 9 7 5 5 6 5 6 7 6 7 7 8 8 8 7 7 10 8 9 10 7 6 6 10 7 6 6 5 6 6 7 8 5 5 5 5 5 5 5 6 6 5 6 6 8 6 9
|
| 17 |
+
LJ049-0151 6 6 7 7 6 7 5 5 5 6 6 7 9 11 7 5 5 6 5 5 5 5 5 5 5 6 6 5 4 4 4 7 6 7 7 7 10 15
|
| 18 |
+
LJ049-0152 6 7 8 8 10 6 5 5 4 4 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 8 13 6 5 4 4 6 6 6 6 5 5 5 4 5 5 5 6 6 5 5 5 5 6 8 6 5 8 11 7 6 8 9 9 5 5 5 3 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 8 9 12 8 6 7 7 7 8 9 8 6 7 6 6 7 8 6 6 6 6 6 5 8 9 8 6 5 6 6 8 7 9
|
| 19 |
+
LJ049-0153 6 7 8 6 5 5 5 7 8 8 6 7 7 7 5 9 8 5 5 6 10 10 9 6 6 6 7 5 5 6 6 6 6 5 5 5 5 3 4 5 6 6 6 5 4 4 5 5 5 5 5 5 6 6 7 8 9 16 8 9 14 11 10 7 6 6 6 6 6 10 10 16 13 32 9 8 8 9 5 4 5 7 8 7 7 6 8 7 7 7 7 6 8
|
| 20 |
+
LJ049-0154 9 10 13 12 9 8 6 7 6 7 10 10 14 12 9
|
| 21 |
+
LJ049-0155 10 8 8 6 5 6 7 7 5 5 6 7 6 6 5 7 8 8 9 11 7 7 9 9 8 8 6 6 7 4 5 5 5 5 5 5 4 4 7 7 8 10 7 7 6 8 12 16 7 8 9 7 5 6 7 9 8 9 7 6 10 9 6 8 7 5 4 7 6 6 6 6 6 10 7 12 9 9 7 8 8 12 7 6 4 5 9 6 5 5 6 8 9 8 11 12 16 10
|
| 22 |
+
LJ049-0156 12 6 5 5 5 7 8 7 5 7 9 8 7 17 7 6 6 6 4 5 5 4 5 5 5 5 5 6 6 5 5 7 6 7 14 10 5 6 7 8 7 8 6 6 7 14 7 5 5 6 7 7 9 8 9 8 7 7 17 14 9 6 5 6 9 8 8 9 10 8 8 8 17
|
| 23 |
+
LJ049-0157 4 7 7 7 6 6 7 6 5 6 10 9 9 8 6 5 4 6 6 6 5 5 5 5 6 6 9 7 7 9 8 8 6 7 7 8 7 11 9 8 8 7 10 8 10 10 6 8 6 8 7 7 6 7 7 6 8 6 5 5 6 6 9 12 10 10
|
| 24 |
+
LJ049-0158 11 15 8 7 7 8 7 6 5 7 7 8 6 6 6 6 7 7 9 12 12 10 7 6 7 5 5 4 4 8 8 8 10 6 7 7 7 9 8 7 6 5 5 6 8 6 5 6 4 4 7 6 7 6 6 7 7 7 7 6 7 7 8 7 9 16 15 16 31 13 8 6 6 6 7 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 6 5 5 5 5 9 5 8 7 6 7 8 9 9
|
| 25 |
+
LJ049-0159 6 7 8 6 5 5 5 7 8 9 7 9 13 17 9 14 10 6 8 6 7 7 9 8 5 6 6 6 6 6 4 6 7 7 5 5 6 7 6 7 10 11 7 6 9 14 13 27 11 13 10 8 8 7 8 7 10 11 7 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 6 7 8 8 8 7 7 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 26 |
+
LJ049-0160 11 7 6 6 6 9 8 11 16 16 35 7 7 7 6 5 5 5 7 8 8 6 7 8 3 6 6 8 13 6 5 8 9 7 7 7 9 9 9 7 6 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 6 13
|
| 27 |
+
LJ049-0161 6 7 8 9 8 8 17 8 4 3 4 5 4 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 8 7 5 5 5 6 6 8 7 6 6 6 5 7 7 7 8 8 8 7 6 6 6 5 2 4 5 5 5 6 6 5 6 6 8 6 9
|
| 28 |
+
LJ049-0162 8 6 9 8 8 7 7 6 4 5 5 7 8 8 6 7 9 4 7 5 6 7 5 5 5 6 7 7 7 6 7 7 6 6 6 7 6 7 5 4 5 6 6 6 10 5 6 4 4 4 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 6 7 6 8
|
| 29 |
+
LJ049-0163 6 56 12 16 27 5 6 6 6 6 6 7 6 8 8 6 5 6 5 5 5 5 5 5 5 5 4 4 5 6 6 6 5 4 4 5 5 6 8 7 7 7 7 8 6 7 6 6 7 7 8 8 9 15 16 13 12 9 6 6 7 7 5 6 7 7 8 8 5 6 6 8 9 9 10 7 6 6 7 8 7 7 8 6 7 7 5 5 7 7 6 6 5 5 6 8 7 11 10 7 8 9 13 10 9
|
| 30 |
+
LJ049-0164 7 5 5 5 6 6 6 7 6 7 8 9 8 6 6 5 5 4 6 31 10 10 7 5 5 7 8 8 7 6 9 13 8 10 5 5 5 6 6 8 6 8 11 7 7 13 5 5 5 6 6 4 6 7 7 7 2 5 4 7 6 6 6 5 5 5 5 4 5 6 8 7 8 6 7 5 4 4 5 5 5 6 6 5 5 5 5 4 5 5 4 3 4 5 6 8 6 6 6 8 9 13 8 10 7 8 8 7 7 7 8 9 8
|
| 31 |
+
LJ049-0165 5 6 9 8 9 8 5 6 9 8 6 5 4 4 5 5 6 6 6 7 6 7 8 8 8 5 5 5 5 5 3 4 5 6 6 6 5 4 4 4 5 6 8 7 7 7 7 8 6 7 5 6 7 7 7 7 8 8 10 8 6 5 4 5 5 6 7 7 6 6 6 4 7 7 8 9 25 6 6 7 7 8 7 6 7 8 9 11 8 8 8 7 7 10 8 8 8 7 7 8 8 9 7
imdanboy/jets/decode_train.loss.ave/dev/log/output.5/speech_shape/speech_shape
ADDED
File without changes
imdanboy/jets/decode_train.loss.ave/dev/log/output.6/durations/durations
ADDED
@@ -0,0 +1,31 @@
LJ049-0166 8 6 9 9 9 31 13 12 8 6 6 8 7 9 10 8 9 6 4 5 5 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 6 10 30 9 6 6 5 6 6 7 5 5 6 8 8 9 7 5 6 6 7 9 10 10 9 9 8 7 8 7 5 6 7 5 5 4 6 11 7 6 8 13 13 10 9
|
| 2 |
+
LJ049-0167 8 6 6 9 7 8 7 5 5 6 5 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 9 8 7 6 7 6 6 7 7 7 7 5 5 5 5 5 5 5 6 6 5 5 5 4 4 12 6 6 6 6 5 6 6 8 7 8
|
| 3 |
+
LJ049-0168 14 8 6 7 6 6 7 7 6 6 9 13 14 26 6 6 5 6 8 8 9 9 6 6 8 7 6 6 4 5 5 5 5 8 6 7 6 6 5 6 6 6 6 7 6 7 5 4 5 6 6 6 9 10 9 6 7 5 8 9 8 9 9 6 7 10 11 6 6 6 6 8 14 14 8 5 5 5 5 7 8 6 7 7 12 15
|
| 4 |
+
LJ049-0169 5 5 5 6 8 9 10 9 11 9 6 6 6 6 6 10 12 7 6 4 4 7 5 7 9 13 8 7 8 10 8 7 6 5 6 6 7 6 6 6 5 5 5 5 6 7 5 7 9 8 6 4 6 8 10 11 8 10 13 7 7 8 9 8
|
| 5 |
+
LJ049-0170 7 5 5 6 8 7 5 5 4 4 6 7 8 8 6 5 6 6 6 8 6 6 4 4 5 8 7 7 6 4 9 6 6 6 7 5 3 4 6 8 8 9 7 9 9 10 9 8 8 7 7 5 4 4 5 5 5 6 6 5 6 7 6 7 10 8 8 7 7 7 8 9 15 9 8 7 7 6 4 4 6 6 6 5 5 5 6 9 10 10 9 10 10 10
|
| 6 |
+
LJ049-0171 5 5 9 9 13 9 5 6 6 6 8 10 6 6 7 10 10 9 9 9 9 7 6 6 6 5 5 5 8 9 10 8 8 6 5 6 6 9 8 8 7 6 5 6 5 5 5 6 6 6 5 6 5 6 4 5 7 6 5 5 7 9 9 8 7 8
|
| 7 |
+
LJ049-0172 2 5 6 7 6 7 6 6 7 7 7 5 4 4 5 5 7 14 10 18 18 9 6 6 5 5 7 6 7 9 8 7 9 13 11 11 7 6 8 12 10 11 11 7 6 7 7 9 7 4 7 5 5 6 6 7 6 7 5 4 5 6 6 7 9 7 8
|
| 8 |
+
LJ049-0173 11 7 6 7 7 7 7 5 5 6 5 7 7 6 8 12 14 10 10 7 9 10 6 10 8 8 6 8 8 8 6 8 5 6 7 9 17 12 7 6 7 6 6 7 7 8 9 11 11 10 21 9 63 9 16 7 5 5 6 7 7 5 6 7 10 6 10 5 6 6 5 6 10 8 6 6 5 5 6 6 6 6 8 7 8 9 5 6 5 7 8 11 21
|
| 9 |
+
LJ049-0174 8 12 7 7 7 7 9 9 7 7 8 6 5 5 5 7 8 7 6 7 7 6 5 5 6 6 5 6 7 7 7 7 6 5 5 6 6 8 5 7 9 9 7 5 7 6 9 8 7 9 7 7 7 7 10 11 8 8 9 8 8 25 7 9 8 27 11 11 8 5 5 7 7 6 5 5 5 6 7 7 7 7 14
|
| 10 |
+
LJ049-0175 5 6 7 7 5 5 5 5 5 6 6 5 5 5 5 6 7 8 9 13 10 11 10 10 14 10
|
| 11 |
+
LJ049-0176 7 6 6 6 6 6 6 7 13 9 11 32 8 7 7 6 5 5 5 7 8 8 6 6 6 3 7 7 7 10 8 9 7 6 9 5 6 8 8 7 7 6 7 8 8 9 10 10 9 20 7 7 9 7 9 8 5 8 11 9 9 25 13 8 6 6 5 6 6 7 6 7 6 5 5 10 6 6 5 7 6 6 6 5 5 8 8 7 7 7 8 10 9 8
|
| 12 |
+
LJ049-0177 5 7 8 8 7 6 9 12 17 15 8 13 13 14 5 5 5 6 6 6 6 8 9 7 5 5 4 4 5 4 4 6 6 6 6 5 6 8 7 10 8 9 9 10 7 6 5 5 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 13 25 6 6 8 10 7 9 8 6 6 6 6 7 7 6 5 4 5 5 7 7 8 5 5 5 5 5 8 7 9 8 7 10 9 10 9 6 6 6 41 15 23
|
| 13 |
+
LJ049-0178 10 5 4 4 5 7 7 7 7 6 6 6 8 7 5 6 13 10 7 8 6 6 7 6 5 6 6 10 9 9 10 8 9 7 7 6 5 6 7 7 6 5 5 5 7 8 7 6 7 8 6 5 5 9 9 8 8 8 7 9 8 11 11 8 8
|
| 14 |
+
LJ049-0179 5 5 5 5 6 5 7 6 5 5 6 8 11 7 8 17 7 10 7 9 9 7 37 13 13 5 6 6 5 7 7 6 5 5 5 7 8 8 6 7 10 13 7 6 8 9 7 8 7 7 7 6 5 5 6 6 7 7 8 7 5 5 5 6 7 6 6 6 6 5 5 6 6 6 6 7 6 7 5 4 5 6 6 7 10 12 12 10
|
| 15 |
+
LJ049-0180 5 5 5 5 6 6 7 5 5 5 8 6 6 5 5 6 6 6 7 9 10 14
|
| 16 |
+
LJ049-0181 5 7 6 7 9 6 8 10 7 7 6 7 8 9 11 8 7 7 7 8 7 6 8 12 8 10 9 6 5 5 6 5 4 5 7 6 5 5 7 8 7 6 7 5 6 7 5 7 7 6 6 6 7 9 8 9 7 6 8 11 7 6 4 5 6 6 5 5 6 7 6 6 13 9 8 7 8 6 7 7 9 8 7 8 7 8 7 8 9 6 5 4 4 6 7 7 10 10 8 10 9 11 11 10
|
| 17 |
+
LJ049-0182 11 6 8 6 7 8 9 7 27 7 5 5 5 6 6 7 5 4 5 6 6 5 5 7 7 7 7 6 6 7 11 7 6 6 7 7 6 5 5 5 7 8 7 6 6 8 5 4 6 6 5 4 5 5 6 6 7 9 9 8 7 6 17 7 5 6 8 6 6 8 9 9 6 7 5 7 7 6 7 6 5 4 5 5 6 6 6 7 5 7 9 10 10 16
|
| 18 |
+
LJ049-0183 6 7 6 6 5 4 6 8 8 8 12 11 8 10 8 8 13 9 9 5 6 6 5 5 6 5 4 6 6 6 6 5 6 8 7 10 9 9 9 10 7 6 6 8 6 7 7 7 9 9 9 6 6 5 4 5 6 6 8 8 9 12 8 10 7 6 7 7 6 8 9 10
|
| 19 |
+
LJ049-0184 7 7 7 6 7 6 6 7 7 5 7 8 10 8 7 7 5 6 7 7 5 5 6 7 7 6 9 10 10 13 17 8 7 7 7 6 6 7 8 8 9 8 6 6 11 18 7 4 5 6 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 20 |
+
LJ049-0185 7 6 6 5 4 4 6 7 7 9 7 5 5 4 5 5 5 6 6 7 6 6 4 6 7 6 5 5 6 10 10 11 10 6 6 6 7 7 6 7 10 10 9 10 8 7 9
|
| 21 |
+
LJ049-0186 5 5 5 5 5 6 7 6 4 6 7 9 7 6 5 5 6 5 5 5 6 6 5 5 5 4 8 11 8 7 8 9 6 8 9 13 10 13 8 6 5 6 6 7 10 9 10 8 6 5 10 9 7 6 7 5 5 4 4 4 4 4 5 4 4 7 6 6 6 5 5 5 4 5 5 5 6 6 5 6 6 8 6 9
|
| 22 |
+
LJ049-0187 11 11 9 5 5 5 8 11 8 7 7 6 6 8 7 6 6 6 6 10 7 5 4 7 7 8 6 8 7 5 7 7 7 8 10 7 5 6 6 8 6 6 7 8 6 6 6 6 6 7 6 7 5 4 5 6 6 6 10 6 6 5 7 14 9 10 11 8 9 6 6 5 5 7 9 11 15
|
| 23 |
+
LJ049-0188 8 8 7 6 11 9 5 7 5 5 7 8 7 5 6 7 5 5 4 7 11 7 6 9 13 12 22 8 7 7 5 6 6 7 8 10 8 4 5 5 6 6 6 6 6 5 6 8 7 6 7 6 8 7 5 5 6 6 5 5 6 8 11 7 6 7 10 9 7 7 7 9 14 10 7 11 20 7 8 9 6 5 5 7 9 7 6 8 5 6 6 7 6 7 5 6 7 8 7 7 6 8 12
|
| 24 |
+
LJ049-0189 5 5 7 7 6 8 7 10 6 9 6 10 8 10 7 6 7 9 7 5 4 5 5 8 9 9 9 8 8 7 6 6 5 5 5 5 6 6 7 6 7 6 6 10 12 7 6 8 13 10 8 5 4 4 7 6 6 4 5 5 6 6 7 7 10 12 9 5 4 5 7 7 6 5 5 5 7 8 8 7 8 8 5 8 7 6 57 15 13 10
|
| 25 |
+
LJ049-0190 8 4 6 9 7 6 6 6 8 7 7 5 4 8 14 15 26 7 5 6 51 12 15 16 10 9 11 10 11 10 10 8 5 6 7 7 5 5 6 7 6 7 10 11 7 6 9 13 10 9
|
| 26 |
+
LJ049-0191 9 8 6 6 7 7 7 8 8 8 13 8 6 4 5 5 6 6 6 7 8 19 7 6 8 8 9 6 8 5 5 6 8 6 7 7 5 5 6 5 5 7 5 7 8 9 7 10 9 9 8 6 6 8 7 5 5 6 6 8 7 9 6 5 5 5 5 5 4 4 5 6 8 6 6 6 9 10 12 11 11 10
|
| 27 |
+
LJ049-0192 8 5 5 5 7 8 8 8 9 8 5 4 7 8 8 6 6 7 9 7 5 5 5 4 5 6 7 7 6 6 6 6 5 7 7 7 7 6 5 6 7 7 9 7 5 4 6 7 11 12 9 5 4 4 6 7 7 6 5 5 5 7 8 8 6 7 13 6 6 5 6 7 6 6 6 6 5 5 6 8 6 7 8 7 6 7 6 8 7 8 9 7 8 9 8 7 6 7 8 7 7 9 6 7 7 6 7 5 4 5 7 7 8 6 5 6 6 8
|
| 28 |
+
LJ049-0193 7 7 6 5 5 5 5 4 5 5 6 10 17 6 6 5 4 7 7 7 6 7 6 6 7 6 5 5 6 8 17 9 13 6 6 5 6 6 6 7 10 13 10 10 27 7 7 6 11 8 6 5 5 6 6 6 6 6 6 6 5 6 7 7 5 5 6 7 6 6 9 6 5 5 6 6 7 6 5 6 6 7 6 6 6 8 6 5 5 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 7 6 13
|
| 29 |
+
LJ049-0194 10 6 5 5 6 7 6 8 8 7 7 7 9 6 6 7 43 16 15 10
|
| 30 |
+
LJ049-0195 10 8 7 6 5 4 5 5 5 7 7 6 4 5 5 7 8 7 6 7 19 8 8 7 4 5 4 6 6 6 7 6 7 5 4 5 6 6 6 9 5 6 4 4 5 6 6 6 5 6 5 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 7 8 34 6 8 11 12 12 3 6 6 6 6 5 4 6 7 7 5 5 5 5 5 6 8 10 13 10 9
|
| 31 |
+
LJ049-0196 6 6 6 6 6 6 6 7 12 9 10 16 8 6 7 11 19 12 13 21 9 7 11 8 11 11 9 9 9 6 4 5 5 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 5 7 6 7 14 9 5 6 7 8 7 8 6 5 6 9 8 6 4 5 6 6 7 5 6 6 6 8 7 6 5 6 8 8 10 8 8 10 8 7 6 5 6 7 5 6 5 5 5 6 9 11 9 9
imdanboy/jets/decode_train.loss.ave/dev/log/output.6/speech_shape/speech_shape
ADDED
File without changes
imdanboy/jets/decode_train.loss.ave/dev/log/output.7/durations/durations
ADDED
@@ -0,0 +1,31 @@
LJ049-0197 7 5 7 7 9 8 7 8 5 5 6 7 7 6 6 7 5 8 6 5 6 8 12 7 6 4 4 5 7 6 5 5 7 7 6 5 4 4 5 5 6 8 9 9 11 9 7 7 7 6 5 5 5 5 5 7 8 9 8 6 5 4 5 5 5 5 5 5 5 5 6 6 5 10 7 8 6 6 8 18
|
| 2 |
+
LJ049-0198 10 10 13 8 7 5 7 8 7 7 6 6 6 8 9 8 8 7 6 5 6 6 9 9 9 15 8 8 9 7 6 8 8 8 7 7 11 6 7 6 7 8 7 5 9 8 9 9 8 8 8
|
| 3 |
+
LJ049-0199 4 8 9 7 6 7 8 7 7 7 7 8 7 7 6 6 6 5 8 7 6 8 9 8 7 6 7 7 8 8 7 7 7 7 8 6 6 5 4 7 9 10 8 6 5 4 5 5 5 6 7 7 6 6 5 5 6 5 5 5 5 5 6 5 5 5 6 5 5 6 7 9 6 6 6 7 6 7 5 4 6 6 6 7 9 7 8
|
| 4 |
+
LJ049-0200 6 6 7 7 6 5 4 7 8 8 6 6 7 10 8 4 5 8 9 6 5 5 5 7 8 8 8 7 6 11 8 6 5 6 5 5 5 8 11 17 21 9 7 6 5 6 7 7 5 5 7 8 9 13 6 5 5 7 8 8 6 5 6 6 6 8 9 8 6 7 7 9 7 7 8 6 5 5 5 5 5 6 6 5 6 7 7 13
|
| 5 |
+
LJ049-0201 4 8 8 5 6 7 6 6 6 7 6 8 8 7 5 5 4 6 7 7 6 7 6 5 4 5 5 6 6 6 7 5 7 8 9 7 10 9 10 7 7 5 7 12 11 8 11 7 5 4 4 5 6 7 6 9 6 5 5 6 6 6 7 9 8 9 8 9 9 10 12 9 6 6 6 7 6 6 5 6 5 5 5 7 7 7 7 6 8 8 6 5 7 8 7 7 5 6 5 7 11 12 12 23
|
| 6 |
+
LJ049-0202 8 8 7 5 5 5 5 7 8 8 6 4 7 6 6 6 7 6 7 5 4 5 6 6 7 10 12 13 10
|
| 7 |
+
LJ049-0203 14 13 10 8 5 5 6 7 8 9 13 9 11 11 8 6 5 6 5 6 8 8 6 7 7 9 7 5 7 6 7 7 6 8 9 9 12 21 13 14 9 5 5 6 6 6 7 8 7 7 7 6 6 7 7 7 6 8 8 5 5 5 6 6 6 5 6 7 11 9 7 7 7 9 8 7 6 5 6 5 5 5 6 6 7 6 7 5 5 6 6 6 7 9 7 8
|
| 8 |
+
LJ049-0204 10 8 9 5 5 7 7 6 5 4 6 7 5 5 5 5 5 6 6 6 8 7 10 16 19
|
| 9 |
+
LJ049-0205 7 7 8 6 6 5 6 7 5 4 6 7 7 6 7 7 8 8 6 6 6 6 6 6 7 8 9 6 5 6 9 6 6 5 5 5 7 6 5 5 5 6 5 5 5 6 4 5 5 6 6 8 8 7 7 11 8 5 6 6 6 9 10 6 6 7 11 10 17 5 6 8 9 10 6 5 4 5 5 5 5 7 6 8 7 5 6 6 6 6 7 6 7 5 5 5 5 5 5 4 4 5 6 6 5 5 8 11 7 8 10 10 9 10 21
|
| 10 |
+
LJ049-0206 4 6 9 8 9 8 6 5 5 8 13 11 12 7 6 7 7 8 7 6 5 6 5 5 6 7 7 8 8 8 7 11 7 5 5 7 6 6 6 7 9 7 8 8 14 17 29 11 7 9 10 9 10 9 9 13 18
|
| 11 |
+
LJ049-0207 4 4 5 5 5 5 6 6 7 8 7 4 5 5 5 5 4 5 5 5 5 6 7 6 6 7 6 6 6 7 6 7 5 4 5 6 6 7 10 11 8 6 6 7 7 7 5 7 6 6 6 6 7 6 6 5 4 4 4 5 7 9 9 8 7 9 9 7 6 5 5 7 8 9 9 9 9 7 6 7 5 5 5 5 6 8 8 7 6 6 5 4 4 5 5 5 6 6 5 4 4 4 4 5 5 4 6 7 7 7 6 8 7 8
|
| 12 |
+
LJ049-0208 10 7 8 7 7 6 6 7 6 5 7 7 7 8 7 7 7 11 7 7 6 7 5 10 8 6 6 5 6 5 6 4 4 5 5 5 5 6 7 8 7 5 5 6 8 8 6 7 5 4 4 5 5 5 6 6 5 5 4 4 3 5 8 8 8 8 6 7 7 9 8 13 6 6 5 6 8 9 10 11 9 9
|
| 13 |
+
LJ049-0209 5 5 5 5 6 6 7 5 5 5 6 6 7 9 9 13 9 9 16 8 6 7 8 10 10 9 7 8 9 8 6 9 8 6 4 5 5 6 7 7 8 9 7 6 5 6 7 7 5 7 7 6 7 7 8 7 7 9 6 6 6 9 16 12 11 9
|
| 14 |
+
LJ049-0210 2 3 4 6 7 8 8 6 6 6 7 5 7 7 5 5 7 8 7 5 5 6 8 8 6 6 5 4 5 8 8 7 5 5 5 6 6 6 5 5 5 5 6 8 8 8 6 8 5 4 4 5 5 5 6 6 5 4 4 4 4 5 5 4 6 7 7 7 6 7 6 8
|
| 15 |
+
LJ049-0211 8 6 5 4 3 4 4 5 5 5 6 6 6 7 8 7 7 8 6 6 4 4 4 5 5 5 5 6 7 7 5 6 6 7 7 7 5 5 7 7 5 5 6 5 5 5 6 7 8 7 5 5 6 6 7 10 37 8 8 8 6 5 6 6 6 6 7 6 7 6 6 7 7 5 6 5 6 8 7 9 7 6 5 5 5 5 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 5 5 6 6 5 6 8 8 9
|
| 16 |
+
LJ049-0212 7 10 7 5 6 7 11 7 5 6 6 7 7 6 7 7 7 7 7 8 10 12 8 7 6 6 6 5 5 6 7 7 8 8 9 7 10 6 6 5 6 8 7 7 8 7 10 12 8 9
|
| 17 |
+
LJ049-0213 6 5 5 8 8 9 8 7 8 9 8 8 7 7 8 8 6 5 5 6 7 9 6 5 5 6 6 6 7 6 7 5 4 5 6 6 7 10 8 8
|
| 18 |
+
LJ049-0214 6 5 6 6 5 6 8 10 9 8 7 13 5 5 6 6 6 7 7 7 6 6 6 7 5 5 8 7 6 7 6 8 10 10 5 7 7 7 6 4 5 6 6 7 8 12 7 6 8 11 5 7 9 11 9 9 7 7 6 6 5 5 5 8 9 10 10 11 12 12 10 27 5 5 3 6 5 6 7 6 6 6 7 13 9 14
|
| 19 |
+
LJ049-0215 5 5 4 4 6 6 6 6 5 5 5 5 4 4 5 5 5 5 6 5 5 6 6 19 10 6 6 5 8 8 11 10 9 7 7 5 7 6 5 6 7 5 5 6 21 9 8 8 5 6 6 7 6 7 5 4 5 6 6 6 10 11 14 7 8 7 6 7 6 6 7 6 7 8 6 7 6 6 8 10 10 12 8 8 6 7 6 4 4 5 6 6 6 5 5 4 4 4 5 6 10 9 14 9 8
|
| 20 |
+
LJ049-0216 6 56 11 17 10 6 8 10 16 24 23 26 6 5 6 5 6 5 6 6 6 7 7 8 8 6 7 6 6 6 6 6 6 6 8 11 6 6 8 11 9 7 8 8 7 6 8 7 6 6 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 21 |
+
LJ049-0217 8 9 9 11 8 12 22 10 7 5 5 5 6 6 6 6 7 7 6 5 6 5 5 5 5 6 6 5 6 7 6 7 6 5 9 7 7 6 5 5 8 9 13 8 6 8 7 7 9 10 10 8 5 5 4 5 7 6 6 5 5 4 5 5 8 9 11 9 8 8 9 8 10 11 11 10 8 9 9 7 6 7 9 14
|
| 22 |
+
LJ049-0218 5 5 6 6 6 5 5 7 7 6 5 5 5 7 8 8 7 8 9 7 8 5 5 5 5 5 6 6 5 5 7 6 7 13 11 5 6 7 8 7 8 6 6 7 18 9 8 7 7 6 5 5 5 6 7 8 7 7 5 7
|
| 23 |
+
LJ049-0219 13 11 8 5 5 7 6 5 8 9 8 8 7 6 5 5 5 5 6 7 8 9 7 4 5 5 5 6 6 6 8 8 6 6 5 5 6 8 15 13
|
| 24 |
+
LJ049-0220 4 4 5 5 4 5 8 8 7 5 4 6 8 8 6 5 5 5 6 6 7 8 6 5 6 7 7 5 5 7 8 6 5 6 6 9 6 7 8 8 10 9 7 6 6 6 5 5 6 6 7 6 7 5 4 5 6 6 7 9 11 8 7 6 8 6 5 5 6 6 8 6 7 7 6 5 5 8 7 7 7 11 6 5 5 5 6 6 7 6 5 5 6 8 8 7 8 9 5 5 6 7 10 7 5 7 10 12 10
|
| 25 |
+
LJ049-0221 6 5 7 9 9 6 7 7 8 7 6 6 5 4 4 4 4 7 9 8 7 6 6 7 11 7 6 10 10 10 8 8 8 8 7 7 7 10 34 8 7 6 6 8 8 5 5 4 6 7 6 6 5 5 5 6 7 5 5 7 11 7 6 5 6 6 5 5 5 5 5 8 8 13 13 10 6 6 6 6 7 7 6 6 6 6 5 6 6 6 5 5 5 7 8 8 8 9 8 6 6 8
|
| 26 |
+
LJ049-0222 7 6 7 5 5 7 7 7 9 8 7 8 6 7 8 7 5 5 6 8 9 6 5 4 7 6 7 6 11 20 8 7 8 6 6 6 6 7 7 7 7 7 5 6 5 5 6 7 6 5 5 4 5 5 5 7 7 7 7 7 9 7 6 9 6 7 8 9 8 7 6 5 5 5 7 8 10 10 15
|
| 27 |
+
LJ049-0223 4 6 6 10 8 6 6 6 5 5 5 4 5 7 6 7 6 5 5 6 8 8 7 6 7 11 6 7 7 7 6 7 6 6 10 12 7 6 7 10 8 7 6 6 6 5 6 7 5 6 6 22 7 6 6 8 8 7 5 6 6 7 7 6 7 8 8 10 10 4 7 7 14 18 28 11 7 6 7 6 6 5 7 5 7 9 10 10 16
|
| 28 |
+
LJ049-0224 4 7 10 10 7 5 7 8 10 7 6 6 7 7 7 6 6 6 5 5 6 6 6 6 6 5 5 6 8 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 29 |
+
LJ049-0225 5 5 5 5 5 6 7 6 10 9 9 11 12 8 7 7 6 5 5 5 7 8 8 7 8 8 6 8 7 6 55 14 18 23 9 9 7 6 8 7 6 6 7 6 7 8 5 5 6 6 6 6 5 5 5 6 8 9 9 7 5 8 10 15 28 13 8 4 4 4 5 5 5 6 6 5 5 4 5 4 6 6 8 6 6 6 7 7 6 12 12 9 8
|
| 30 |
+
LJ049-0226 7 5 5 5 6 7 8 11 8 6 5 6 6 6 7 6 7 5 6 6 5 6 8 8 9 7 10 10 9 6 8 6 5 6 8 8 10 10 12 23 14 13 9 6 7 7 7 7 8 7 17 10 6 9 9 8 6 6 6 6 5 7 8 8 8 6 7 8 10 7 6 8
|
| 31 |
+
LJ049-0227 5 6 8 14 15 4 5 7 7 10 10 10 9 8 6 5 5 6 6 8 8 8 6 6 6 6 5 6 7 7 8 7 7 6 7 6 6 6 6 6 6 5 5 5 5 7 8 10 9 9 9 7 6 7 5 5 5 5 7 8 8 7 6 6 5 4 4 5 5 5 6 6 5 5 5 4 4 11 6 6 6 6 5 6 6 9 8 9
imdanboy/jets/decode_train.loss.ave/dev/log/output.7/speech_shape/speech_shape
ADDED
File without changes
imdanboy/jets/decode_train.loss.ave/dev/log/output.8/durations/durations
ADDED
@@ -0,0 +1,31 @@
LJ049-0228 5 5 5 5 5 5 7 6 5 5 6 8 11 8 9 22 6 6 6 8 7 9 7 6 5 5 5 5 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 5 5 5 6 8 8 9 8 9 7 10 9 7 5 5 4 7 8 9 9 11 9 11 11 9
|
| 2 |
+
LJ049-0229 9 8 7 8 12 8 6 6 7 7 14 8 8 6 6 5 5 7 6 7 8 8 10 9 7 6 5 6 5 5 6 6 7 6 7 5 4 5 6 6 7 10 12 11 9
|
| 3 |
+
LJ049-0230 10 12 7 7 5 6 7 6 5 5 8 7 6 6 6 6 6 5 5 5 5 6 8 7 7 7 6 6 6 6 8 8 10 6 5 5 7 6 5 6 6 7 7 5 5 5 5 6 6 6 4 9 7 6 7 6 5 2 4 4 7 6 6 6 5 5 6 5 4 4 5 5 5 6 6 5 6 6 8 6 9
|
| 4 |
+
LJ050-0001 10 7 8 6 7 8 7 6 8 5 7 10 10 9 6 4 13 11 7 7 7 7 6 5 7 10 13 14 40 9 8 13 11 6 6 6 7 5 5 6 6 5 5 7 10 9 9 8 10 11 12 13 10 25 7 7 8 7 6 5 5 5 4 5 5 5 6 6 5 4 5 4 3 4 5 6 6 6 8 8 8 6 5 6 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 6 5 5 5 5 5 6 6 5 5 7 6 8
|
| 5 |
+
LJ050-0002 5 7 6 6 6 5 5 5 6 5 6 7 6 6 5 6 8 9 10 10 9
|
| 6 |
+
LJ050-0003 7 8 12 5 5 5 6 6 5 4 5 4 3 4 5 5 6 6 7 7 7 5 4 6 6 7 8 8 8 6 6 7 7 5 4 5 5 4 5 5 5 5 5 4 4 5 6 6 5 5 8 10 31 11 8 8 9 14 18 15 29 7 5 4 4 6 6 7 6 5 5 6 5 4 4 5 5 5 6 6 5 6 7 7 9 12 8 6 7 11 11 18 11 7
|
| 7 |
+
LJ050-0004 8 6 6 6 6 8 7 7 7 7 7 6 6 6 5 5 5 5 7 7 6 5 5 5 7 8 9 7 9 18
|
| 8 |
+
LJ050-0005 7 7 7 6 5 5 5 7 9 7 5 5 5 7 7 6 5 5 5 7 8 7 5 7 8 8 7 6 6 7 7 8 6 7 8 9 22 6 5 5 6 7 9 8 11 12 22
|
| 9 |
+
LJ050-0006 4 6 7 5 6 6 7 8 5 5 6 8 8 7 6 8 9 6 5 5 4 6 5 4 6 6 6 6 5 6 9 6 5 5 6 6 8 7 8 7 7 7 8 7 7 7 7 6 6 5 6 9 8 7 6 5 5 3 4 5 6 6 6 5 4 4 5 5 5 5 5 5 6 6 7 9 12 15
|
| 10 |
+
LJ050-0007 5 6 7 7 8 5 6 6 5 6 6 6 6 6 6 6 4 4 4 5 5 6 6 7 8 9 11 8 7 6 8 7 10 9 9 8 8 10 10 6 5 7 5 5 6 7 6 3 5 6 6 8 9 8 7 7 7 8 7 7 6 8
|
| 11 |
+
LJ050-0008 5 5 5 5 5 5 7 5 5 5 6 8 11 8 9 22 6 5 5 5 6 5 5 6 6 5 5 5 5 6 9 7 5 5 4 6 7 6 6 5 5 5 6 7 5 6 8 7 5 6 6 6 6 7 7 6 6 9 8 6 7 5 6 5 6 5 5 5 5 6 6 10 8 8 6 8 7 6 7 6 7 10 8 7 6 7 8 7 7 7 7 7 7 10 14
|
| 12 |
+
LJ050-0009 6 6 8 7 8 11 8 7 6 6 6 5 5 5 5 6 7 7 7 7 6 4 5 6 6 5 5 7 9 9 9 11 52 12 8 8 7 9 9 11 12 8 6 4 5 8 8 8 6 6 12 8 9 7 8 13 8 5 5 6 6 8 10 7 6 9 10 7 7 11 8 9 9 9 8 8 6 5 5 5 5 7 7 6 5 5 5 7 9 9 7 9 14 10
|
| 13 |
+
LJ050-0010 9 11 9 9 9 7 7 7 7 6 6 6 5 5 4 5 8 9 9 9 8 8 7 6 7 5 5 5 7 7 6 5 5 5 7 8 8 6 8 13 11 5 5 5 5 5 4 4 5 6 6 6 5 5 4 5 5 5 5 5 5 6 6 7 9 10 7 9 6 5 6 6 6 6 8 10 15 9 8 8
|
| 14 |
+
LJ050-0011 6 6 9 9 8 5 5 5 8 9 8 6 7 10 9 12 7 7 8 8 8 7 14 6 5 5 7 7 6 6 4 6 6 8 8 6 5 4 5 5 6 6 8 10 12 15
|
| 15 |
+
LJ050-0012 8 7 15 7 6 8 7 7 7 6 5 5 6 6 6 6 4 6 6 7 9 10 14 9 8 8 7 8 9 8 11 9 8 10 7 7 7 6 6 5 5 5 8 8 7 7 8 11 7 7 7 7 6 6 6 5 5 5 5 6 5 6 7 9 11 7 5 5 7 6 7 8 16
|
| 16 |
+
LJ050-0013 4 5 6 5 3 4 5 6 6 6 5 4 4 4 4 5 5 6 7 6 6 6 4 5 13 8 7 7 8 8 8 7 7 9 5 8 11 11 23 7 7 6 9 8 11 8 8 7 7 6 5 5 5 6 6 5 5 6 7 7 7 6 6 4 6 6 7 7 6 5 4 5 5 6 6 7 8 10 8 7 8 7 8 8 7 6 7 6 7 5 6 8 9 7 8 7 8 7 9 10 11 13 10
|
| 17 |
+
LJ050-0014 6 6 6 7 7 6 6 5 5 4 7 9 8 8 7 11 8 6 6 4 5 5 5 6 6 5 6 6 5 5 6 8 9 8 8 7 10 6 4 6 9 10 9 8 7 6 6 4 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 18 |
+
LJ050-0015 5 5 5 5 5 6 7 6 7 6 6 5 7 7 9 9 6 6 4 6 6 6 7 7 6 6 4 6 6 8 8 6 5 4 5 5 6 6 7 9 9 17 9 8 7 8 6 8 7 6 6 6 6 7 9 7 7 7 6 6 5 10 6 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 9 17 7 8 9 8 6 6 9 11 8 6 6 5 8 9 9 7 9 14 10
|
| 19 |
+
LJ050-0016 7 6 10 7 6 6 6 6 6 8 7 7 7 6 5 4 4 6 6 5 5 6 6 6 7 8 10 10 24 6 5 5 5 6 7 8 6 6 7 5 5 6 9 8 11 12 11 19 9 5 5 6 8 6 7 8 7 6 6 8 7 9 11 8 7 6 8 7 8 7 5 6 6 9 27 7 8 7 7 6 6 7 7 7 8 13 13 13 10 10 11 9 11 10 10 9 13
|
| 20 |
+
LJ050-0017 5 4 7 8 8 7 5 5 5 6 4 5 5 7 8 7 10 7 7 7 6 6 7 5 5 6 7 6 3 5 6 6 8 9 8 7 7 7 7 7 10 14
|
| 21 |
+
LJ050-0018 5 7 13 6 7 7 8 7 6 7 7 6 5 4 6 6 6 8 8 10 7 8 7 7 7 8 8 6 5 5 9 6 6 6 5 4 5 8 14 8 8 6 5 5 6 5 6 5 5 5 2 5 6 5 6 5 5 5 4 5 4 4 6 6 6 7 6 6 6 6 6 6 5 5 5 5 5 5 5 6 6 5 4 4 4 7 7 7 7 6 7 6 8
|
| 22 |
+
LJ050-0019 7 6 8 6 7 8 8 7 7 4 7 6 7 6 6 5 6 8 8 7 7 5 4 7 9 9 9 7 6 5 5 4 6 7 7 7 9 8 7 17
|
| 23 |
+
LJ050-0020 8 6 6 7 7 5 5 5 5 5 6 7 6 5 5 6 8 12 8 9 23 6 6 6 7 7 6 5 5 5 7 8 8 6 7 10 5 6 9 11 10 9 7 6 6 5 4 5 6 6 7 7 7 6 6 7 5 5 7 6 7 8 6 6 11 8 6 5 5 5 8 8 11 8 9
|
| 24 |
+
LJ050-0021 11 9 7 7 7 6 7 7 7 8 9 8 6 7 23 10 5 6 6 6 7 7 8 8 8 6 7 7 9 8 24 8 5 5 6 8 7 6 6 6 7 8 7 7 6 6 8 6 6 5 5 5 5 7 7 7 7 7 10 8 7 7 8 8 5 5 7 7 8 7 6 5 5 5 5 6 7 6 5 5 5 5 23 7 5 5 5 5 6 6 5 5 5 6 6 6 7 7 9 7 6 8
|
| 25 |
+
LJ050-0022 9 11 7 6 5 7 7 3 7 8 9 9 10 6 5 7 4 5 5 6 5 6 6 5 5 5 4 6 6 6 7 6 7 5 4 5 6 6 6 8 9 8 6 5 5 5 5 5 8 11 8 12 11 7 6 6 7 8 7 13 21 8 6 5 6 6 8 9 8 8 7 7 6 7 7 8 9 9 7 9 14 10
|
| 26 |
+
LJ050-0023 9 10 8 7 7 8 6 6 8 10 8 8 7 8 13
|
| 27 |
+
LJ050-0024 5 4 4 4 5 4 4 5 5 5 6 8 9 8 8 6 6 6 8 7 7 6 5 7 7 7 7 7 10 7 7 8 6 6 6 7 6 6 5 5 4 6 6 6 6 6 6 5 6 9 8 13 8 8 5 5 5 7 7 6 5 5 5 7 8 8 7 8 8 5 7 8 8 8 11 10 6 6 8 11 10 8 6 5 6 6 7 6 5 6 6 6 8 8 8 8 9 7 18
|
| 28 |
+
LJ050-0025 19 9 7 5 4 4 5 8 6 6 6 10 10 10 9 11 8 7 7 7 6 7 7 7 14 6 6 6 7 8 8 10 10 9
|
| 29 |
+
LJ050-0026 5 5 5 5 5 6 7 9 10 11 10 10 7 13 6 6 6 9 9 8 6 6 28 6 5 6 7 6 4 4 5 7 7 10 9 9 9 8 7 9 9 7 7 9 12 9 7 6 5 5 8 8 12 8 7 5 6 5 6 8 9 11 10 13 13 10
|
| 30 |
+
LJ050-0027 8 7 8 7 8 6 6 6 7 6 5 7 7 6 6 6 10 8 9 9 10 12 9 8 8 9 6 6 6 5 5 9 8 6 8 10 7 6 7 5 6 8 10 9 9 9 9 10 10 10 9
|
| 31 |
+
LJ050-0028 11 18 15 11 8 5 5 6 6 7 6 5 5 5 7 8 9 9 8 15 8 7 6 8 11 11 9 9 7 6 7 6 8 11 9 14 5 6 8 6 5 6 7 7 7 6 6 8 6 7 9 8 8 9 10 8 17 8 7 7 9 6 6 8 9 8 8 6 6 6 5 6 6 6 7 6 7 5 4 5 6 6 7 11 16
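Each durations file dumped above is a plain-text table: one utterance ID per line (e.g. LJ050-0028) followed by whitespace-separated per-token durations in frames, as written out by the decode job. Below is a minimal parsing sketch under that assumption; the helper name and the example usage path are illustrative only, not part of the recipe.

```python
from pathlib import Path

def read_durations(path):
    """Parse a decode-log durations file laid out as 'UTT_ID d1 d2 ... dN'.

    Assumes whitespace-separated integer frame counts after the utterance ID,
    i.e. the layout visible in the dump above; this is not an ESPnet API.
    """
    durations = {}
    for line in Path(path).read_text().splitlines():
        parts = line.split()
        if not parts:
            continue
        durations[parts[0]] = [int(v) for v in parts[1:]]
    return durations

# Illustrative usage: number of tokens and total frames per utterance.
durs = read_durations(
    "imdanboy/jets/decode_train.loss.ave/dev/log/output.8/durations/durations"
)
for utt_id, d in list(durs.items())[:3]:
    print(utt_id, len(d), "tokens,", sum(d), "frames")
```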
imdanboy/jets/decode_train.loss.ave/dev/log/output.8/speech_shape/speech_shape
ADDED
File without changes
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.1.log
ADDED
@@ -0,0 +1,902 @@
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.1.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.1 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.1.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.1 --vocoder_file none --config conf/decode.yaml
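This is the batch decoding entry point used for each shard of the dev set. For a quick interactive check of the same checkpoint, the Text2Speech wrapper in the same module can be used instead; the sketch below is a minimal single-utterance version, assuming the paths from the command above are reachable from the working directory (the example sentence and output filename are made up):

```python
import soundfile as sf
from espnet2.bin.tts_inference import Text2Speech

# Load the same training config and averaged checkpoint as the command above,
# on CPU and without an external vocoder (mirroring --ngpu 0 --vocoder_file none).
tts = Text2Speech(
    train_config="exp/imdanboy/jets/config.yaml",
    model_file="exp/imdanboy/jets/train.total_count.ave_5best.pth",
    device="cpu",
)

# JETS is end-to-end, so the call returns a waveform directly.
out = tts("This is a quick sanity check of the averaged JETS checkpoint.")
sf.write("sample.wav", out["wav"].numpy(), tts.fs)
```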
|
| 7 |
+
2025-02-21 15:00:43,859 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,979 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,095 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,901 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
| 15 |
+
2025-02-21 15:00:45,901 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
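Together, the extractor and normalizer blocks define the target-feature pipeline: 80-band log-mel spectrograms from a 1024-point STFT with hop 256 at 22050 Hz (80 to 7600 Hz), followed by global mean-variance normalization with the statistics in feats_stats.npz. The sketch below is a rough re-creation for sanity-checking features outside ESPnet, with librosa as an assumed stand-in; its STFT and mel-filterbank defaults will not match the Stft/LogMel layers bit-for-bit, and the mean/std arrays are assumed to be precomputed from the stats file.

```python
import numpy as np
import librosa

def logmel_features(wav_path, mean=None, std=None):
    """Approximate the LogMelFbank (+ GlobalMVN) settings from the log above."""
    # Settings mirrored from the log: sr=22050, n_fft=1024, hop=256,
    # win=1024, 80 mel bands, fmin=80, fmax=7600.
    y, _ = librosa.load(wav_path, sr=22050)
    mel = librosa.feature.melspectrogram(
        y=y, sr=22050, n_fft=1024, hop_length=256, win_length=1024,
        n_mels=80, fmin=80, fmax=7600, power=1.0,
    )
    feats = np.log(np.maximum(mel, 1e-10)).T  # (frames, 80)
    if mean is not None and std is not None:
        # GlobalMVN step: normalize with the global mean/std
        # (derived from feats_stats.npz in the real pipeline).
        feats = (feats - mean) / std
    return feats
```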
|
| 17 |
+
2025-02-21 15:00:45,904 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
| 836 |
+
2025-02-21 15:00:46,689 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:52,161 (tts_inference:476) INFO: inference speed = 29666.6 points / sec.
|
| 838 |
+
2025-02-21 15:00:52,162 (tts_inference:481) INFO: LJ049-0008 (size:60->117504)
|
| 839 |
+
2025-02-21 15:00:59,011 (tts_inference:476) INFO: inference speed = 30792.4 points / sec.
|
| 840 |
+
2025-02-21 15:00:59,011 (tts_inference:481) INFO: LJ049-0009 (size:123->210688)
|
| 841 |
+
2025-02-21 15:01:01,096 (tts_inference:476) INFO: inference speed = 33279.6 points / sec.
|
| 842 |
+
2025-02-21 15:01:01,096 (tts_inference:481) INFO: LJ049-0010 (size:38->69120)
|
| 843 |
+
2025-02-21 15:01:06,890 (tts_inference:476) INFO: inference speed = 34051.6 points / sec.
|
| 844 |
+
2025-02-21 15:01:06,890 (tts_inference:481) INFO: LJ049-0011 (size:108->197120)
|
| 845 |
+
2025-02-21 15:01:10,928 (tts_inference:476) INFO: inference speed = 34118.3 points / sec.
|
| 846 |
+
2025-02-21 15:01:10,928 (tts_inference:481) INFO: LJ049-0012 (size:77->137472)
|
| 847 |
+
2025-02-21 15:01:14,777 (tts_inference:476) INFO: inference speed = 33976.1 points / sec.
|
| 848 |
+
2025-02-21 15:01:14,778 (tts_inference:481) INFO: LJ049-0013 (size:78->130560)
|
| 849 |
+
2025-02-21 15:01:18,104 (tts_inference:476) INFO: inference speed = 34093.9 points / sec.
|
| 850 |
+
2025-02-21 15:01:18,104 (tts_inference:481) INFO: LJ049-0014 (size:63->113152)
|
| 851 |
+
2025-02-21 15:01:21,821 (tts_inference:476) INFO: inference speed = 34011.6 points / sec.
|
| 852 |
+
2025-02-21 15:01:21,821 (tts_inference:481) INFO: LJ049-0015 (size:71->126208)
|
| 853 |
+
2025-02-21 15:01:22,887 (tts_inference:476) INFO: inference speed = 30920.7 points / sec.
|
| 854 |
+
2025-02-21 15:01:22,887 (tts_inference:481) INFO: LJ049-0016 (size:22->32768)
|
| 855 |
+
2025-02-21 15:01:27,889 (tts_inference:476) INFO: inference speed = 31252.2 points / sec.
|
| 856 |
+
2025-02-21 15:01:27,889 (tts_inference:481) INFO: LJ049-0017 (size:86->156160)
|
| 857 |
+
2025-02-21 15:01:30,037 (tts_inference:476) INFO: inference speed = 32531.8 points / sec.
|
| 858 |
+
2025-02-21 15:01:30,037 (tts_inference:481) INFO: LJ049-0018 (size:38->69632)
|
| 859 |
+
2025-02-21 15:01:34,513 (tts_inference:476) INFO: inference speed = 33845.7 points / sec.
|
| 860 |
+
2025-02-21 15:01:34,513 (tts_inference:481) INFO: LJ049-0019 (size:86->151296)
|
| 861 |
+
2025-02-21 15:01:39,204 (tts_inference:476) INFO: inference speed = 32840.9 points / sec.
|
| 862 |
+
2025-02-21 15:01:39,205 (tts_inference:481) INFO: LJ049-0020 (size:94->153856)
|
| 863 |
+
2025-02-21 15:01:42,722 (tts_inference:476) INFO: inference speed = 33402.2 points / sec.
|
| 864 |
+
2025-02-21 15:01:42,722 (tts_inference:481) INFO: LJ049-0021 (size:65->117248)
|
| 865 |
+
2025-02-21 15:01:49,059 (tts_inference:476) INFO: inference speed = 30895.3 points / sec.
|
| 866 |
+
2025-02-21 15:01:49,059 (tts_inference:481) INFO: LJ049-0022 (size:109->195584)
|
| 867 |
+
2025-02-21 15:01:53,719 (tts_inference:476) INFO: inference speed = 34005.4 points / sec.
|
| 868 |
+
2025-02-21 15:01:53,719 (tts_inference:481) INFO: LJ049-0023 (size:83->158208)
|
| 869 |
+
2025-02-21 15:01:55,940 (tts_inference:476) INFO: inference speed = 33081.5 points / sec.
|
| 870 |
+
2025-02-21 15:01:55,940 (tts_inference:481) INFO: LJ049-0024 (size:52->73216)
|
| 871 |
+
2025-02-21 15:01:58,060 (tts_inference:476) INFO: inference speed = 33283.6 points / sec.
|
| 872 |
+
2025-02-21 15:01:58,061 (tts_inference:481) INFO: LJ049-0025 (size:43->70400)
|
| 873 |
+
2025-02-21 15:02:02,410 (tts_inference:476) INFO: inference speed = 34064.0 points / sec.
|
| 874 |
+
2025-02-21 15:02:02,410 (tts_inference:481) INFO: LJ049-0026 (size:89->147968)
|
| 875 |
+
2025-02-21 15:02:09,003 (tts_inference:476) INFO: inference speed = 30786.4 points / sec.
|
| 876 |
+
2025-02-21 15:02:09,003 (tts_inference:481) INFO: LJ049-0027 (size:112->202752)
|
| 877 |
+
2025-02-21 15:02:12,572 (tts_inference:476) INFO: inference speed = 33639.8 points / sec.
|
| 878 |
+
2025-02-21 15:02:12,572 (tts_inference:481) INFO: LJ049-0028 (size:69->119808)
|
| 879 |
+
2025-02-21 15:02:17,150 (tts_inference:476) INFO: inference speed = 34040.5 points / sec.
|
| 880 |
+
2025-02-21 15:02:17,150 (tts_inference:481) INFO: LJ049-0029 (size:93->155648)
|
| 881 |
+
2025-02-21 15:02:22,233 (tts_inference:476) INFO: inference speed = 34294.5 points / sec.
|
| 882 |
+
2025-02-21 15:02:22,234 (tts_inference:481) INFO: LJ049-0030 (size:96->174080)
|
| 883 |
+
2025-02-21 15:02:25,161 (tts_inference:476) INFO: inference speed = 33398.6 points / sec.
|
| 884 |
+
2025-02-21 15:02:25,161 (tts_inference:481) INFO: LJ049-0031 (size:52->97536)
|
| 885 |
+
2025-02-21 15:02:29,532 (tts_inference:476) INFO: inference speed = 34077.6 points / sec.
|
| 886 |
+
2025-02-21 15:02:29,532 (tts_inference:481) INFO: LJ049-0032 (size:88->148736)
|
| 887 |
+
2025-02-21 15:02:35,395 (tts_inference:476) INFO: inference speed = 33918.3 points / sec.
|
| 888 |
+
2025-02-21 15:02:35,396 (tts_inference:481) INFO: LJ049-0033 (size:106->198656)
|
| 889 |
+
2025-02-21 15:02:39,439 (tts_inference:476) INFO: inference speed = 33488.6 points / sec.
|
| 890 |
+
2025-02-21 15:02:39,439 (tts_inference:481) INFO: LJ049-0034 (size:79->135168)
|
| 891 |
+
2025-02-21 15:02:44,253 (tts_inference:476) INFO: inference speed = 34403.8 points / sec.
|
| 892 |
+
2025-02-21 15:02:44,253 (tts_inference:481) INFO: LJ049-0035 (size:92->165376)
|
| 893 |
+
2025-02-21 15:02:45,983 (tts_inference:476) INFO: inference speed = 32394.4 points / sec.
|
| 894 |
+
2025-02-21 15:02:45,983 (tts_inference:481) INFO: LJ049-0036 (size:29->55808)
|
| 895 |
+
2025-02-21 15:02:49,837 (tts_inference:476) INFO: inference speed = 33725.8 points / sec.
|
| 896 |
+
2025-02-21 15:02:49,837 (tts_inference:481) INFO: LJ049-0037 (size:68->129792)
|
| 897 |
+
2025-02-21 15:02:55,135 (tts_inference:476) INFO: inference speed = 33860.6 points / sec.
|
| 898 |
+
2025-02-21 15:02:55,136 (tts_inference:481) INFO: LJ049-0038 (size:94->179200)
|
| 899 |
+
2025-02-21 15:02:58,526 (tts_inference:476) INFO: inference speed = 33972.2 points / sec.
|
| 900 |
+
2025-02-21 15:02:58,526 (tts_inference:481) INFO: LJ049-0039 (size:66->114944)
|
| 901 |
+
# Accounting: time=139 threads=1
|
| 902 |
+
# Ended (code 0) at Fri Feb 21 15:02:59 JST 2025, elapsed time 139 seconds
|
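Aside: the architecture dump and per-utterance speed figures in this log come from ESPnet's batch `espnet2.bin.tts_inference` entry point (the full command line is recorded at the top of each log). For a quick interactive sanity check of the same checkpoint outside the recipe, the Python API can be used instead. The following is a minimal, hypothetical sketch, not part of this upload: it assumes `espnet2` and `soundfile` are installed and that the `--train_config` / `--model_file` paths from the logged command line exist locally.

# Hypothetical single-utterance synthesis with the same JETS checkpoint
# used by the logged tts_inference runs (paths taken from the logged command line).
import soundfile as sf
from espnet2.bin.tts_inference import Text2Speech

tts = Text2Speech(
    train_config="exp/imdanboy/jets/config.yaml",
    model_file="exp/imdanboy/jets/train.total_count.ave_5best.pth",
    device="cpu",  # the logged runs also used --ngpu 0
)

# JETS is an end-to-end text-to-waveform model, so no separate vocoder is needed,
# matching --vocoder_file none in the logged command.
out = tts("This is a test sentence.")
sf.write("sample.wav", out["wav"].numpy(), tts.fs)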
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.2.log
ADDED
|
@@ -0,0 +1,902 @@
|
| 1 |
+
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.2.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.2 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.2.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.2 --vocoder_file none --config conf/decode.yaml
|
| 7 |
+
2025-02-21 15:00:43,859 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,978 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,094 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,897 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
| 15 |
+
2025-02-21 15:00:45,897 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
|
| 17 |
+
2025-02-21 15:00:45,901 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
| 836 |
+
2025-02-21 15:00:46,384 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:50,827 (tts_inference:476) INFO: inference speed = 28969.4 points / sec.
|
| 838 |
+
2025-02-21 15:00:50,827 (tts_inference:481) INFO: LJ049-0040 (size:50->84480)
|
| 839 |
+
2025-02-21 15:00:57,045 (tts_inference:476) INFO: inference speed = 29384.6 points / sec.
|
| 840 |
+
2025-02-21 15:00:57,045 (tts_inference:481) INFO: LJ049-0041 (size:115->182528)
|
| 841 |
+
2025-02-21 15:01:01,434 (tts_inference:476) INFO: inference speed = 33892.1 points / sec.
|
| 842 |
+
2025-02-21 15:01:01,434 (tts_inference:481) INFO: LJ049-0042 (size:76->148480)
|
| 843 |
+
2025-02-21 15:01:05,661 (tts_inference:476) INFO: inference speed = 34026.6 points / sec.
|
| 844 |
+
2025-02-21 15:01:05,661 (tts_inference:481) INFO: LJ049-0043 (size:79->143616)
|
| 845 |
+
2025-02-21 15:01:09,530 (tts_inference:476) INFO: inference speed = 34075.9 points / sec.
|
| 846 |
+
2025-02-21 15:01:09,530 (tts_inference:481) INFO: LJ049-0044 (size:73->131584)
|
| 847 |
+
2025-02-21 15:01:16,187 (tts_inference:476) INFO: inference speed = 29292.1 points / sec.
|
| 848 |
+
2025-02-21 15:01:16,188 (tts_inference:481) INFO: LJ049-0045 (size:103->194816)
|
| 849 |
+
2025-02-21 15:01:20,130 (tts_inference:476) INFO: inference speed = 34029.9 points / sec.
|
| 850 |
+
2025-02-21 15:01:20,130 (tts_inference:481) INFO: LJ049-0046 (size:63->133888)
|
| 851 |
+
2025-02-21 15:01:24,470 (tts_inference:476) INFO: inference speed = 33972.8 points / sec.
|
| 852 |
+
2025-02-21 15:01:24,470 (tts_inference:481) INFO: LJ049-0047 (size:74->147200)
|
| 853 |
+
2025-02-21 15:01:27,928 (tts_inference:476) INFO: inference speed = 33897.5 points / sec.
|
| 854 |
+
2025-02-21 15:01:27,928 (tts_inference:481) INFO: LJ049-0048 (size:69->116992)
|
| 855 |
+
2025-02-21 15:01:30,743 (tts_inference:476) INFO: inference speed = 33538.3 points / sec.
|
| 856 |
+
2025-02-21 15:01:30,743 (tts_inference:481) INFO: LJ049-0049 (size:53->94208)
|
| 857 |
+
2025-02-21 15:01:34,356 (tts_inference:476) INFO: inference speed = 33708.5 points / sec.
|
| 858 |
+
2025-02-21 15:01:34,356 (tts_inference:481) INFO: LJ049-0050 (size:68->121600)
|
| 859 |
+
2025-02-21 15:01:36,076 (tts_inference:476) INFO: inference speed = 32729.7 points / sec.
|
| 860 |
+
2025-02-21 15:01:36,076 (tts_inference:481) INFO: LJ049-0051 (size:30->56064)
|
| 861 |
+
2025-02-21 15:01:39,610 (tts_inference:476) INFO: inference speed = 33657.0 points / sec.
|
| 862 |
+
2025-02-21 15:01:39,610 (tts_inference:481) INFO: LJ049-0052 (size:69->118784)
|
| 863 |
+
2025-02-21 15:01:45,393 (tts_inference:476) INFO: inference speed = 30710.9 points / sec.
|
| 864 |
+
2025-02-21 15:01:45,393 (tts_inference:481) INFO: LJ049-0053 (size:94->177408)
|
| 865 |
+
2025-02-21 15:01:49,475 (tts_inference:476) INFO: inference speed = 33745.3 points / sec.
|
| 866 |
+
2025-02-21 15:01:49,475 (tts_inference:481) INFO: LJ049-0054 (size:83->137472)
|
| 867 |
+
2025-02-21 15:01:50,905 (tts_inference:476) INFO: inference speed = 32007.2 points / sec.
|
| 868 |
+
2025-02-21 15:01:50,905 (tts_inference:481) INFO: LJ049-0055 (size:24->45568)
|
| 869 |
+
2025-02-21 15:01:55,616 (tts_inference:476) INFO: inference speed = 33891.6 points / sec.
|
| 870 |
+
2025-02-21 15:01:55,616 (tts_inference:481) INFO: LJ049-0056 (size:90->159488)
|
| 871 |
+
2025-02-21 15:01:59,281 (tts_inference:476) INFO: inference speed = 33665.9 points / sec.
|
| 872 |
+
2025-02-21 15:01:59,281 (tts_inference:481) INFO: LJ049-0057 (size:72->123136)
|
| 873 |
+
2025-02-21 15:02:04,351 (tts_inference:476) INFO: inference speed = 31702.1 points / sec.
|
| 874 |
+
2025-02-21 15:02:04,351 (tts_inference:481) INFO: LJ049-0058 (size:86->160512)
|
| 875 |
+
2025-02-21 15:02:08,662 (tts_inference:476) INFO: inference speed = 33721.4 points / sec.
|
| 876 |
+
2025-02-21 15:02:08,662 (tts_inference:481) INFO: LJ049-0059 (size:82->145152)
|
| 877 |
+
2025-02-21 15:02:14,200 (tts_inference:476) INFO: inference speed = 30869.2 points / sec.
|
| 878 |
+
2025-02-21 15:02:14,200 (tts_inference:481) INFO: LJ049-0060 (size:90->170752)
|
| 879 |
+
2025-02-21 15:02:19,837 (tts_inference:476) INFO: inference speed = 30652.5 points / sec.
|
| 880 |
+
2025-02-21 15:02:19,837 (tts_inference:481) INFO: LJ049-0061 (size:94->172544)
|
| 881 |
+
2025-02-21 15:02:22,698 (tts_inference:476) INFO: inference speed = 33292.4 points / sec.
|
| 882 |
+
2025-02-21 15:02:22,698 (tts_inference:481) INFO: LJ049-0062 (size:57->94976)
|
| 883 |
+
2025-02-21 15:02:27,367 (tts_inference:476) INFO: inference speed = 34089.6 points / sec.
|
| 884 |
+
2025-02-21 15:02:27,367 (tts_inference:481) INFO: LJ049-0063 (size:84->158976)
|
| 885 |
+
2025-02-21 15:02:29,663 (tts_inference:476) INFO: inference speed = 32996.6 points / sec.
|
| 886 |
+
2025-02-21 15:02:29,663 (tts_inference:481) INFO: LJ049-0064 (size:45->75520)
|
| 887 |
+
2025-02-21 15:02:33,461 (tts_inference:476) INFO: inference speed = 33411.6 points / sec.
|
| 888 |
+
2025-02-21 15:02:33,461 (tts_inference:481) INFO: LJ049-0065 (size:71->126720)
|
| 889 |
+
2025-02-21 15:02:39,928 (tts_inference:476) INFO: inference speed = 29880.2 points / sec.
|
| 890 |
+
2025-02-21 15:02:39,928 (tts_inference:481) INFO: LJ049-0066 (size:97->193024)
|
| 891 |
+
2025-02-21 15:02:44,993 (tts_inference:476) INFO: inference speed = 34319.3 points / sec.
|
| 892 |
+
2025-02-21 15:02:44,993 (tts_inference:481) INFO: LJ049-0067 (size:97->173568)
|
| 893 |
+
2025-02-21 15:02:46,274 (tts_inference:476) INFO: inference speed = 31338.2 points / sec.
|
| 894 |
+
2025-02-21 15:02:46,274 (tts_inference:481) INFO: LJ049-0068 (size:21->39936)
|
| 895 |
+
2025-02-21 15:02:49,369 (tts_inference:476) INFO: inference speed = 33065.7 points / sec.
|
| 896 |
+
2025-02-21 15:02:49,369 (tts_inference:481) INFO: LJ049-0069 (size:52->102144)
|
| 897 |
+
2025-02-21 15:02:55,485 (tts_inference:476) INFO: inference speed = 30504.5 points / sec.
|
| 898 |
+
2025-02-21 15:02:55,485 (tts_inference:481) INFO: LJ049-0070 (size:103->186368)
|
| 899 |
+
2025-02-21 15:02:59,574 (tts_inference:476) INFO: inference speed = 33492.4 points / sec.
|
| 900 |
+
2025-02-21 15:02:59,574 (tts_inference:481) INFO: LJ049-0071 (size:70->136704)
|
| 901 |
+
# Accounting: time=140 threads=1
|
| 902 |
+
# Ended (code 0) at Fri Feb 21 15:03:00 JST 2025, elapsed time 140 seconds
|
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.3.log
ADDED
|
@@ -0,0 +1,900 @@
|
| 1 |
+
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.3.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.3 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.3.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.3 --vocoder_file none --config conf/decode.yaml
|
| 7 |
+
2025-02-21 15:00:43,859 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,979 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,095 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,901 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
| 15 |
+
2025-02-21 15:00:45,901 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
|
| 17 |
+
2025-02-21 15:00:45,904 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
| 836 |
+
2025-02-21 15:00:46,588 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:54,819 (tts_inference:476) INFO: inference speed = 29016.3 points / sec.
|
| 838 |
+
2025-02-21 15:00:54,819 (tts_inference:481) INFO: LJ049-0072 (size:106->194560)
|
| 839 |
+
2025-02-21 15:00:58,859 (tts_inference:476) INFO: inference speed = 33909.9 points / sec.
|
| 840 |
+
2025-02-21 15:00:58,860 (tts_inference:481) INFO: LJ049-0073 (size:80->136704)
|
| 841 |
+
2025-02-21 15:01:04,141 (tts_inference:476) INFO: inference speed = 34455.3 points / sec.
|
| 842 |
+
2025-02-21 15:01:04,142 (tts_inference:481) INFO: LJ049-0074 (size:102->181760)
|
| 843 |
+
2025-02-21 15:01:05,014 (tts_inference:476) INFO: inference speed = 29887.7 points / sec.
|
| 844 |
+
2025-02-21 15:01:05,014 (tts_inference:481) INFO: LJ049-0075 (size:15->25856)
|
| 845 |
+
2025-02-21 15:01:11,975 (tts_inference:476) INFO: inference speed = 29366.0 points / sec.
|
| 846 |
+
2025-02-21 15:01:11,976 (tts_inference:481) INFO: LJ049-0076 (size:129->204288)
|
| 847 |
+
2025-02-21 15:01:15,023 (tts_inference:476) INFO: inference speed = 33351.4 points / sec.
|
| 848 |
+
2025-02-21 15:01:15,023 (tts_inference:481) INFO: LJ049-0077 (size:56->101376)
|
| 849 |
+
2025-02-21 15:01:18,590 (tts_inference:476) INFO: inference speed = 34086.8 points / sec.
|
| 850 |
+
2025-02-21 15:01:18,590 (tts_inference:481) INFO: LJ049-0078 (size:73->121344)
|
| 851 |
+
2025-02-21 15:01:24,371 (tts_inference:476) INFO: inference speed = 34312.2 points / sec.
|
| 852 |
+
2025-02-21 15:01:24,372 (tts_inference:481) INFO: LJ049-0079 (size:107->198144)
|
| 853 |
+
2025-02-21 15:01:25,844 (tts_inference:476) INFO: inference speed = 32152.5 points / sec.
|
| 854 |
+
2025-02-21 15:01:25,844 (tts_inference:481) INFO: LJ049-0080 (size:22->47104)
|
| 855 |
+
2025-02-21 15:01:28,934 (tts_inference:476) INFO: inference speed = 33195.7 points / sec.
|
| 856 |
+
2025-02-21 15:01:28,934 (tts_inference:481) INFO: LJ049-0081 (size:67->102400)
|
| 857 |
+
2025-02-21 15:01:33,973 (tts_inference:476) INFO: inference speed = 34129.7 points / sec.
|
| 858 |
+
2025-02-21 15:01:33,973 (tts_inference:481) INFO: LJ049-0082 (size:95->171776)
|
| 859 |
+
2025-02-21 15:01:42,534 (tts_inference:476) INFO: inference speed = 29360.8 points / sec.
|
| 860 |
+
2025-02-21 15:01:42,535 (tts_inference:481) INFO: LJ049-0083 (size:114->251136)
|
| 861 |
+
2025-02-21 15:01:48,189 (tts_inference:476) INFO: inference speed = 35097.4 points / sec.
|
| 862 |
+
2025-02-21 15:01:48,189 (tts_inference:481) INFO: LJ049-0084 (size:112->198144)
2025-02-21 15:01:50,117 (tts_inference:476) INFO: inference speed = 32675.8 points / sec.
2025-02-21 15:01:50,117 (tts_inference:481) INFO: LJ049-0085 (size:39->62720)
2025-02-21 15:01:55,285 (tts_inference:476) INFO: inference speed = 33919.0 points / sec.
2025-02-21 15:01:55,285 (tts_inference:481) INFO: LJ049-0086 (size:95->175104)
2025-02-21 15:01:59,357 (tts_inference:476) INFO: inference speed = 34005.8 points / sec.
2025-02-21 15:01:59,358 (tts_inference:481) INFO: LJ049-0087 (size:76->138240)
2025-02-21 15:02:02,898 (tts_inference:476) INFO: inference speed = 33831.1 points / sec.
2025-02-21 15:02:02,898 (tts_inference:481) INFO: LJ049-0088 (size:66->119552)
2025-02-21 15:02:07,447 (tts_inference:476) INFO: inference speed = 33980.2 points / sec.
2025-02-21 15:02:07,447 (tts_inference:481) INFO: LJ049-0089 (size:78->154368)
2025-02-21 15:02:10,099 (tts_inference:476) INFO: inference speed = 33102.9 points / sec.
2025-02-21 15:02:10,100 (tts_inference:481) INFO: LJ049-0090 (size:39->87552)
2025-02-21 15:02:14,626 (tts_inference:476) INFO: inference speed = 34088.5 points / sec.
2025-02-21 15:02:14,626 (tts_inference:481) INFO: LJ049-0091 (size:87->154112)
2025-02-21 15:02:19,882 (tts_inference:476) INFO: inference speed = 32093.0 points / sec.
2025-02-21 15:02:19,882 (tts_inference:481) INFO: LJ049-0092 (size:81->168448)
2025-02-21 15:02:24,714 (tts_inference:476) INFO: inference speed = 34119.6 points / sec.
2025-02-21 15:02:24,714 (tts_inference:481) INFO: LJ049-0093 (size:89->164608)
2025-02-21 15:02:28,628 (tts_inference:476) INFO: inference speed = 33940.5 points / sec.
2025-02-21 15:02:28,628 (tts_inference:481) INFO: LJ049-0094 (size:68->132608)
2025-02-21 15:02:32,273 (tts_inference:476) INFO: inference speed = 33635.1 points / sec.
2025-02-21 15:02:32,273 (tts_inference:481) INFO: LJ049-0095 (size:62->122368)
2025-02-21 15:02:37,898 (tts_inference:476) INFO: inference speed = 30526.2 points / sec.
2025-02-21 15:02:37,898 (tts_inference:481) INFO: LJ049-0096 (size:96->171520)
2025-02-21 15:02:40,601 (tts_inference:476) INFO: inference speed = 33047.2 points / sec.
2025-02-21 15:02:40,602 (tts_inference:481) INFO: LJ049-0097 (size:55->89088)
2025-02-21 15:02:46,939 (tts_inference:476) INFO: inference speed = 29434.0 points / sec.
2025-02-21 15:02:46,939 (tts_inference:481) INFO: LJ049-0098 (size:97->186368)
2025-02-21 15:02:50,231 (tts_inference:476) INFO: inference speed = 33515.3 points / sec.
2025-02-21 15:02:50,232 (tts_inference:481) INFO: LJ049-0099 (size:69->110080)
2025-02-21 15:02:53,393 (tts_inference:476) INFO: inference speed = 33269.2 points / sec.
2025-02-21 15:02:53,393 (tts_inference:481) INFO: LJ049-0100 (size:57->104960)
2025-02-21 15:02:57,758 (tts_inference:476) INFO: inference speed = 35412.2 points / sec.
2025-02-21 15:02:57,758 (tts_inference:481) INFO: LJ049-0101 (size:72->154368)
2025-02-21 15:03:04,527 (tts_inference:476) INFO: inference speed = 31422.6 points / sec.
2025-02-21 15:03:04,527 (tts_inference:481) INFO: LJ049-0102 (size:118->212480)
# Accounting: time=145 threads=1
# Ended (code 0) at Fri Feb 21 15:03:05 JST 2025, elapsed time 145 seconds
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.4.log
ADDED
@@ -0,0 +1,900 @@
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.4.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.4 --vocoder_file none --config conf/decode.yaml
# Started at Fri Feb 21 15:00:40 JST 2025
#
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.4.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.4 --vocoder_file none --config conf/decode.yaml
2025-02-21 15:00:43,858 (tts:302) INFO: Vocabulary size: 78
2025-02-21 15:00:43,977 (encoder:172) INFO: encoder self-attention layer type = self-attention
2025-02-21 15:00:44,092 (encoder:172) INFO: encoder self-attention layer type = self-attention
2025-02-21 15:00:45,899 (tts_inference:126) INFO: Extractor:
LogMelFbank(
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
)
2025-02-21 15:00:45,900 (tts_inference:127) INFO: Normalizer:
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
2025-02-21 15:00:45,903 (tts_inference:128) INFO: TTS:
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
2025-02-21 15:00:46,389 (font_manager:1547) INFO: generated new fontManager
2025-02-21 15:00:53,718 (tts_inference:476) INFO: inference speed = 28124.3 points / sec.
2025-02-21 15:00:53,718 (tts_inference:481) INFO: LJ049-0103 (size:90->163328)
2025-02-21 15:00:59,945 (tts_inference:476) INFO: inference speed = 30336.5 points / sec.
2025-02-21 15:00:59,945 (tts_inference:481) INFO: LJ049-0104 (size:102->188672)
2025-02-21 15:01:04,582 (tts_inference:476) INFO: inference speed = 34563.3 points / sec.
2025-02-21 15:01:04,582 (tts_inference:481) INFO: LJ049-0105 (size:92->160000)
2025-02-21 15:01:11,668 (tts_inference:476) INFO: inference speed = 29147.4 points / sec.
2025-02-21 15:01:11,668 (tts_inference:481) INFO: LJ049-0106 (size:111->206336)
2025-02-21 15:01:13,682 (tts_inference:476) INFO: inference speed = 32931.9 points / sec.
2025-02-21 15:01:13,682 (tts_inference:481) INFO: LJ049-0107 (size:34->66048)
2025-02-21 15:01:19,134 (tts_inference:476) INFO: inference speed = 34456.8 points / sec.
2025-02-21 15:01:19,134 (tts_inference:481) INFO: LJ049-0108 (size:106->187648)
2025-02-21 15:01:23,186 (tts_inference:476) INFO: inference speed = 34247.1 points / sec.
2025-02-21 15:01:23,186 (tts_inference:481) INFO: LJ049-0109 (size:73->138496)
2025-02-21 15:01:28,617 (tts_inference:476) INFO: inference speed = 34166.6 points / sec.
2025-02-21 15:01:28,617 (tts_inference:481) INFO: LJ049-0110 (size:109->185344)
2025-02-21 15:01:32,676 (tts_inference:476) INFO: inference speed = 34046.9 points / sec.
2025-02-21 15:01:32,677 (tts_inference:481) INFO: LJ049-0111 (size:70->137984)
2025-02-21 15:01:35,997 (tts_inference:476) INFO: inference speed = 33373.8 points / sec.
2025-02-21 15:01:35,997 (tts_inference:481) INFO: LJ049-0112 (size:59->110592)
2025-02-21 15:01:40,995 (tts_inference:476) INFO: inference speed = 33902.3 points / sec.
2025-02-21 15:01:40,995 (tts_inference:481) INFO: LJ049-0113 (size:81->169216)
2025-02-21 15:01:46,970 (tts_inference:476) INFO: inference speed = 34100.9 points / sec.
2025-02-21 15:01:46,971 (tts_inference:481) INFO: LJ049-0114 (size:114->203520)
2025-02-21 15:01:50,024 (tts_inference:476) INFO: inference speed = 33367.4 points / sec.
2025-02-21 15:01:50,025 (tts_inference:481) INFO: LJ049-0115 (size:53->101632)
2025-02-21 15:01:55,859 (tts_inference:476) INFO: inference speed = 34301.3 points / sec.
2025-02-21 15:01:55,859 (tts_inference:481) INFO: LJ049-0116 (size:104->199936)
2025-02-21 15:01:58,412 (tts_inference:476) INFO: inference speed = 33593.8 points / sec.
2025-02-21 15:01:58,412 (tts_inference:481) INFO: LJ049-0117 (size:44->85504)
2025-02-21 15:02:03,290 (tts_inference:476) INFO: inference speed = 34313.2 points / sec.
2025-02-21 15:02:03,290 (tts_inference:481) INFO: LJ049-0118 (size:92->167168)
2025-02-21 15:02:08,369 (tts_inference:476) INFO: inference speed = 33818.6 points / sec.
2025-02-21 15:02:08,369 (tts_inference:481) INFO: LJ049-0119 (size:88->171520)
2025-02-21 15:02:12,154 (tts_inference:476) INFO: inference speed = 33811.8 points / sec.
2025-02-21 15:02:12,154 (tts_inference:481) INFO: LJ049-0120 (size:70->127744)
2025-02-21 15:02:17,093 (tts_inference:476) INFO: inference speed = 33943.7 points / sec.
2025-02-21 15:02:17,094 (tts_inference:481) INFO: LJ049-0121 (size:95->167424)
2025-02-21 15:02:22,863 (tts_inference:476) INFO: inference speed = 34703.4 points / sec.
2025-02-21 15:02:22,863 (tts_inference:481) INFO: LJ049-0122 (size:93->199936)
2025-02-21 15:02:28,234 (tts_inference:476) INFO: inference speed = 34985.4 points / sec.
2025-02-21 15:02:28,235 (tts_inference:481) INFO: LJ049-0123 (size:112->187648)
2025-02-21 15:02:30,461 (tts_inference:476) INFO: inference speed = 33222.3 points / sec.
2025-02-21 15:02:30,462 (tts_inference:481) INFO: LJ049-0124 (size:37->73728)
2025-02-21 15:02:36,021 (tts_inference:476) INFO: inference speed = 33507.5 points / sec.
2025-02-21 15:02:36,021 (tts_inference:481) INFO: LJ049-0125 (size:100->186112)
2025-02-21 15:02:39,247 (tts_inference:476) INFO: inference speed = 33967.0 points / sec.
2025-02-21 15:02:39,247 (tts_inference:481) INFO: LJ049-0126 (size:55->109312)
2025-02-21 15:02:44,138 (tts_inference:476) INFO: inference speed = 34646.2 points / sec.
2025-02-21 15:02:44,138 (tts_inference:481) INFO: LJ049-0127 (size:91->169216)
2025-02-21 15:02:47,411 (tts_inference:476) INFO: inference speed = 33632.5 points / sec.
2025-02-21 15:02:47,411 (tts_inference:481) INFO: LJ049-0128 (size:56->109824)
2025-02-21 15:02:53,863 (tts_inference:476) INFO: inference speed = 30301.2 points / sec.
2025-02-21 15:02:53,863 (tts_inference:481) INFO: LJ049-0129 (size:110->195328)
2025-02-21 15:02:55,683 (tts_inference:476) INFO: inference speed = 32357.8 points / sec.
2025-02-21 15:02:55,683 (tts_inference:481) INFO: LJ049-0130 (size:32->58624)
2025-02-21 15:02:59,359 (tts_inference:476) INFO: inference speed = 33470.7 points / sec.
2025-02-21 15:02:59,359 (tts_inference:481) INFO: LJ049-0132 (size:64->122880)
2025-02-21 15:03:02,861 (tts_inference:476) INFO: inference speed = 35152.8 points / sec.
2025-02-21 15:03:02,862 (tts_inference:481) INFO: LJ049-0133 (size:69->122880)
2025-02-21 15:03:07,734 (tts_inference:476) INFO: inference speed = 34671.7 points / sec.
2025-02-21 15:03:07,734 (tts_inference:481) INFO: LJ049-0134 (size:97->168704)
# Accounting: time=148 threads=1
# Ended (code 0) at Fri Feb 21 15:03:08 JST 2025, elapsed time 148 seconds
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.5.log
ADDED
@@ -0,0 +1,900 @@
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.5.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.5 --vocoder_file none --config conf/decode.yaml
# Started at Fri Feb 21 15:00:40 JST 2025
#
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.5.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.5 --vocoder_file none --config conf/decode.yaml
2025-02-21 15:00:43,756 (tts:302) INFO: Vocabulary size: 78
2025-02-21 15:00:43,977 (encoder:172) INFO: encoder self-attention layer type = self-attention
2025-02-21 15:00:44,093 (encoder:172) INFO: encoder self-attention layer type = self-attention
2025-02-21 15:00:45,896 (tts_inference:126) INFO: Extractor:
LogMelFbank(
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
)
2025-02-21 15:00:45,896 (tts_inference:127) INFO: Normalizer:
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
2025-02-21 15:00:45,900 (tts_inference:128) INFO: TTS:
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
| 836 |
+
2025-02-21 15:00:46,283 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:50,976 (tts_inference:476) INFO: inference speed = 29302.2 points / sec.
|
| 838 |
+
2025-02-21 15:00:50,976 (tts_inference:481) INFO: LJ049-0135 (size:60->92928)
|
| 839 |
+
2025-02-21 15:00:57,869 (tts_inference:476) INFO: inference speed = 30220.7 points / sec.
|
| 840 |
+
2025-02-21 15:00:57,870 (tts_inference:481) INFO: LJ049-0136 (size:114->208128)
|
| 841 |
+
2025-02-21 15:01:03,272 (tts_inference:476) INFO: inference speed = 34496.8 points / sec.
|
| 842 |
+
2025-02-21 15:01:03,273 (tts_inference:481) INFO: LJ049-0137 (size:108->186112)
|
| 843 |
+
2025-02-21 15:01:07,138 (tts_inference:476) INFO: inference speed = 33835.7 points / sec.
|
| 844 |
+
2025-02-21 15:01:07,139 (tts_inference:481) INFO: LJ049-0138 (size:75->130560)
|
| 845 |
+
2025-02-21 15:01:10,268 (tts_inference:476) INFO: inference speed = 33688.3 points / sec.
|
| 846 |
+
2025-02-21 15:01:10,268 (tts_inference:481) INFO: LJ049-0139 (size:64->105216)
|
| 847 |
+
2025-02-21 15:01:16,384 (tts_inference:476) INFO: inference speed = 31884.3 points / sec.
|
| 848 |
+
2025-02-21 15:01:16,385 (tts_inference:481) INFO: LJ049-0140 (size:108->194816)
|
| 849 |
+
2025-02-21 15:01:23,271 (tts_inference:476) INFO: inference speed = 30292.2 points / sec.
|
| 850 |
+
2025-02-21 15:01:23,271 (tts_inference:481) INFO: LJ049-0141 (size:121->208384)
|
| 851 |
+
2025-02-21 15:01:26,172 (tts_inference:476) INFO: inference speed = 33712.3 points / sec.
|
| 852 |
+
2025-02-21 15:01:26,172 (tts_inference:481) INFO: LJ049-0142 (size:55->97536)
|
| 853 |
+
2025-02-21 15:01:30,999 (tts_inference:476) INFO: inference speed = 34089.1 points / sec.
|
| 854 |
+
2025-02-21 15:01:30,999 (tts_inference:481) INFO: LJ049-0143 (size:84->164352)
|
| 855 |
+
2025-02-21 15:01:35,903 (tts_inference:476) INFO: inference speed = 34243.0 points / sec.
|
| 856 |
+
2025-02-21 15:01:35,903 (tts_inference:481) INFO: LJ049-0144 (size:100->167680)
|
| 857 |
+
2025-02-21 15:01:40,850 (tts_inference:476) INFO: inference speed = 34050.4 points / sec.
|
| 858 |
+
2025-02-21 15:01:40,850 (tts_inference:481) INFO: LJ049-0145 (size:100->168192)
|
| 859 |
+
2025-02-21 15:01:46,312 (tts_inference:476) INFO: inference speed = 32008.0 points / sec.
|
| 860 |
+
2025-02-21 15:01:46,313 (tts_inference:481) INFO: LJ049-0146 (size:98->174592)
|
| 861 |
+
2025-02-21 15:01:51,970 (tts_inference:476) INFO: inference speed = 31803.6 points / sec.
|
| 862 |
+
2025-02-21 15:01:51,971 (tts_inference:481) INFO: LJ049-0147 (size:106->179712)
|
| 863 |
+
2025-02-21 15:01:56,663 (tts_inference:476) INFO: inference speed = 34262.3 points / sec.
|
| 864 |
+
2025-02-21 15:01:56,663 (tts_inference:481) INFO: LJ049-0148 (size:88->160512)
|
| 865 |
+
2025-02-21 15:02:03,070 (tts_inference:476) INFO: inference speed = 31958.4 points / sec.
|
| 866 |
+
2025-02-21 15:02:03,071 (tts_inference:481) INFO: LJ049-0149 (size:113->204544)
|
| 867 |
+
2025-02-21 15:02:08,225 (tts_inference:476) INFO: inference speed = 34264.4 points / sec.
|
| 868 |
+
2025-02-21 15:02:08,226 (tts_inference:481) INFO: LJ049-0150 (size:105->176384)
|
| 869 |
+
2025-02-21 15:02:10,106 (tts_inference:476) INFO: inference speed = 32654.9 points / sec.
|
| 870 |
+
2025-02-21 15:02:10,107 (tts_inference:481) INFO: LJ049-0151 (size:39->61184)
|
| 871 |
+
2025-02-21 15:02:15,461 (tts_inference:476) INFO: inference speed = 33923.3 points / sec.
|
| 872 |
+
2025-02-21 15:02:15,462 (tts_inference:481) INFO: LJ049-0152 (size:115->181504)
|
| 873 |
+
2025-02-21 15:02:20,438 (tts_inference:476) INFO: inference speed = 34309.0 points / sec.
|
| 874 |
+
2025-02-21 15:02:20,439 (tts_inference:481) INFO: LJ049-0153 (size:94->170496)
|
| 875 |
+
2025-02-21 15:02:21,587 (tts_inference:476) INFO: inference speed = 31866.0 points / sec.
|
| 876 |
+
2025-02-21 15:02:21,587 (tts_inference:481) INFO: LJ049-0154 (size:16->36352)
|
| 877 |
+
2025-02-21 15:02:27,678 (tts_inference:476) INFO: inference speed = 30288.1 points / sec.
|
| 878 |
+
2025-02-21 15:02:27,678 (tts_inference:481) INFO: LJ049-0155 (size:99->184320)
|
| 879 |
+
2025-02-21 15:02:31,744 (tts_inference:476) INFO: inference speed = 34248.0 points / sec.
|
| 880 |
+
2025-02-21 15:02:31,744 (tts_inference:481) INFO: LJ049-0156 (size:74->139008)
|
| 881 |
+
2025-02-21 15:02:35,321 (tts_inference:476) INFO: inference speed = 33776.5 points / sec.
|
| 882 |
+
2025-02-21 15:02:35,321 (tts_inference:481) INFO: LJ049-0157 (size:67->120576)
|
| 883 |
+
2025-02-21 15:02:41,894 (tts_inference:476) INFO: inference speed = 29900.7 points / sec.
|
| 884 |
+
2025-02-21 15:02:41,894 (tts_inference:481) INFO: LJ049-0158 (size:105->196352)
|
| 885 |
+
2025-02-21 15:02:47,247 (tts_inference:476) INFO: inference speed = 34343.4 points / sec.
|
| 886 |
+
2025-02-21 15:02:47,247 (tts_inference:481) INFO: LJ049-0159 (size:98->183552)
|
| 887 |
+
2025-02-21 15:02:51,897 (tts_inference:476) INFO: inference speed = 33910.1 points / sec.
|
| 888 |
+
2025-02-21 15:02:51,897 (tts_inference:481) INFO: LJ049-0160 (size:90->157440)
|
| 889 |
+
2025-02-21 15:02:55,019 (tts_inference:476) INFO: inference speed = 33528.9 points / sec.
|
| 890 |
+
2025-02-21 15:02:55,020 (tts_inference:481) INFO: LJ049-0161 (size:67->104448)
|
| 891 |
+
2025-02-21 15:02:58,425 (tts_inference:476) INFO: inference speed = 33432.5 points / sec.
|
| 892 |
+
2025-02-21 15:02:58,426 (tts_inference:481) INFO: LJ049-0162 (size:75->113664)
|
| 893 |
+
2025-02-21 15:03:04,757 (tts_inference:476) INFO: inference speed = 32825.1 points / sec.
|
| 894 |
+
2025-02-21 15:03:04,757 (tts_inference:481) INFO: LJ049-0163 (size:106->207616)
|
| 895 |
+
2025-02-21 15:03:10,293 (tts_inference:476) INFO: inference speed = 34827.7 points / sec.
|
| 896 |
+
2025-02-21 15:03:10,293 (tts_inference:481) INFO: LJ049-0164 (size:114->192512)
|
| 897 |
+
2025-02-21 15:03:15,320 (tts_inference:476) INFO: inference speed = 35706.3 points / sec.
|
| 898 |
+
2025-02-21 15:03:15,320 (tts_inference:481) INFO: LJ049-0165 (size:104->179200)
|
| 899 |
+
# Accounting: time=156 threads=1
|
| 900 |
+
# Ended (code 0) at Fri Feb 21 15:03:16 JST 2025, elapsed time 156 seconds
|
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.6.log
ADDED
|
@@ -0,0 +1,900 @@
| 1 |
+
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.6.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.6 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.6.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.6 --vocoder_file none --config conf/decode.yaml
|
| 7 |
+
2025-02-21 15:00:43,854 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,973 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,087 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,850 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
| 15 |
+
2025-02-21 15:00:45,850 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
|
| 17 |
+
2025-02-21 15:00:45,854 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
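Note on the upsampling stack just printed: the four ConvTranspose1d stages use strides 8, 8, 2 and 2, so the generator expands each decoder frame by a factor of 8 * 8 * 2 * 2 = 256 samples, which matches the hop_length=256 of the STFT front-end printed further down in this log. A tiny sketch of that arithmetic (the variable names are illustrative only):
# Hedged sketch: total upsampling factor of the HiFiGANGenerator above.
upsample_strides = [8, 8, 2, 2]        # strides of the four ConvTranspose1d layers
total = 1
for s in upsample_strides:
    total *= s
assert total == 256                    # equals the hop_length=256 of the mel front-end,
                                       # i.e. one decoder frame -> 256 waveform samples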
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
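The module tree ending at line 835 above is the JETS generator/discriminator pair as instantiated by espnet2.bin.tts_inference. A minimal sketch of loading the same pretrained model and reproducing a printout like this with ESPnet's Text2Speech helper follows; the model tag "imdanboy/jets" is taken from the paths in this log, and the exact API surface is an assumption that may vary across espnet2 / espnet_model_zoo versions.
# Hedged sketch: load the pretrained "imdanboy/jets" model and run inference.
# API details are assumed; check the installed espnet2 version.
from espnet2.bin.tts_inference import Text2Speech

tts = Text2Speech.from_pretrained("imdanboy/jets")   # downloads config.yaml + averaged checkpoint
print(tts.model)                                      # prints a module tree like the one above
out = tts("This is a synthesis test.")                # end-to-end text-to-waveform inference
wav = out["wav"]                                      # 22.05 kHz waveform tensor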
| 836 |
+
2025-02-21 15:00:46,238 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:53,936 (tts_inference:476) INFO: inference speed = 28924.2 points / sec.
|
| 838 |
+
2025-02-21 15:00:53,937 (tts_inference:481) INFO: LJ049-0166 (size:96->178944)
|
| 839 |
+
2025-02-21 15:00:56,902 (tts_inference:476) INFO: inference speed = 33243.3 points / sec.
|
| 840 |
+
2025-02-21 15:00:56,902 (tts_inference:481) INFO: LJ049-0167 (size:64->98304)
|
| 841 |
+
2025-02-21 15:01:02,259 (tts_inference:476) INFO: inference speed = 30710.4 points / sec.
|
| 842 |
+
2025-02-21 15:01:02,259 (tts_inference:481) INFO: LJ049-0168 (size:87->164352)
|
| 843 |
+
2025-02-21 15:01:05,787 (tts_inference:476) INFO: inference speed = 33734.8 points / sec.
|
| 844 |
+
2025-02-21 15:01:05,787 (tts_inference:481) INFO: LJ049-0169 (size:65->118784)
|
| 845 |
+
2025-02-21 15:01:11,170 (tts_inference:476) INFO: inference speed = 30091.5 points / sec.
|
| 846 |
+
2025-02-21 15:01:11,170 (tts_inference:481) INFO: LJ049-0170 (size:95->161792)
|
| 847 |
+
2025-02-21 15:01:14,635 (tts_inference:476) INFO: inference speed = 33759.8 points / sec.
|
| 848 |
+
2025-02-21 15:01:14,635 (tts_inference:481) INFO: LJ049-0171 (size:67->116736)
|
| 849 |
+
2025-02-21 15:01:18,388 (tts_inference:476) INFO: inference speed = 33682.6 points / sec.
|
| 850 |
+
2025-02-21 15:01:18,388 (tts_inference:481) INFO: LJ049-0172 (size:68->126208)
|
| 851 |
+
2025-02-21 15:01:25,136 (tts_inference:476) INFO: inference speed = 29851.3 points / sec.
|
| 852 |
+
2025-02-21 15:01:25,136 (tts_inference:481) INFO: LJ049-0173 (size:94->201216)
|
| 853 |
+
2025-02-21 15:01:29,880 (tts_inference:476) INFO: inference speed = 34109.4 points / sec.
|
| 854 |
+
2025-02-21 15:01:29,880 (tts_inference:481) INFO: LJ049-0174 (size:84->161536)
|
| 855 |
+
2025-02-21 15:01:31,391 (tts_inference:476) INFO: inference speed = 32332.8 points / sec.
|
| 856 |
+
2025-02-21 15:01:31,391 (tts_inference:481) INFO: LJ049-0175 (size:27->48640)
|
| 857 |
+
2025-02-21 15:01:36,899 (tts_inference:476) INFO: inference speed = 34147.8 points / sec.
|
| 858 |
+
2025-02-21 15:01:36,899 (tts_inference:481) INFO: LJ049-0176 (size:95->187904)
|
| 859 |
+
2025-02-21 15:01:43,987 (tts_inference:476) INFO: inference speed = 29902.6 points / sec.
|
| 860 |
+
2025-02-21 15:01:43,987 (tts_inference:481) INFO: LJ049-0177 (size:107->211712)
|
| 861 |
+
2025-02-21 15:01:47,422 (tts_inference:476) INFO: inference speed = 34654.2 points / sec.
|
| 862 |
+
2025-02-21 15:01:47,422 (tts_inference:481) INFO: LJ049-0178 (size:66->118784)
|
| 863 |
+
2025-02-21 15:01:52,278 (tts_inference:476) INFO: inference speed = 34046.9 points / sec.
|
| 864 |
+
2025-02-21 15:01:52,279 (tts_inference:481) INFO: LJ049-0179 (size:89->165120)
|
| 865 |
+
2025-02-21 15:01:53,428 (tts_inference:476) INFO: inference speed = 31813.4 points / sec.
|
| 866 |
+
2025-02-21 15:01:53,428 (tts_inference:481) INFO: LJ049-0180 (size:23->36352)
|
| 867 |
+
2025-02-21 15:01:59,076 (tts_inference:476) INFO: inference speed = 33887.5 points / sec.
|
| 868 |
+
2025-02-21 15:01:59,076 (tts_inference:481) INFO: LJ049-0181 (size:105->191232)
|
| 869 |
+
2025-02-21 15:02:03,979 (tts_inference:476) INFO: inference speed = 34041.2 points / sec.
|
| 870 |
+
2025-02-21 15:02:03,980 (tts_inference:481) INFO: LJ049-0182 (size:95->166656)
|
| 871 |
+
2025-02-21 15:02:07,942 (tts_inference:476) INFO: inference speed = 33977.4 points / sec.
|
| 872 |
+
2025-02-21 15:02:07,943 (tts_inference:481) INFO: LJ049-0183 (size:73->134400)
|
| 873 |
+
2025-02-21 15:02:11,861 (tts_inference:476) INFO: inference speed = 33766.5 points / sec.
|
| 874 |
+
2025-02-21 15:02:11,861 (tts_inference:481) INFO: LJ049-0184 (size:75->132096)
|
| 875 |
+
2025-02-21 15:02:14,308 (tts_inference:476) INFO: inference speed = 33150.2 points / sec.
|
| 876 |
+
2025-02-21 15:02:14,308 (tts_inference:481) INFO: LJ049-0185 (size:48->80896)
|
| 877 |
+
2025-02-21 15:02:18,372 (tts_inference:476) INFO: inference speed = 33690.8 points / sec.
|
| 878 |
+
2025-02-21 15:02:18,372 (tts_inference:481) INFO: LJ049-0186 (size:85->136704)
|
| 879 |
+
2025-02-21 15:02:22,559 (tts_inference:476) INFO: inference speed = 33747.0 points / sec.
|
| 880 |
+
2025-02-21 15:02:22,559 (tts_inference:481) INFO: LJ049-0187 (size:78->141056)
|
| 881 |
+
2025-02-21 15:02:29,036 (tts_inference:476) INFO: inference speed = 29911.9 points / sec.
|
| 882 |
+
2025-02-21 15:02:29,036 (tts_inference:481) INFO: LJ049-0188 (size:104->193536)
|
| 883 |
+
2025-02-21 15:02:34,132 (tts_inference:476) INFO: inference speed = 34512.2 points / sec.
|
| 884 |
+
2025-02-21 15:02:34,133 (tts_inference:481) INFO: LJ049-0189 (size:91->175616)
|
| 885 |
+
2025-02-21 15:02:37,734 (tts_inference:476) INFO: inference speed = 33691.0 points / sec.
|
| 886 |
+
2025-02-21 15:02:37,734 (tts_inference:481) INFO: LJ049-0190 (size:51->121088)
|
| 887 |
+
2025-02-21 15:02:42,145 (tts_inference:476) INFO: inference speed = 33713.2 points / sec.
|
| 888 |
+
2025-02-21 15:02:42,145 (tts_inference:481) INFO: LJ049-0191 (size:83->148480)
|
| 889 |
+
2025-02-21 15:02:48,046 (tts_inference:476) INFO: inference speed = 34002.6 points / sec.
|
| 890 |
+
2025-02-21 15:02:48,047 (tts_inference:481) INFO: LJ049-0192 (size:119->200448)
|
| 891 |
+
2025-02-21 15:02:53,710 (tts_inference:476) INFO: inference speed = 34039.6 points / sec.
|
| 892 |
+
2025-02-21 15:02:53,710 (tts_inference:481) INFO: LJ049-0193 (size:113->192512)
|
| 893 |
+
2025-02-21 15:02:55,262 (tts_inference:476) INFO: inference speed = 32155.5 points / sec.
|
| 894 |
+
2025-02-21 15:02:55,262 (tts_inference:481) INFO: LJ049-0194 (size:21->49664)
|
| 895 |
+
2025-02-21 15:03:00,268 (tts_inference:476) INFO: inference speed = 34189.8 points / sec.
|
| 896 |
+
2025-02-21 15:03:00,269 (tts_inference:481) INFO: LJ049-0195 (size:103->171008)
|
| 897 |
+
2025-02-21 15:03:06,440 (tts_inference:476) INFO: inference speed = 31727.5 points / sec.
|
| 898 |
+
2025-02-21 15:03:06,441 (tts_inference:481) INFO: LJ049-0196 (size:106->195584)
|
| 899 |
+
# Accounting: time=147 threads=1
|
| 900 |
+
# Ended (code 0) at Fri Feb 21 15:03:07 JST 2025, elapsed time 147 seconds
|
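For reference, each per-utterance pair of lines above reports an "inference speed" in points (waveform samples) per wall-clock second together with a "size:tokens->samples" summary; combined with the 22,050 Hz sampling rate shown in the LogMelFbank configuration, these give the audio duration and an approximate real-time factor. A small sketch using the first utterance (LJ049-0166) as the example:
# Hedged sketch: derive duration and real-time factor from one log line above.
# "LJ049-0166 (size:96->178944)" -> 96 input tokens, 178944 output samples;
# "inference speed = 28924.2 points / sec" -> samples synthesized per second.
sr = 22050                              # sampling rate from the LogMel config in this log
samples = 178944
speed = 28924.2
audio_seconds = samples / sr            # ~8.1 s of synthesized audio
compute_seconds = samples / speed       # ~6.2 s of inference (CPU, per the --ngpu 0 option in these decode jobs)
rtf = compute_seconds / audio_seconds   # ~0.76; below 1.0 means faster than real time
print(f"{audio_seconds:.1f}s audio in {compute_seconds:.1f}s, RTF={rtf:.2f}")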
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.7.log
ADDED
|
@@ -0,0 +1,900 @@
|
| 1 |
+
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.7.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.7 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.7.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.7 --vocoder_file none --config conf/decode.yaml
|
| 7 |
+
2025-02-21 15:00:43,857 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,976 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,091 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,902 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
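The extractor printed above is an STFT (n_fft=1024, win_length=1024, hop_length=256) followed by an 80-band mel filterbank at 22,050 Hz with fmin=80 and fmax=7600. A rough librosa equivalent of that front-end is sketched below as an illustration only; it is not ESPnet's LogMelFbank code, the input path is hypothetical, and the exact log base and flooring may differ.
# Hedged sketch: approximate the LogMelFbank front-end above with librosa.
import numpy as np
import librosa

wav, sr = librosa.load("LJ049-0166.wav", sr=22050)           # hypothetical input file
mel = librosa.feature.melspectrogram(
    y=wav, sr=sr, n_fft=1024, hop_length=256, win_length=1024,
    n_mels=80, fmin=80, fmax=7600, htk=False, power=2.0,
)
logmel = np.log(np.maximum(mel, 1e-10))                      # (80, n_frames) log-mel features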
| 15 |
+
2025-02-21 15:00:45,902 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
|
| 17 |
+
2025-02-21 15:00:45,906 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
)
|
| 439 |
+
(1): Sequential(
|
| 440 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 441 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 442 |
+
)
|
| 443 |
+
(2): Sequential(
|
| 444 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 445 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 446 |
+
)
|
| 447 |
+
)
|
| 448 |
+
(convs2): ModuleList(
|
| 449 |
+
(0): Sequential(
|
| 450 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 451 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 452 |
+
)
|
| 453 |
+
(1): Sequential(
|
| 454 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 455 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 456 |
+
)
|
| 457 |
+
(2): Sequential(
|
| 458 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 459 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 460 |
+
)
|
| 461 |
+
)
|
| 462 |
+
)
|
| 463 |
+
(6): ResidualBlock(
|
| 464 |
+
(convs1): ModuleList(
|
| 465 |
+
(0): Sequential(
|
| 466 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 467 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 468 |
+
)
|
| 469 |
+
(1): Sequential(
|
| 470 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 471 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 472 |
+
)
|
| 473 |
+
(2): Sequential(
|
| 474 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 475 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 476 |
+
)
|
| 477 |
+
)
|
| 478 |
+
(convs2): ModuleList(
|
| 479 |
+
(0): Sequential(
|
| 480 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 481 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 482 |
+
)
|
| 483 |
+
(1): Sequential(
|
| 484 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 485 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 486 |
+
)
|
| 487 |
+
(2): Sequential(
|
| 488 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 489 |
+
(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 490 |
+
)
|
| 491 |
+
)
|
| 492 |
+
)
|
| 493 |
+
(7): ResidualBlock(
|
| 494 |
+
(convs1): ModuleList(
|
| 495 |
+
(0): Sequential(
|
| 496 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 497 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 498 |
+
)
|
| 499 |
+
(1): Sequential(
|
| 500 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 501 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 502 |
+
)
|
| 503 |
+
(2): Sequential(
|
| 504 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 505 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 506 |
+
)
|
| 507 |
+
)
|
| 508 |
+
(convs2): ModuleList(
|
| 509 |
+
(0): Sequential(
|
| 510 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 511 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 512 |
+
)
|
| 513 |
+
(1): Sequential(
|
| 514 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 515 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 516 |
+
)
|
| 517 |
+
(2): Sequential(
|
| 518 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 519 |
+
(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 520 |
+
)
|
| 521 |
+
)
|
| 522 |
+
)
|
| 523 |
+
(8): ResidualBlock(
|
| 524 |
+
(convs1): ModuleList(
|
| 525 |
+
(0): Sequential(
|
| 526 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 527 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 528 |
+
)
|
| 529 |
+
(1): Sequential(
|
| 530 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 531 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 532 |
+
)
|
| 533 |
+
(2): Sequential(
|
| 534 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 535 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 536 |
+
)
|
| 537 |
+
)
|
| 538 |
+
(convs2): ModuleList(
|
| 539 |
+
(0): Sequential(
|
| 540 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 541 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 542 |
+
)
|
| 543 |
+
(1): Sequential(
|
| 544 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 545 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 546 |
+
)
|
| 547 |
+
(2): Sequential(
|
| 548 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 549 |
+
(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 550 |
+
)
|
| 551 |
+
)
|
| 552 |
+
)
|
| 553 |
+
(9): ResidualBlock(
|
| 554 |
+
(convs1): ModuleList(
|
| 555 |
+
(0): Sequential(
|
| 556 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 557 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 558 |
+
)
|
| 559 |
+
(1): Sequential(
|
| 560 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 561 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 562 |
+
)
|
| 563 |
+
(2): Sequential(
|
| 564 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 565 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 566 |
+
)
|
| 567 |
+
)
|
| 568 |
+
(convs2): ModuleList(
|
| 569 |
+
(0): Sequential(
|
| 570 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 571 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 572 |
+
)
|
| 573 |
+
(1): Sequential(
|
| 574 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 575 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 576 |
+
)
|
| 577 |
+
(2): Sequential(
|
| 578 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 579 |
+
(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 580 |
+
)
|
| 581 |
+
)
|
| 582 |
+
)
|
| 583 |
+
(10): ResidualBlock(
|
| 584 |
+
(convs1): ModuleList(
|
| 585 |
+
(0): Sequential(
|
| 586 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 587 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 588 |
+
)
|
| 589 |
+
(1): Sequential(
|
| 590 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 591 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 592 |
+
)
|
| 593 |
+
(2): Sequential(
|
| 594 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 595 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 596 |
+
)
|
| 597 |
+
)
|
| 598 |
+
(convs2): ModuleList(
|
| 599 |
+
(0): Sequential(
|
| 600 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 601 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 602 |
+
)
|
| 603 |
+
(1): Sequential(
|
| 604 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 605 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 606 |
+
)
|
| 607 |
+
(2): Sequential(
|
| 608 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 609 |
+
(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 610 |
+
)
|
| 611 |
+
)
|
| 612 |
+
)
|
| 613 |
+
(11): ResidualBlock(
|
| 614 |
+
(convs1): ModuleList(
|
| 615 |
+
(0): Sequential(
|
| 616 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 617 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 618 |
+
)
|
| 619 |
+
(1): Sequential(
|
| 620 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 621 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 622 |
+
)
|
| 623 |
+
(2): Sequential(
|
| 624 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 625 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 626 |
+
)
|
| 627 |
+
)
|
| 628 |
+
(convs2): ModuleList(
|
| 629 |
+
(0): Sequential(
|
| 630 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 631 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 632 |
+
)
|
| 633 |
+
(1): Sequential(
|
| 634 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 635 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 636 |
+
)
|
| 637 |
+
(2): Sequential(
|
| 638 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 639 |
+
(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 640 |
+
)
|
| 641 |
+
)
|
| 642 |
+
)
|
| 643 |
+
)
|
| 644 |
+
(output_conv): Sequential(
|
| 645 |
+
(0): LeakyReLU(negative_slope=0.01)
|
| 646 |
+
(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 647 |
+
(2): Tanh()
|
| 648 |
+
)
|
| 649 |
+
)
|
| 650 |
+
)
|
| 651 |
+
(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
|
| 652 |
+
(msd): HiFiGANMultiScaleDiscriminator(
|
| 653 |
+
(discriminators): ModuleList(
|
| 654 |
+
(0): HiFiGANScaleDiscriminator(
|
| 655 |
+
(layers): ModuleList(
|
| 656 |
+
(0): Sequential(
|
| 657 |
+
(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
|
| 658 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 659 |
+
)
|
| 660 |
+
(1): Sequential(
|
| 661 |
+
(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
|
| 662 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 663 |
+
)
|
| 664 |
+
(2): Sequential(
|
| 665 |
+
(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
|
| 666 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 667 |
+
)
|
| 668 |
+
(3): Sequential(
|
| 669 |
+
(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 670 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 671 |
+
)
|
| 672 |
+
(4): Sequential(
|
| 673 |
+
(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
|
| 674 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 675 |
+
)
|
| 676 |
+
(5): Sequential(
|
| 677 |
+
(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
|
| 678 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 679 |
+
)
|
| 680 |
+
(6): Sequential(
|
| 681 |
+
(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 682 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 683 |
+
)
|
| 684 |
+
(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 685 |
+
)
|
| 686 |
+
)
|
| 687 |
+
)
|
| 688 |
+
)
|
| 689 |
+
(mpd): HiFiGANMultiPeriodDiscriminator(
|
| 690 |
+
(discriminators): ModuleList(
|
| 691 |
+
(0): HiFiGANPeriodDiscriminator(
|
| 692 |
+
(convs): ModuleList(
|
| 693 |
+
(0): Sequential(
|
| 694 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 695 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 696 |
+
)
|
| 697 |
+
(1): Sequential(
|
| 698 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 699 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 700 |
+
)
|
| 701 |
+
(2): Sequential(
|
| 702 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 703 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 704 |
+
)
|
| 705 |
+
(3): Sequential(
|
| 706 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 707 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 708 |
+
)
|
| 709 |
+
(4): Sequential(
|
| 710 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 711 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 712 |
+
)
|
| 713 |
+
)
|
| 714 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 715 |
+
)
|
| 716 |
+
(1): HiFiGANPeriodDiscriminator(
|
| 717 |
+
(convs): ModuleList(
|
| 718 |
+
(0): Sequential(
|
| 719 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 720 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 721 |
+
)
|
| 722 |
+
(1): Sequential(
|
| 723 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 724 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 725 |
+
)
|
| 726 |
+
(2): Sequential(
|
| 727 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 728 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 729 |
+
)
|
| 730 |
+
(3): Sequential(
|
| 731 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 732 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 733 |
+
)
|
| 734 |
+
(4): Sequential(
|
| 735 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 736 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 737 |
+
)
|
| 738 |
+
)
|
| 739 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 740 |
+
)
|
| 741 |
+
(2): HiFiGANPeriodDiscriminator(
|
| 742 |
+
(convs): ModuleList(
|
| 743 |
+
(0): Sequential(
|
| 744 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 745 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 746 |
+
)
|
| 747 |
+
(1): Sequential(
|
| 748 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 749 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 750 |
+
)
|
| 751 |
+
(2): Sequential(
|
| 752 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 753 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 754 |
+
)
|
| 755 |
+
(3): Sequential(
|
| 756 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 757 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 758 |
+
)
|
| 759 |
+
(4): Sequential(
|
| 760 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 761 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 762 |
+
)
|
| 763 |
+
)
|
| 764 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 765 |
+
)
|
| 766 |
+
(3): HiFiGANPeriodDiscriminator(
|
| 767 |
+
(convs): ModuleList(
|
| 768 |
+
(0): Sequential(
|
| 769 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 770 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 771 |
+
)
|
| 772 |
+
(1): Sequential(
|
| 773 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 774 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 775 |
+
)
|
| 776 |
+
(2): Sequential(
|
| 777 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 778 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 779 |
+
)
|
| 780 |
+
(3): Sequential(
|
| 781 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 782 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 783 |
+
)
|
| 784 |
+
(4): Sequential(
|
| 785 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 786 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 787 |
+
)
|
| 788 |
+
)
|
| 789 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 790 |
+
)
|
| 791 |
+
(4): HiFiGANPeriodDiscriminator(
|
| 792 |
+
(convs): ModuleList(
|
| 793 |
+
(0): Sequential(
|
| 794 |
+
(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 795 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 796 |
+
)
|
| 797 |
+
(1): Sequential(
|
| 798 |
+
(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 799 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 800 |
+
)
|
| 801 |
+
(2): Sequential(
|
| 802 |
+
(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 803 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 804 |
+
)
|
| 805 |
+
(3): Sequential(
|
| 806 |
+
(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
|
| 807 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 808 |
+
)
|
| 809 |
+
(4): Sequential(
|
| 810 |
+
(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
|
| 811 |
+
(1): LeakyReLU(negative_slope=0.1)
|
| 812 |
+
)
|
| 813 |
+
)
|
| 814 |
+
(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
|
| 815 |
+
)
|
| 816 |
+
)
|
| 817 |
+
)
|
| 818 |
+
)
|
| 819 |
+
(generator_adv_loss): GeneratorAdversarialLoss()
|
| 820 |
+
(discriminator_adv_loss): DiscriminatorAdversarialLoss()
|
| 821 |
+
(feat_match_loss): FeatureMatchLoss()
|
| 822 |
+
(mel_loss): MelSpectrogramLoss(
|
| 823 |
+
(wav_to_mel): LogMelFbank(
|
| 824 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 825 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
|
| 826 |
+
)
|
| 827 |
+
)
|
| 828 |
+
(var_loss): VarianceLoss(
|
| 829 |
+
(mse_criterion): MSELoss()
|
| 830 |
+
(duration_criterion): DurationPredictorLoss(
|
| 831 |
+
(criterion): MSELoss()
|
| 832 |
+
)
|
| 833 |
+
)
|
| 834 |
+
(forwardsum_loss): ForwardSumLoss()
|
| 835 |
+
)
|
| 836 |
+
2025-02-21 15:00:46,287 (font_manager:1547) INFO: generated new fontManager
|
| 837 |
+
2025-02-21 15:00:52,502 (tts_inference:476) INFO: inference speed = 28394.3 points / sec.
|
| 838 |
+
2025-02-21 15:00:52,502 (tts_inference:481) INFO: LJ049-0197 (size:81->133120)
|
| 839 |
+
2025-02-21 15:00:55,540 (tts_inference:476) INFO: inference speed = 33542.8 points / sec.
|
| 840 |
+
2025-02-21 15:00:55,540 (tts_inference:481) INFO: LJ049-0198 (size:52->101632)
|
| 841 |
+
2025-02-21 15:01:00,393 (tts_inference:476) INFO: inference speed = 29580.5 points / sec.
|
| 842 |
+
2025-02-21 15:01:00,393 (tts_inference:481) INFO: LJ049-0199 (size:88->143360)
|
| 843 |
+
2025-02-21 15:01:05,713 (tts_inference:476) INFO: inference speed = 29820.9 points / sec.
|
| 844 |
+
2025-02-21 15:01:05,713 (tts_inference:481) INFO: LJ049-0200 (size:89->158464)
|
| 845 |
+
2025-02-21 15:01:11,822 (tts_inference:476) INFO: inference speed = 30752.0 points / sec.
|
| 846 |
+
2025-02-21 15:01:11,822 (tts_inference:481) INFO: LJ049-0201 (size:105->187648)
|
| 847 |
+
2025-02-21 15:01:13,406 (tts_inference:476) INFO: inference speed = 32315.0 points / sec.
|
| 848 |
+
2025-02-21 15:01:13,406 (tts_inference:481) INFO: LJ049-0202 (size:30->50944)
|
| 849 |
+
2025-02-21 15:01:18,661 (tts_inference:476) INFO: inference speed = 34521.1 points / sec.
|
| 850 |
+
2025-02-21 15:01:18,662 (tts_inference:481) INFO: LJ049-0203 (size:97->181248)
|
| 851 |
+
2025-02-21 15:01:20,112 (tts_inference:476) INFO: inference speed = 32289.5 points / sec.
|
| 852 |
+
2025-02-21 15:01:20,112 (tts_inference:481) INFO: LJ049-0204 (size:26->46592)
|
| 853 |
+
2025-02-21 15:01:26,684 (tts_inference:476) INFO: inference speed = 29939.6 points / sec.
|
| 854 |
+
2025-02-21 15:01:26,684 (tts_inference:481) INFO: LJ049-0205 (size:116->196608)
|
| 855 |
+
2025-02-21 15:01:30,326 (tts_inference:476) INFO: inference speed = 33882.6 points / sec.
|
| 856 |
+
2025-02-21 15:01:30,326 (tts_inference:481) INFO: LJ049-0206 (size:58->123136)
|
| 857 |
+
2025-02-21 15:01:35,593 (tts_inference:476) INFO: inference speed = 33872.5 points / sec.
|
| 858 |
+
2025-02-21 15:01:35,593 (tts_inference:481) INFO: LJ049-0207 (size:115->178176)
|
| 859 |
+
2025-02-21 15:01:39,730 (tts_inference:476) INFO: inference speed = 33601.0 points / sec.
|
| 860 |
+
2025-02-21 15:01:39,730 (tts_inference:481) INFO: LJ049-0208 (size:83->138752)
|
| 861 |
+
2025-02-21 15:01:43,439 (tts_inference:476) INFO: inference speed = 33944.8 points / sec.
|
| 862 |
+
2025-02-21 15:01:43,440 (tts_inference:481) INFO: LJ049-0209 (size:67->125696)
|
| 863 |
+
2025-02-21 15:01:46,642 (tts_inference:476) INFO: inference speed = 33719.8 points / sec.
|
| 864 |
+
2025-02-21 15:01:46,642 (tts_inference:481) INFO: LJ049-0210 (size:73->107776)
|
| 865 |
+
2025-02-21 15:01:52,037 (tts_inference:476) INFO: inference speed = 34301.8 points / sec.
|
| 866 |
+
2025-02-21 15:01:52,037 (tts_inference:481) INFO: LJ049-0211 (size:116->184832)
|
| 867 |
+
2025-02-21 15:01:54,845 (tts_inference:476) INFO: inference speed = 33456.6 points / sec.
|
| 868 |
+
2025-02-21 15:01:54,845 (tts_inference:481) INFO: LJ049-0212 (size:51->93696)
|
| 869 |
+
2025-02-21 15:01:56,933 (tts_inference:476) INFO: inference speed = 33202.0 points / sec.
|
| 870 |
+
2025-02-21 15:01:56,933 (tts_inference:481) INFO: LJ049-0213 (size:41->69120)
|
| 871 |
+
2025-02-21 15:02:01,769 (tts_inference:476) INFO: inference speed = 34178.8 points / sec.
|
| 872 |
+
2025-02-21 15:02:01,769 (tts_inference:481) INFO: LJ049-0214 (size:87->165120)
|
| 873 |
+
2025-02-21 15:02:07,124 (tts_inference:476) INFO: inference speed = 34232.8 points / sec.
|
| 874 |
+
2025-02-21 15:02:07,124 (tts_inference:481) INFO: LJ049-0215 (size:106->183040)
|
| 875 |
+
2025-02-21 15:02:11,465 (tts_inference:476) INFO: inference speed = 33731.6 points / sec.
|
| 876 |
+
2025-02-21 15:02:11,465 (tts_inference:481) INFO: LJ049-0216 (size:64->146176)
|
| 877 |
+
2025-02-21 15:02:16,184 (tts_inference:476) INFO: inference speed = 33960.8 points / sec.
|
| 878 |
+
2025-02-21 15:02:16,184 (tts_inference:481) INFO: LJ049-0217 (size:85->160000)
|
| 879 |
+
2025-02-21 15:02:19,256 (tts_inference:476) INFO: inference speed = 33674.4 points / sec.
|
| 880 |
+
2025-02-21 15:02:19,257 (tts_inference:481) INFO: LJ049-0218 (size:61->103168)
|
| 881 |
+
2025-02-21 15:02:21,450 (tts_inference:476) INFO: inference speed = 32776.5 points / sec.
|
| 882 |
+
2025-02-21 15:02:21,450 (tts_inference:481) INFO: LJ049-0219 (size:41->71680)
|
| 883 |
+
2025-02-21 15:02:26,813 (tts_inference:476) INFO: inference speed = 34021.4 points / sec.
|
| 884 |
+
2025-02-21 15:02:26,814 (tts_inference:481) INFO: LJ049-0220 (size:110->182272)
|
| 885 |
+
2025-02-21 15:02:32,210 (tts_inference:476) INFO: inference speed = 34106.2 points / sec.
|
| 886 |
+
2025-02-21 15:02:32,210 (tts_inference:481) INFO: LJ049-0221 (size:103->183808)
|
| 887 |
+
2025-02-21 15:02:36,416 (tts_inference:476) INFO: inference speed = 33481.1 points / sec.
|
| 888 |
+
2025-02-21 15:02:36,416 (tts_inference:481) INFO: LJ049-0222 (size:80->140544)
|
| 889 |
+
2025-02-21 15:02:41,399 (tts_inference:476) INFO: inference speed = 34521.3 points / sec.
|
| 890 |
+
2025-02-21 15:02:41,399 (tts_inference:481) INFO: LJ049-0223 (size:90->171776)
|
| 891 |
+
2025-02-21 15:02:43,558 (tts_inference:476) INFO: inference speed = 32834.1 points / sec.
|
| 892 |
+
2025-02-21 15:02:43,558 (tts_inference:481) INFO: LJ049-0224 (size:45->70656)
|
| 893 |
+
2025-02-21 15:02:49,133 (tts_inference:476) INFO: inference speed = 34156.6 points / sec.
|
| 894 |
+
2025-02-21 15:02:49,133 (tts_inference:481) INFO: LJ049-0225 (size:94->190208)
|
| 895 |
+
2025-02-21 15:02:53,350 (tts_inference:476) INFO: inference speed = 33941.5 points / sec.
|
| 896 |
+
2025-02-21 15:02:53,350 (tts_inference:481) INFO: LJ049-0226 (size:73->142848)
|
| 897 |
+
2025-02-21 15:02:57,714 (tts_inference:476) INFO: inference speed = 34368.6 points / sec.
|
| 898 |
+
2025-02-21 15:02:57,714 (tts_inference:481) INFO: LJ049-0227 (size:89->149760)
|
| 899 |
+
# Accounting: time=138 threads=1
|
| 900 |
+
# Ended (code 0) at Fri Feb 21 15:02:58 JST 2025, elapsed time 138 seconds
|
imdanboy/jets/decode_train.loss.ave/dev/log/tts_inference.8.log
ADDED
|
@@ -0,0 +1,900 @@
|
| 1 |
+
# python3 -m espnet2.bin.tts_inference --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.8.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.8 --vocoder_file none --config conf/decode.yaml
|
| 2 |
+
# Started at Fri Feb 21 15:00:40 JST 2025
|
| 3 |
+
#
|
| 4 |
+
/usr/lib/python3/dist-packages/requests/__init__.py:89: RequestsDependencyWarning: urllib3 (2.2.3) or chardet (3.0.4) doesn't match a supported version!
|
| 5 |
+
warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
|
| 6 |
+
/usr/bin/python3 /work/espnet/espnet2/bin/tts_inference.py --ngpu 0 --data_path_and_name_and_type dump/raw/dev/text,text,text --data_path_and_name_and_type dump/raw/dev/wav.scp,speech,sound --key_file exp/imdanboy/jets/decode_train.loss.ave/dev/log/keys.8.scp --model_file exp/imdanboy/jets/train.total_count.ave_5best.pth --train_config exp/imdanboy/jets/config.yaml --output_dir exp/imdanboy/jets/decode_train.loss.ave/dev/log/output.8 --vocoder_file none --config conf/decode.yaml
|
| 7 |
+
2025-02-21 15:00:43,857 (tts:302) INFO: Vocabulary size: 78
|
| 8 |
+
2025-02-21 15:00:43,977 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 9 |
+
2025-02-21 15:00:44,092 (encoder:172) INFO: encoder self-attention layer type = self-attention
|
| 10 |
+
2025-02-21 15:00:45,900 (tts_inference:126) INFO: Extractor:
|
| 11 |
+
LogMelFbank(
|
| 12 |
+
(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
|
| 13 |
+
(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=80, fmax=7600, htk=False)
|
| 14 |
+
)
|
| 15 |
+
2025-02-21 15:00:45,900 (tts_inference:127) INFO: Normalizer:
|
| 16 |
+
GlobalMVN(stats_file=/usr/local/lib/python3.8/dist-packages/espnet_model_zoo/models--imdanboy--jets/snapshots/1db95c26516c44e6789bf06417c51e89400b190b/exp/tts_stats_raw_phn_tacotron_g2p_en_no_space/train/feats_stats.npz, norm_means=True, norm_vars=True)
|
| 17 |
+
2025-02-21 15:00:45,903 (tts_inference:128) INFO: TTS:
|
| 18 |
+
JETS(
|
| 19 |
+
(generator): JETSGenerator(
|
| 20 |
+
(encoder): Encoder(
|
| 21 |
+
(embed): Sequential(
|
| 22 |
+
(0): Embedding(78, 256, padding_idx=0)
|
| 23 |
+
(1): ScaledPositionalEncoding(
|
| 24 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 25 |
+
)
|
| 26 |
+
)
|
| 27 |
+
(encoders): MultiSequential(
|
| 28 |
+
(0): EncoderLayer(
|
| 29 |
+
(self_attn): MultiHeadedAttention(
|
| 30 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 31 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 32 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 33 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 34 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 35 |
+
)
|
| 36 |
+
(feed_forward): MultiLayeredConv1d(
|
| 37 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 38 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 39 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 40 |
+
)
|
| 41 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 42 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 43 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 44 |
+
)
|
| 45 |
+
(1): EncoderLayer(
|
| 46 |
+
(self_attn): MultiHeadedAttention(
|
| 47 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 48 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 49 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 50 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 51 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 52 |
+
)
|
| 53 |
+
(feed_forward): MultiLayeredConv1d(
|
| 54 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 55 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 56 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 57 |
+
)
|
| 58 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 59 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 60 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 61 |
+
)
|
| 62 |
+
(2): EncoderLayer(
|
| 63 |
+
(self_attn): MultiHeadedAttention(
|
| 64 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 65 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 66 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 67 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 68 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 69 |
+
)
|
| 70 |
+
(feed_forward): MultiLayeredConv1d(
|
| 71 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 72 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 73 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 74 |
+
)
|
| 75 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 76 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 77 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 78 |
+
)
|
| 79 |
+
(3): EncoderLayer(
|
| 80 |
+
(self_attn): MultiHeadedAttention(
|
| 81 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 82 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 83 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 84 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 85 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 86 |
+
)
|
| 87 |
+
(feed_forward): MultiLayeredConv1d(
|
| 88 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 89 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 90 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 91 |
+
)
|
| 92 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 93 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 94 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 95 |
+
)
|
| 96 |
+
)
|
| 97 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 98 |
+
)
|
| 99 |
+
(duration_predictor): DurationPredictor(
|
| 100 |
+
(conv): ModuleList(
|
| 101 |
+
(0): Sequential(
|
| 102 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 103 |
+
(1): ReLU()
|
| 104 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 105 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 106 |
+
)
|
| 107 |
+
(1): Sequential(
|
| 108 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 109 |
+
(1): ReLU()
|
| 110 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 111 |
+
(3): Dropout(p=0.1, inplace=False)
|
| 112 |
+
)
|
| 113 |
+
)
|
| 114 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 115 |
+
)
|
| 116 |
+
(pitch_predictor): VariancePredictor(
|
| 117 |
+
(conv): ModuleList(
|
| 118 |
+
(0): Sequential(
|
| 119 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 120 |
+
(1): ReLU()
|
| 121 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 122 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 123 |
+
)
|
| 124 |
+
(1): Sequential(
|
| 125 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 126 |
+
(1): ReLU()
|
| 127 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 128 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 129 |
+
)
|
| 130 |
+
(2): Sequential(
|
| 131 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 132 |
+
(1): ReLU()
|
| 133 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 134 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 135 |
+
)
|
| 136 |
+
(3): Sequential(
|
| 137 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 138 |
+
(1): ReLU()
|
| 139 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 140 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 141 |
+
)
|
| 142 |
+
(4): Sequential(
|
| 143 |
+
(0): Conv1d(256, 256, kernel_size=(5,), stride=(1,), padding=(2,))
|
| 144 |
+
(1): ReLU()
|
| 145 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 146 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 147 |
+
)
|
| 148 |
+
)
|
| 149 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 150 |
+
)
|
| 151 |
+
(pitch_embed): Sequential(
|
| 152 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 153 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 154 |
+
)
|
| 155 |
+
(energy_predictor): VariancePredictor(
|
| 156 |
+
(conv): ModuleList(
|
| 157 |
+
(0): Sequential(
|
| 158 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 159 |
+
(1): ReLU()
|
| 160 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 161 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 162 |
+
)
|
| 163 |
+
(1): Sequential(
|
| 164 |
+
(0): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 165 |
+
(1): ReLU()
|
| 166 |
+
(2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 167 |
+
(3): Dropout(p=0.5, inplace=False)
|
| 168 |
+
)
|
| 169 |
+
)
|
| 170 |
+
(linear): Linear(in_features=256, out_features=1, bias=True)
|
| 171 |
+
)
|
| 172 |
+
(energy_embed): Sequential(
|
| 173 |
+
(0): Conv1d(1, 256, kernel_size=(1,), stride=(1,))
|
| 174 |
+
(1): Dropout(p=0.0, inplace=False)
|
| 175 |
+
)
|
| 176 |
+
(alignment_module): AlignmentModule(
|
| 177 |
+
(t_conv1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 178 |
+
(t_conv2): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 179 |
+
(f_conv1): Conv1d(80, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 180 |
+
(f_conv2): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 181 |
+
(f_conv3): Conv1d(256, 256, kernel_size=(1,), stride=(1,))
|
| 182 |
+
)
|
| 183 |
+
(length_regulator): GaussianUpsampling()
|
| 184 |
+
(decoder): Encoder(
|
| 185 |
+
(embed): Sequential(
|
| 186 |
+
(0): ScaledPositionalEncoding(
|
| 187 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 188 |
+
)
|
| 189 |
+
)
|
| 190 |
+
(encoders): MultiSequential(
|
| 191 |
+
(0): EncoderLayer(
|
| 192 |
+
(self_attn): MultiHeadedAttention(
|
| 193 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 194 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 195 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 196 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 197 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 198 |
+
)
|
| 199 |
+
(feed_forward): MultiLayeredConv1d(
|
| 200 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 201 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 202 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 203 |
+
)
|
| 204 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 205 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 206 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 207 |
+
)
|
| 208 |
+
(1): EncoderLayer(
|
| 209 |
+
(self_attn): MultiHeadedAttention(
|
| 210 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 211 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 212 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 213 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 214 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 215 |
+
)
|
| 216 |
+
(feed_forward): MultiLayeredConv1d(
|
| 217 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 218 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 219 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 220 |
+
)
|
| 221 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 222 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 223 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 224 |
+
)
|
| 225 |
+
(2): EncoderLayer(
|
| 226 |
+
(self_attn): MultiHeadedAttention(
|
| 227 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 228 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 229 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 230 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 231 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 232 |
+
)
|
| 233 |
+
(feed_forward): MultiLayeredConv1d(
|
| 234 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 235 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 236 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 237 |
+
)
|
| 238 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 239 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 240 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 241 |
+
)
|
| 242 |
+
(3): EncoderLayer(
|
| 243 |
+
(self_attn): MultiHeadedAttention(
|
| 244 |
+
(linear_q): Linear(in_features=256, out_features=256, bias=True)
|
| 245 |
+
(linear_k): Linear(in_features=256, out_features=256, bias=True)
|
| 246 |
+
(linear_v): Linear(in_features=256, out_features=256, bias=True)
|
| 247 |
+
(linear_out): Linear(in_features=256, out_features=256, bias=True)
|
| 248 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 249 |
+
)
|
| 250 |
+
(feed_forward): MultiLayeredConv1d(
|
| 251 |
+
(w_1): Conv1d(256, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 252 |
+
(w_2): Conv1d(1024, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 253 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 254 |
+
)
|
| 255 |
+
(norm1): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 256 |
+
(norm2): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 257 |
+
(dropout): Dropout(p=0.2, inplace=False)
|
| 258 |
+
)
|
| 259 |
+
)
|
| 260 |
+
(after_norm): LayerNorm((256,), eps=1e-12, elementwise_affine=True)
|
| 261 |
+
)
|
| 262 |
+
(generator): HiFiGANGenerator(
|
| 263 |
+
(input_conv): Conv1d(256, 512, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 264 |
+
(upsamples): ModuleList(
|
| 265 |
+
(0): Sequential(
|
| 266 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 267 |
+
(1): ConvTranspose1d(512, 256, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 268 |
+
)
|
| 269 |
+
(1): Sequential(
|
| 270 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 271 |
+
(1): ConvTranspose1d(256, 128, kernel_size=(16,), stride=(8,), padding=(4,))
|
| 272 |
+
)
|
| 273 |
+
(2): Sequential(
|
| 274 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 275 |
+
(1): ConvTranspose1d(128, 64, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 276 |
+
)
|
| 277 |
+
(3): Sequential(
|
| 278 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 279 |
+
(1): ConvTranspose1d(64, 32, kernel_size=(4,), stride=(2,), padding=(1,))
|
| 280 |
+
)
|
| 281 |
+
)
|
| 282 |
+
(blocks): ModuleList(
|
| 283 |
+
(0): ResidualBlock(
|
| 284 |
+
(convs1): ModuleList(
|
| 285 |
+
(0): Sequential(
|
| 286 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 287 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 288 |
+
)
|
| 289 |
+
(1): Sequential(
|
| 290 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 291 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 292 |
+
)
|
| 293 |
+
(2): Sequential(
|
| 294 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 295 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 296 |
+
)
|
| 297 |
+
)
|
| 298 |
+
(convs2): ModuleList(
|
| 299 |
+
(0): Sequential(
|
| 300 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 301 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 302 |
+
)
|
| 303 |
+
(1): Sequential(
|
| 304 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 305 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 306 |
+
)
|
| 307 |
+
(2): Sequential(
|
| 308 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 309 |
+
(1): Conv1d(256, 256, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 310 |
+
)
|
| 311 |
+
)
|
| 312 |
+
)
|
| 313 |
+
(1): ResidualBlock(
|
| 314 |
+
(convs1): ModuleList(
|
| 315 |
+
(0): Sequential(
|
| 316 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 317 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 318 |
+
)
|
| 319 |
+
(1): Sequential(
|
| 320 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 321 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 322 |
+
)
|
| 323 |
+
(2): Sequential(
|
| 324 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 325 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 326 |
+
)
|
| 327 |
+
)
|
| 328 |
+
(convs2): ModuleList(
|
| 329 |
+
(0): Sequential(
|
| 330 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 331 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 332 |
+
)
|
| 333 |
+
(1): Sequential(
|
| 334 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 335 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 336 |
+
)
|
| 337 |
+
(2): Sequential(
|
| 338 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 339 |
+
(1): Conv1d(256, 256, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 340 |
+
)
|
| 341 |
+
)
|
| 342 |
+
)
|
| 343 |
+
(2): ResidualBlock(
|
| 344 |
+
(convs1): ModuleList(
|
| 345 |
+
(0): Sequential(
|
| 346 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 347 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 348 |
+
)
|
| 349 |
+
(1): Sequential(
|
| 350 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 351 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
|
| 352 |
+
)
|
| 353 |
+
(2): Sequential(
|
| 354 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 355 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
|
| 356 |
+
)
|
| 357 |
+
)
|
| 358 |
+
(convs2): ModuleList(
|
| 359 |
+
(0): Sequential(
|
| 360 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 361 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 362 |
+
)
|
| 363 |
+
(1): Sequential(
|
| 364 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 365 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 366 |
+
)
|
| 367 |
+
(2): Sequential(
|
| 368 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 369 |
+
(1): Conv1d(256, 256, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 370 |
+
)
|
| 371 |
+
)
|
| 372 |
+
)
|
| 373 |
+
(3): ResidualBlock(
|
| 374 |
+
(convs1): ModuleList(
|
| 375 |
+
(0): Sequential(
|
| 376 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 377 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 378 |
+
)
|
| 379 |
+
(1): Sequential(
|
| 380 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 381 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
|
| 382 |
+
)
|
| 383 |
+
(2): Sequential(
|
| 384 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 385 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
|
| 386 |
+
)
|
| 387 |
+
)
|
| 388 |
+
(convs2): ModuleList(
|
| 389 |
+
(0): Sequential(
|
| 390 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 391 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 392 |
+
)
|
| 393 |
+
(1): Sequential(
|
| 394 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 395 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 396 |
+
)
|
| 397 |
+
(2): Sequential(
|
| 398 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 399 |
+
(1): Conv1d(128, 128, kernel_size=(3,), stride=(1,), padding=(1,))
|
| 400 |
+
)
|
| 401 |
+
)
|
| 402 |
+
)
|
| 403 |
+
(4): ResidualBlock(
|
| 404 |
+
(convs1): ModuleList(
|
| 405 |
+
(0): Sequential(
|
| 406 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 407 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 408 |
+
)
|
| 409 |
+
(1): Sequential(
|
| 410 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 411 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
|
| 412 |
+
)
|
| 413 |
+
(2): Sequential(
|
| 414 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 415 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
|
| 416 |
+
)
|
| 417 |
+
)
|
| 418 |
+
(convs2): ModuleList(
|
| 419 |
+
(0): Sequential(
|
| 420 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 421 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 422 |
+
)
|
| 423 |
+
(1): Sequential(
|
| 424 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 425 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 426 |
+
)
|
| 427 |
+
(2): Sequential(
|
| 428 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 429 |
+
(1): Conv1d(128, 128, kernel_size=(7,), stride=(1,), padding=(3,))
|
| 430 |
+
)
|
| 431 |
+
)
|
| 432 |
+
)
|
| 433 |
+
(5): ResidualBlock(
|
| 434 |
+
(convs1): ModuleList(
|
| 435 |
+
(0): Sequential(
|
| 436 |
+
(0): LeakyReLU(negative_slope=0.1)
|
| 437 |
+
(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
|
| 438 |
+
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(128, 128, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+)
+)
+(6): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+)
+)
+(7): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+)
+)
+(8): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(64, 64, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+)
+)
+(9): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(3,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(5,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+)
+)
+(10): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(9,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(15,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(7,), stride=(1,), padding=(3,))
+)
+)
+)
+(11): ResidualBlock(
+(convs1): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(15,), dilation=(3,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(25,), dilation=(5,))
+)
+)
+(convs2): ModuleList(
+(0): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(1): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+(2): Sequential(
+(0): LeakyReLU(negative_slope=0.1)
+(1): Conv1d(32, 32, kernel_size=(11,), stride=(1,), padding=(5,))
+)
+)
+)
+)
+(output_conv): Sequential(
+(0): LeakyReLU(negative_slope=0.01)
+(1): Conv1d(32, 1, kernel_size=(7,), stride=(1,), padding=(3,))
+(2): Tanh()
+)
+)
+)
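The repeated `ResidualBlock` entries above all follow the same pattern: `convs1` applies LeakyReLU plus a dilated Conv1d with dilations 1, 3, 5, while `convs2` applies LeakyReLU plus an undilated Conv1d, with padding chosen as (kernel_size - 1) / 2 * dilation so the time dimension is preserved. A minimal PyTorch sketch of that block, written only to illustrate the structure printed in this log (not the ESPnet source itself), could look like:

```python
import torch
import torch.nn as nn


class ResidualBlockSketch(nn.Module):
    """Illustrative HiFi-GAN-style residual block matching the repr above
    (channels in {32, 64, 128}, kernel_size in {3, 7, 11}, dilations (1, 3, 5))."""

    def __init__(self, channels: int = 128, kernel_size: int = 11, dilations=(1, 3, 5)):
        super().__init__()
        # "Same" padding for a dilated conv: (kernel_size - 1) // 2 * dilation,
        # which reproduces the padding values 5 / 15 / 25 seen for kernel 11.
        self.convs1 = nn.ModuleList(
            [
                nn.Sequential(
                    nn.LeakyReLU(0.1),
                    nn.Conv1d(channels, channels, kernel_size, stride=1,
                              padding=(kernel_size - 1) // 2 * d, dilation=d),
                )
                for d in dilations
            ]
        )
        self.convs2 = nn.ModuleList(
            [
                nn.Sequential(
                    nn.LeakyReLU(0.1),
                    nn.Conv1d(channels, channels, kernel_size, stride=1,
                              padding=(kernel_size - 1) // 2),
                )
                for _ in dilations
            ]
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Each (dilated conv, plain conv) pair is applied with a residual connection.
        for c1, c2 in zip(self.convs1, self.convs2):
            x = x + c2(c1(x))
        return x
```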
+(discriminator): HiFiGANMultiScaleMultiPeriodDiscriminator(
+(msd): HiFiGANMultiScaleDiscriminator(
+(discriminators): ModuleList(
+(0): HiFiGANScaleDiscriminator(
+(layers): ModuleList(
+(0): Sequential(
+(0): Conv1d(1, 128, kernel_size=(15,), stride=(1,), padding=(7,))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv1d(128, 128, kernel_size=(41,), stride=(2,), padding=(20,), groups=4)
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv1d(128, 256, kernel_size=(41,), stride=(2,), padding=(20,), groups=16)
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv1d(256, 512, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv1d(512, 1024, kernel_size=(41,), stride=(4,), padding=(20,), groups=16)
+(1): LeakyReLU(negative_slope=0.1)
+)
+(5): Sequential(
+(0): Conv1d(1024, 1024, kernel_size=(41,), stride=(1,), padding=(20,), groups=16)
+(1): LeakyReLU(negative_slope=0.1)
+)
+(6): Sequential(
+(0): Conv1d(1024, 1024, kernel_size=(5,), stride=(1,), padding=(2,))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(7): Conv1d(1024, 1, kernel_size=(3,), stride=(1,), padding=(1,))
+)
+)
+)
+)
+(mpd): HiFiGANMultiPeriodDiscriminator(
+(discriminators): ModuleList(
+(0): HiFiGANPeriodDiscriminator(
+(convs): ModuleList(
+(0): Sequential(
+(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+)
+(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
+)
+(1): HiFiGANPeriodDiscriminator(
+(convs): ModuleList(
+(0): Sequential(
+(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+)
+(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
+)
+(2): HiFiGANPeriodDiscriminator(
+(convs): ModuleList(
+(0): Sequential(
+(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+)
+(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
+)
+(3): HiFiGANPeriodDiscriminator(
+(convs): ModuleList(
+(0): Sequential(
+(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+)
+(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
+)
+(4): HiFiGANPeriodDiscriminator(
+(convs): ModuleList(
+(0): Sequential(
+(0): Conv2d(1, 32, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(1): Sequential(
+(0): Conv2d(32, 128, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(2): Sequential(
+(0): Conv2d(128, 512, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(3): Sequential(
+(0): Conv2d(512, 1024, kernel_size=(5, 1), stride=(3, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+(4): Sequential(
+(0): Conv2d(1024, 1024, kernel_size=(5, 1), stride=(1, 1), padding=(2, 0))
+(1): LeakyReLU(negative_slope=0.1)
+)
+)
+(output_conv): Conv2d(1024, 1, kernel_size=(2, 1), stride=(1, 1), padding=(1, 0))
+)
+)
+)
+)
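Each `HiFiGANPeriodDiscriminator` above looks at the waveform folded into a 2-D grid, which is why its convolutions use kernels of shape (5, 1): time is split into columns of length `period`, and the stride-(3, 1) Conv2d layers move only along the folded time axis, so the kernel compares samples that are `period` steps apart. A rough sketch of that folding step (illustrative only; the function name and example period are assumptions, not the ESPnet code):

```python
import torch
import torch.nn.functional as F


def fold_for_period_discriminator(wav: torch.Tensor, period: int) -> torch.Tensor:
    """Reshape (batch, 1, T) audio into (batch, 1, T', period) so that 2-D convs
    with kernel (5, 1) see samples spaced `period` apart along one axis."""
    b, c, t = wav.shape
    if t % period != 0:
        # Reflect-pad so the length becomes a multiple of the period.
        n_pad = period - (t % period)
        wav = F.pad(wav, (0, n_pad), mode="reflect")
        t = t + n_pad
    return wav.view(b, c, t // period, period)


# Example: a 1-second 22.05 kHz waveform folded with period 5.
x = torch.randn(1, 1, 22050)
print(fold_for_period_discriminator(x, period=5).shape)  # torch.Size([1, 1, 4410, 5])
```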
+(generator_adv_loss): GeneratorAdversarialLoss()
+(discriminator_adv_loss): DiscriminatorAdversarialLoss()
+(feat_match_loss): FeatureMatchLoss()
+(mel_loss): MelSpectrogramLoss(
+(wav_to_mel): LogMelFbank(
+(stft): Stft(n_fft=1024, win_length=1024, hop_length=256, center=True, normalized=False, onesided=True)
+(logmel): LogMel(sr=22050, n_fft=1024, n_mels=80, fmin=0, fmax=11025.0, htk=False)
+)
+)
+(var_loss): VarianceLoss(
+(mse_criterion): MSELoss()
+(duration_criterion): DurationPredictorLoss(
+(criterion): MSELoss()
+)
+)
+(forwardsum_loss): ForwardSumLoss()
+)
+2025-02-21 15:00:46,486 (font_manager:1547) INFO: generated new fontManager
+2025-02-21 15:00:52,596 (tts_inference:476) INFO: inference speed = 28695.1 points / sec.
+2025-02-21 15:00:52,596 (tts_inference:481) INFO: LJ049-0228 (size:77->132096)
+2025-02-21 15:00:55,107 (tts_inference:476) INFO: inference speed = 33329.1 points / sec.
+2025-02-21 15:00:55,107 (tts_inference:481) INFO: LJ049-0229 (size:46->83456)
+2025-02-21 15:00:58,907 (tts_inference:476) INFO: inference speed = 33267.1 points / sec.
+2025-02-21 15:00:58,907 (tts_inference:481) INFO: LJ049-0230 (size:82->126208)
+2025-02-21 15:01:06,051 (tts_inference:476) INFO: inference speed = 29661.0 points / sec.
+2025-02-21 15:01:06,051 (tts_inference:481) INFO: LJ050-0001 (size:114->211712)
+2025-02-21 15:01:07,207 (tts_inference:476) INFO: inference speed = 30770.5 points / sec.
+2025-02-21 15:01:07,207 (tts_inference:481) INFO: LJ050-0002 (size:22->35328)
+2025-02-21 15:01:12,206 (tts_inference:476) INFO: inference speed = 34343.2 points / sec.
+2025-02-21 15:01:12,206 (tts_inference:481) INFO: LJ050-0003 (size:95->171520)
+2025-02-21 15:01:13,851 (tts_inference:476) INFO: inference speed = 32191.8 points / sec.
+2025-02-21 15:01:13,852 (tts_inference:481) INFO: LJ050-0004 (size:31->52736)
+2025-02-21 15:01:16,497 (tts_inference:476) INFO: inference speed = 33832.6 points / sec.
+2025-02-21 15:01:16,498 (tts_inference:481) INFO: LJ050-0005 (size:48->89344)
+2025-02-21 15:01:20,199 (tts_inference:476) INFO: inference speed = 33866.7 points / sec.
+2025-02-21 15:01:20,199 (tts_inference:481) INFO: LJ050-0006 (size:79->125184)
+2025-02-21 15:01:23,340 (tts_inference:476) INFO: inference speed = 33900.4 points / sec.
+2025-02-21 15:01:23,340 (tts_inference:481) INFO: LJ050-0007 (size:63->106240)
+2025-02-21 15:01:28,612 (tts_inference:476) INFO: inference speed = 30333.8 points / sec.
+2025-02-21 15:01:28,612 (tts_inference:481) INFO: LJ050-0008 (size:96->159744)
+2025-02-21 15:01:34,772 (tts_inference:476) INFO: inference speed = 30829.7 points / sec.
+2025-02-21 15:01:34,772 (tts_inference:481) INFO: LJ050-0009 (size:95->189696)
+2025-02-21 15:01:38,869 (tts_inference:476) INFO: inference speed = 33867.1 points / sec.
+2025-02-21 15:01:38,869 (tts_inference:481) INFO: LJ050-0010 (size:81->138496)
+2025-02-21 15:01:41,433 (tts_inference:476) INFO: inference speed = 33734.5 points / sec.
+2025-02-21 15:01:41,434 (tts_inference:481) INFO: LJ050-0011 (size:47->86272)
+2025-02-21 15:01:45,449 (tts_inference:476) INFO: inference speed = 33899.0 points / sec.
+2025-02-21 15:01:45,449 (tts_inference:481) INFO: LJ050-0012 (size:74->135936)
+2025-02-21 15:01:50,782 (tts_inference:476) INFO: inference speed = 34126.9 points / sec.
+2025-02-21 15:01:50,782 (tts_inference:481) INFO: LJ050-0013 (size:104->181760)
+2025-02-21 15:01:54,074 (tts_inference:476) INFO: inference speed = 33591.2 points / sec.
+2025-02-21 15:01:54,074 (tts_inference:481) INFO: LJ050-0014 (size:69->110336)
+2025-02-21 15:01:59,239 (tts_inference:476) INFO: inference speed = 34137.4 points / sec.
+2025-02-21 15:01:59,240 (tts_inference:481) INFO: LJ050-0015 (size:101->176128)
+2025-02-21 15:02:05,927 (tts_inference:476) INFO: inference speed = 29430.7 points / sec.
+2025-02-21 15:02:05,927 (tts_inference:481) INFO: LJ050-0016 (size:98->196608)
+2025-02-21 15:02:08,124 (tts_inference:476) INFO: inference speed = 32631.6 points / sec.
+2025-02-21 15:02:08,124 (tts_inference:481) INFO: LJ050-0017 (size:43->71424)
+2025-02-21 15:02:12,444 (tts_inference:476) INFO: inference speed = 33818.5 points / sec.
+2025-02-21 15:02:12,444 (tts_inference:481) INFO: LJ050-0018 (size:94->145920)
+2025-02-21 15:02:14,543 (tts_inference:476) INFO: inference speed = 34137.0 points / sec.
+2025-02-21 15:02:14,543 (tts_inference:481) INFO: LJ050-0019 (size:41->71424)
+2025-02-21 15:02:18,552 (tts_inference:476) INFO: inference speed = 33372.4 points / sec.
+2025-02-21 15:02:18,553 (tts_inference:481) INFO: LJ050-0020 (size:76->133632)
+2025-02-21 15:02:24,057 (tts_inference:476) INFO: inference speed = 33764.5 points / sec.
+2025-02-21 15:02:24,057 (tts_inference:481) INFO: LJ050-0021 (size:103->185600)
+2025-02-21 15:02:28,484 (tts_inference:476) INFO: inference speed = 33996.9 points / sec.
+2025-02-21 15:02:28,485 (tts_inference:481) INFO: LJ050-0022 (size:83->150272)
+2025-02-21 15:02:29,526 (tts_inference:476) INFO: inference speed = 30420.5 points / sec.
+2025-02-21 15:02:29,527 (tts_inference:481) INFO: LJ050-0023 (size:16->31488)
+2025-02-21 15:02:34,598 (tts_inference:476) INFO: inference speed = 34004.2 points / sec.
+2025-02-21 15:02:34,598 (tts_inference:481) INFO: LJ050-0024 (size:100->172288)
+2025-02-21 15:02:36,718 (tts_inference:476) INFO: inference speed = 32597.2 points / sec.
+2025-02-21 15:02:36,718 (tts_inference:481) INFO: LJ050-0025 (size:35->68864)
+2025-02-21 15:02:40,664 (tts_inference:476) INFO: inference speed = 34109.1 points / sec.
+2025-02-21 15:02:40,664 (tts_inference:481) INFO: LJ050-0026 (size:67->134400)
+2025-02-21 15:02:43,668 (tts_inference:476) INFO: inference speed = 33387.8 points / sec.
+2025-02-21 15:02:43,669 (tts_inference:481) INFO: LJ050-0027 (size:52->100096)
+2025-02-21 15:02:48,660 (tts_inference:476) INFO: inference speed = 34000.0 points / sec.
+2025-02-21 15:02:48,660 (tts_inference:481) INFO: LJ050-0028 (size:86->169472)
+# Accounting: time=129 threads=1
+# Ended (code 0) at Fri Feb 21 15:02:49 JST 2025, elapsed time 129 seconds
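The `tts_inference` lines above report throughput as `points / sec.` together with the token-to-sample mapping (`size:77->132096` means 77 input tokens were expanded to 132,096 waveform samples). Assuming "points" are output samples at the 22,050 Hz rate shown in the `LogMelFbank` config of the model dump, the real-time factor for each utterance can be recovered from these lines; a small parsing sketch (the regexes and default file path are illustrative assumptions):

```python
import re

SAMPLE_RATE = 22050  # from LogMel(sr=22050, ...) in the model dump above

speed_re = re.compile(r"inference speed = ([\d.]+) points / sec")
size_re = re.compile(r"INFO: (\S+) \(size:(\d+)->(\d+)\)")


def summarize(log_path: str = "tts_inference.8.log") -> None:
    """Print utterance id, generated audio length, and real-time factor per log entry."""
    speed = None
    with open(log_path) as f:
        for line in f:
            m = speed_re.search(line)
            if m:
                speed = float(m.group(1))  # generated samples per wall-clock second
                continue
            m = size_re.search(line)
            if m and speed:
                utt, n_tokens, n_samples = m.group(1), int(m.group(2)), int(m.group(3))
                audio_sec = n_samples / SAMPLE_RATE
                rtf = SAMPLE_RATE / speed  # < 1.0 means faster than real time
                print(f"{utt}: {audio_sec:.2f}s audio from {n_tokens} tokens, RTF ~ {rtf:.2f}")


# e.g. LJ050-0001: 211712 samples ~ 9.6 s of audio, RTF ~ 22050 / 29661 ~ 0.74
```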
imdanboy/jets/decode_train.loss.ave/dev/speech_shape
ADDED
File without changes
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0008.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53154c6909b2f5d7486b1e38c52af3a5f3b223b6584154d4b01d15c2a68efbe3
+size 235052
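The wav files in this commit are stored as Git LFS pointers like the one above, so the repository itself only records a `version` line, a SHA-256 `oid`, and a byte `size`. After fetching the actual audio (for example with `git lfs pull` or the huggingface_hub download helpers), the local file can be checked against its pointer; a minimal sketch, with placeholder paths:

```python
import hashlib
from pathlib import Path


def verify_lfs_pointer(pointer_path: str, payload_path: str) -> bool:
    """Compare a downloaded file against the oid/size recorded in its LFS pointer."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].strip().removeprefix("sha256:")
    expected_size = int(fields["size"])

    data = Path(payload_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid


# e.g. verify_lfs_pointer("LJ049-0008.wav.pointer", "wav/LJ049-0008.wav")
```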
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0009.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a347a11c3eb2ea9a0c08dfd1845e8b4fa37e27920d809fb07f67ad3676222030
+size 421420
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0010.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:663044bca8589433afb5fc20ec8a18f964337c7505fdbdf2c73f52d5b3c9966f
+size 138284
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0011.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:05c3ad358d51eb1a5f1eaaf423993cc03e2769ee4c190d5cca09070a310c0ef4
+size 394284
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0012.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e3abe2f0b82076af28725b36a77536d20ad926b73b542aa56fec67813d8fcd2
+size 274988
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0013.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21a969d6cacf6967c7262b62c7ccc6d90404b0fa54f2d77a1660f03315b5c703
+size 261164
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0014.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7687aeac49cfb6884e27ebcc3d1522fa47812c25469093d2bc8816b0b6410a77
+size 226348
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0015.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d0c965ed87d913b98388b142077fe5b988f23883f6671811e9bafa5d789873a3
+size 252460
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0016.wav
ADDED
Binary file (65.6 kB).
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0017.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:25cd7bf352b4d5bd5bff0e415dc065497167329428423999eb4f6ee842b5d8b8
+size 312364
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0018.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ae58f55910e55ef883a695c3f508338a007b6e48eae483b75abe579928183cf
+size 139308
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0019.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:42d7d17015e4e8c939415b86bccc76f8d97b381158935e14bd809c9388d5b953
+size 302636
imdanboy/jets/decode_train.loss.ave/dev/wav/LJ049-0020.wav
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38df3b90bb2889b984e7e46c025d643afaa58c638d9abb7acc3f9717dad346e9
+size 307756