GearlessJo committed on
Commit
10a7f67
·
verified ·
1 Parent(s): 3f6ffcc

Added ljs and vctk pretrained models for mamba-vits2

Browse files
ljs_base/.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 20000,
7
+ "learning_rate": 2e-4,
8
+ "betas": [0.8, 0.99],
9
+ "eps": 1e-9,
10
+ "batch_size": 64,
11
+ "fp16_run": false,
12
+ "lr_decay": 0.999875,
13
+ "segment_size": 8192,
14
+ "init_lr_ratio": 1,
15
+ "warmup_epochs": 0,
16
+ "c_mel": 45,
17
+ "c_kl": 1.0
18
+ },
19
+ "data": {
20
+ "use_mel_posterior_encoder": true,
21
+ "training_files":"filelists/ljs_audio_text_train_filelist.txt.cleaned",
22
+ "validation_files":"filelists/ljs_audio_text_val_filelist.txt.cleaned",
23
+ "text_cleaners":["english_cleaners2"],
24
+ "max_wav_value": 32768.0,
25
+ "sampling_rate": 22050,
26
+ "filter_length": 1024,
27
+ "hop_length": 256,
28
+ "win_length": 1024,
29
+ "n_mel_channels": 80,
30
+ "mel_fmin": 0.0,
31
+ "mel_fmax": null,
32
+ "add_blank": false,
33
+ "n_speakers": 0,
34
+ "cleaned_text": true
35
+ },
36
+ "model": {
37
+ "use_mel_posterior_encoder": true,
38
+ "use_transformer_flows": true,
39
+ "transformer_flow_type": "pre_conv",
40
+ "use_spk_conditioned_encoder": false,
41
+ "use_noise_scaled_mas": true,
42
+ "use_duration_discriminator": false,
43
+ "duration_discriminator_type": "dur_disc_2",
44
+ "inter_channels": 192,
45
+ "hidden_channels": 192,
46
+ "filter_channels": 768,
47
+ "n_heads": 2,
48
+ "n_layers": 6,
49
+ "kernel_size": 3,
50
+ "p_dropout": 0.1,
51
+ "resblock": "1",
52
+ "resblock_kernel_sizes": [3,7,11],
53
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
54
+ "upsample_rates": [8,8,2,2],
55
+ "upsample_initial_channel": 512,
56
+ "upsample_kernel_sizes": [16,16,4,4],
57
+ "n_layers_q": 3,
58
+ "use_spectral_norm": false,
59
+ "use_sdp": false
60
+ }
61
+ }
ljs_base/.ipynb_checkpoints/train-checkpoint.log ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ 2024-07-31 09:47:02,780 ljs_base INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 20000, 'learning_rate': 0.0002, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 64, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 8192, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0}, 'data': {'use_mel_posterior_encoder': True, 'training_files': 'filelists/ljs_audio_text_train_filelist.txt.cleaned', 'validation_files': 'filelists/ljs_audio_text_val_filelist.txt.cleaned', 'text_cleaners': ['english_cleaners2'], 'max_wav_value': 32768.0, 'sampling_rate': 22050, 'filter_length': 1024, 'hop_length': 256, 'win_length': 1024, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None, 'add_blank': False, 'n_speakers': 0, 'cleaned_text': True}, 'model': {'use_mel_posterior_encoder': True, 'use_transformer_flows': True, 'transformer_flow_type': 'pre_conv', 'use_spk_conditioned_encoder': False, 'use_noise_scaled_mas': True, 'use_duration_discriminator': False, 'duration_discriminator_type': 'dur_disc_2', 'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [8, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'use_sdp': False}, 'model_dir': './logs/ljs_base'}
2
+ 2024-07-31 09:48:06,663 ljs_base INFO Train Epoch: 1 [0%]
3
+ 2024-07-31 09:48:06,663 ljs_base INFO [5.950986862182617, 4.56489372253418, 0.28700321912765503, 86.78934478759766, 1.9993789196014404, 364.4577941894531, 0, 0.0002]
4
+ 2024-07-31 09:48:15,369 ljs_base INFO Saving model and optimizer state at iteration 1 to ./logs/ljs_base/G_0.pth
5
+ 2024-07-31 09:48:15,888 ljs_base INFO Saving model and optimizer state at iteration 1 to ./logs/ljs_base/D_0.pth
6
+ 2024-07-31 09:57:20,234 ljs_base INFO ====> Epoch: 1
7
+ 2024-07-31 10:04:59,665 ljs_base INFO ====> Epoch: 2
ljs_base/G_13000.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5e478cc84376abf7928813d366dde48b71d7b15e5753e38de7eb3d86a458fe41
3
+ size 438589345
ljs_base/config.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 20000,
7
+ "learning_rate": 2e-4,
8
+ "betas": [0.8, 0.99],
9
+ "eps": 1e-9,
10
+ "batch_size": 64,
11
+ "fp16_run": false,
12
+ "lr_decay": 0.999875,
13
+ "segment_size": 8192,
14
+ "init_lr_ratio": 1,
15
+ "warmup_epochs": 0,
16
+ "c_mel": 45,
17
+ "c_kl": 1.0
18
+ },
19
+ "data": {
20
+ "use_mel_posterior_encoder": true,
21
+ "training_files":"filelists/ljs_audio_text_train_filelist.txt.cleaned",
22
+ "validation_files":"filelists/ljs_audio_text_val_filelist.txt.cleaned",
23
+ "text_cleaners":["english_cleaners2"],
24
+ "max_wav_value": 32768.0,
25
+ "sampling_rate": 22050,
26
+ "filter_length": 1024,
27
+ "hop_length": 256,
28
+ "win_length": 1024,
29
+ "n_mel_channels": 80,
30
+ "mel_fmin": 0.0,
31
+ "mel_fmax": null,
32
+ "add_blank": false,
33
+ "n_speakers": 0,
34
+ "cleaned_text": true
35
+ },
36
+ "model": {
37
+ "use_mel_posterior_encoder": true,
38
+ "use_transformer_flows": true,
39
+ "transformer_flow_type": "pre_conv",
40
+ "use_spk_conditioned_encoder": false,
41
+ "use_noise_scaled_mas": true,
42
+ "use_duration_discriminator": false,
43
+ "duration_discriminator_type": "dur_disc_2",
44
+ "inter_channels": 192,
45
+ "hidden_channels": 192,
46
+ "filter_channels": 768,
47
+ "n_heads": 2,
48
+ "n_layers": 6,
49
+ "kernel_size": 3,
50
+ "p_dropout": 0.1,
51
+ "resblock": "1",
52
+ "resblock_kernel_sizes": [3,7,11],
53
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
54
+ "upsample_rates": [8,8,2,2],
55
+ "upsample_initial_channel": 512,
56
+ "upsample_kernel_sizes": [16,16,4,4],
57
+ "n_layers_q": 3,
58
+ "use_spectral_norm": false,
59
+ "use_sdp": false
60
+ }
61
+ }
ljs_base/eval/events.out.tfevents.1722390422.autodl-container-ae2d4d944b-5e0c94ae.674115.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fa821a63106b18077fa7448764b80109d2e4927864533f0b048ac0b62590e841
3
+ size 7150770
ljs_base/events.out.tfevents.1722390422.autodl-container-ae2d4d944b-5e0c94ae.674115.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:07f40ffd830cc47b17c76c47f9eddc4892dd81c71c4d1cabdecf775ed54c7f3e
3
+ size 10702866
ljs_base/githash ADDED
@@ -0,0 +1 @@
 
 
1
+ 1f4f3790568180f8dec4419d5cad5d0877b034bb
ljs_base/train.log ADDED
@@ -0,0 +1,408 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-07-31 09:47:02,780 ljs_base INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 20000, 'learning_rate': 0.0002, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 64, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 8192, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0}, 'data': {'use_mel_posterior_encoder': True, 'training_files': 'filelists/ljs_audio_text_train_filelist.txt.cleaned', 'validation_files': 'filelists/ljs_audio_text_val_filelist.txt.cleaned', 'text_cleaners': ['english_cleaners2'], 'max_wav_value': 32768.0, 'sampling_rate': 22050, 'filter_length': 1024, 'hop_length': 256, 'win_length': 1024, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None, 'add_blank': False, 'n_speakers': 0, 'cleaned_text': True}, 'model': {'use_mel_posterior_encoder': True, 'use_transformer_flows': True, 'transformer_flow_type': 'pre_conv', 'use_spk_conditioned_encoder': False, 'use_noise_scaled_mas': True, 'use_duration_discriminator': False, 'duration_discriminator_type': 'dur_disc_2', 'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [8, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'use_sdp': False}, 'model_dir': './logs/ljs_base'}
2
+ 2024-07-31 09:48:06,663 ljs_base INFO Train Epoch: 1 [0%]
3
+ 2024-07-31 09:48:06,663 ljs_base INFO [5.950986862182617, 4.56489372253418, 0.28700321912765503, 86.78934478759766, 1.9993789196014404, 364.4577941894531, 0, 0.0002]
4
+ 2024-07-31 09:48:15,369 ljs_base INFO Saving model and optimizer state at iteration 1 to ./logs/ljs_base/G_0.pth
5
+ 2024-07-31 09:48:15,888 ljs_base INFO Saving model and optimizer state at iteration 1 to ./logs/ljs_base/D_0.pth
6
+ 2024-07-31 09:57:20,234 ljs_base INFO ====> Epoch: 1
7
+ 2024-07-31 10:04:59,665 ljs_base INFO ====> Epoch: 2
8
+ 2024-07-31 10:09:50,506 ljs_base INFO ====> Epoch: 3
9
+ 2024-07-31 10:13:08,044 ljs_base INFO Train Epoch: 4 [77%]
10
+ 2024-07-31 10:13:08,044 ljs_base INFO [2.598374366760254, 2.3966853618621826, 2.9164767265319824, 36.494815826416016, 1.0697224140167236, 2.802391290664673, 200, 0.00019992500937460937]
11
+ 2024-07-31 10:13:45,253 ljs_base INFO ====> Epoch: 4
12
+ 2024-07-31 10:17:38,542 ljs_base INFO ====> Epoch: 5
13
+ 2024-07-31 10:19:51,024 ljs_base INFO ====> Epoch: 6
14
+ 2024-07-31 10:22:16,164 ljs_base INFO ====> Epoch: 7
15
+ 2024-07-31 10:24:09,212 ljs_base INFO Train Epoch: 8 [55%]
16
+ 2024-07-31 10:24:09,213 ljs_base INFO [2.5060842037200928, 1.9938143491744995, 3.0472145080566406, 32.38056564331055, 1.03996741771698, 2.5931684970855713, 400, 0.00019982506561132978]
17
+ 2024-07-31 10:24:44,079 ljs_base INFO ====> Epoch: 8
18
+ 2024-07-31 10:27:07,086 ljs_base INFO ====> Epoch: 9
19
+ 2024-07-31 10:29:21,260 ljs_base INFO ====> Epoch: 10
20
+ 2024-07-31 10:30:43,429 ljs_base INFO ====> Epoch: 11
21
+ 2024-07-31 10:31:23,319 ljs_base INFO Train Epoch: 12 [32%]
22
+ 2024-07-31 10:31:23,320 ljs_base INFO [2.6676931381225586, 1.9234634637832642, 2.092226982116699, 31.165834426879883, 1.0158162117004395, 2.321983814239502, 600, 0.00019972517181056292]
23
+ 2024-07-31 10:32:02,807 ljs_base INFO ====> Epoch: 12
24
+ 2024-07-31 10:33:21,644 ljs_base INFO ====> Epoch: 13
25
+ 2024-07-31 10:34:47,962 ljs_base INFO ====> Epoch: 14
26
+ 2024-07-31 10:35:54,574 ljs_base INFO ====> Epoch: 15
27
+ 2024-07-31 10:36:25,611 ljs_base INFO Train Epoch: 16 [9%]
28
+ 2024-07-31 10:36:25,612 ljs_base INFO [2.6194863319396973, 2.199115037918091, 2.6871302127838135, 28.028554916381836, 1.0149562358856201, 2.3515214920043945, 800, 0.00019962532794733217]
29
+ 2024-07-31 10:37:00,567 ljs_base INFO ====> Epoch: 16
30
+ 2024-07-31 10:38:26,774 ljs_base INFO ====> Epoch: 17
31
+ 2024-07-31 10:39:41,586 ljs_base INFO ====> Epoch: 18
32
+ 2024-07-31 10:40:41,828 ljs_base INFO Train Epoch: 19 [87%]
33
+ 2024-07-31 10:40:41,829 ljs_base INFO [2.567964792251587, 2.2727222442626953, 2.4994242191314697, 27.290918350219727, 0.9692533016204834, 2.384336233139038, 1000, 0.00019955047780639926]
34
+ 2024-07-31 10:40:49,021 ljs_base INFO Saving model and optimizer state at iteration 19 to ./logs/ljs_base/G_1000.pth
35
+ 2024-07-31 10:40:49,462 ljs_base INFO Saving model and optimizer state at iteration 19 to ./logs/ljs_base/D_1000.pth
36
+ 2024-07-31 10:40:54,086 ljs_base INFO ====> Epoch: 19
37
+ 2024-07-31 10:42:03,343 ljs_base INFO ====> Epoch: 20
38
+ 2024-07-31 10:43:23,100 ljs_base INFO ====> Epoch: 21
39
+ 2024-07-31 10:44:50,298 ljs_base INFO ====> Epoch: 22
40
+ 2024-07-31 10:45:38,619 ljs_base INFO Train Epoch: 23 [64%]
41
+ 2024-07-31 10:45:38,621 ljs_base INFO [2.6730306148529053, 1.9099141359329224, 2.3974361419677734, 26.752132415771484, 0.9820241928100586, 2.4122674465179443, 1200, 0.00019945072127379438]
42
+ 2024-07-31 10:45:52,460 ljs_base INFO ====> Epoch: 23
43
+ 2024-07-31 10:46:56,357 ljs_base INFO ====> Epoch: 24
44
+ 2024-07-31 10:48:12,474 ljs_base INFO ====> Epoch: 25
45
+ 2024-07-31 10:49:24,079 ljs_base INFO ====> Epoch: 26
46
+ 2024-07-31 10:50:08,268 ljs_base INFO Train Epoch: 27 [42%]
47
+ 2024-07-31 10:50:08,269 ljs_base INFO [2.5688161849975586, 1.8044644594192505, 2.3296608924865723, 25.59964370727539, 0.9955724477767944, 2.5769269466400146, 1400, 0.00019935101461010442]
48
+ 2024-07-31 10:50:32,167 ljs_base INFO ====> Epoch: 27
49
+ 2024-07-31 10:51:46,788 ljs_base INFO ====> Epoch: 28
50
+ 2024-07-31 10:52:49,319 ljs_base INFO ====> Epoch: 29
51
+ 2024-07-31 10:54:09,703 ljs_base INFO ====> Epoch: 30
52
+ 2024-07-31 10:54:45,382 ljs_base INFO Train Epoch: 31 [19%]
53
+ 2024-07-31 10:54:45,383 ljs_base INFO [2.7217636108398438, 2.138204574584961, 2.4408438205718994, 26.48148536682129, 0.9820035696029663, 2.6803126335144043, 1600, 0.00019925135779039958]
54
+ 2024-07-31 10:55:15,152 ljs_base INFO ====> Epoch: 31
55
+ 2024-07-31 10:56:18,067 ljs_base INFO ====> Epoch: 32
56
+ 2024-07-31 10:57:33,472 ljs_base INFO ====> Epoch: 33
57
+ 2024-07-31 10:58:32,916 ljs_base INFO Train Epoch: 34 [96%]
58
+ 2024-07-31 10:58:32,917 ljs_base INFO [2.691612720489502, 1.856447458267212, 2.1974406242370605, 24.75498390197754, 0.9569270610809326, 2.5104684829711914, 1800, 0.0001991766478707464]
59
+ 2024-07-31 10:58:35,175 ljs_base INFO ====> Epoch: 34
60
+ 2024-07-31 10:59:36,113 ljs_base INFO ====> Epoch: 35
61
+ 2024-07-31 11:00:38,777 ljs_base INFO ====> Epoch: 36
62
+ 2024-07-31 11:01:41,916 ljs_base INFO ====> Epoch: 37
63
+ 2024-07-31 11:02:39,743 ljs_base INFO Train Epoch: 38 [74%]
64
+ 2024-07-31 11:02:39,744 ljs_base INFO [2.6265015602111816, 1.9174726009368896, 2.2177350521087646, 23.854719161987305, 0.7685348987579346, 2.4181604385375977, 2000, 0.0001990770782180657]
65
+ 2024-07-31 11:02:45,688 ljs_base INFO Saving model and optimizer state at iteration 38 to ./logs/ljs_base/G_2000.pth
66
+ 2024-07-31 11:02:46,468 ljs_base INFO Saving model and optimizer state at iteration 38 to ./logs/ljs_base/D_2000.pth
67
+ 2024-07-31 11:02:56,987 ljs_base INFO ====> Epoch: 38
68
+ 2024-07-31 11:04:10,491 ljs_base INFO ====> Epoch: 39
69
+ 2024-07-31 11:05:13,833 ljs_base INFO ====> Epoch: 40
70
+ 2024-07-31 11:06:15,786 ljs_base INFO ====> Epoch: 41
71
+ 2024-07-31 11:07:00,962 ljs_base INFO Train Epoch: 42 [51%]
72
+ 2024-07-31 11:07:00,964 ljs_base INFO [2.692326545715332, 1.7800456285476685, 2.0407416820526123, 23.844852447509766, 0.6173300743103027, 2.6184093952178955, 2200, 0.0001989775583408775]
73
+ 2024-07-31 11:07:17,962 ljs_base INFO ====> Epoch: 42
74
+ 2024-07-31 11:08:21,442 ljs_base INFO ====> Epoch: 43
75
+ 2024-07-31 11:09:24,548 ljs_base INFO ====> Epoch: 44
76
+ 2024-07-31 11:10:28,839 ljs_base INFO ====> Epoch: 45
77
+ 2024-07-31 11:11:07,610 ljs_base INFO Train Epoch: 46 [28%]
78
+ 2024-07-31 11:11:07,611 ljs_base INFO [2.7999165058135986, 1.9650318622589111, 1.924375295639038, 23.169322967529297, 0.5861815214157104, 2.3529186248779297, 2400, 0.00019887808821429862]
79
+ 2024-07-31 11:11:31,961 ljs_base INFO ====> Epoch: 46
80
+ 2024-07-31 11:12:35,794 ljs_base INFO ====> Epoch: 47
81
+ 2024-07-31 11:13:47,832 ljs_base INFO ====> Epoch: 48
82
+ 2024-07-31 11:14:51,255 ljs_base INFO ====> Epoch: 49
83
+ 2024-07-31 11:15:22,354 ljs_base INFO Train Epoch: 50 [6%]
84
+ 2024-07-31 11:15:22,355 ljs_base INFO [2.7900609970092773, 2.230989694595337, 2.1956465244293213, 23.458518981933594, 0.48977530002593994, 2.314560890197754, 2600, 0.00019877866781345852]
85
+ 2024-07-31 11:15:54,316 ljs_base INFO ====> Epoch: 50
86
+ 2024-07-31 11:17:03,226 ljs_base INFO ====> Epoch: 51
87
+ 2024-07-31 11:18:06,892 ljs_base INFO ====> Epoch: 52
88
+ 2024-07-31 11:19:03,067 ljs_base INFO Train Epoch: 53 [83%]
89
+ 2024-07-31 11:19:03,068 ljs_base INFO [2.7273552417755127, 1.834113359451294, 1.8409875631332397, 22.175968170166016, 0.5288352966308594, 2.318527936935425, 2800, 0.00019870413513039026]
90
+ 2024-07-31 11:19:09,503 ljs_base INFO ====> Epoch: 53
91
+ 2024-07-31 11:20:12,586 ljs_base INFO ====> Epoch: 54
92
+ 2024-07-31 11:21:14,710 ljs_base INFO ====> Epoch: 55
93
+ 2024-07-31 11:22:17,677 ljs_base INFO ====> Epoch: 56
94
+ 2024-07-31 11:23:05,628 ljs_base INFO Train Epoch: 57 [60%]
95
+ 2024-07-31 11:23:05,629 ljs_base INFO [2.789243459701538, 1.7825852632522583, 2.1028549671173096, 22.39212417602539, 0.49937522411346436, 2.3648788928985596, 3000, 0.00019860480168978534]
96
+ 2024-07-31 11:23:11,565 ljs_base INFO Saving model and optimizer state at iteration 57 to ./logs/ljs_base/G_3000.pth
97
+ 2024-07-31 11:23:12,007 ljs_base INFO Saving model and optimizer state at iteration 57 to ./logs/ljs_base/D_3000.pth
98
+ 2024-07-31 11:23:26,724 ljs_base INFO ====> Epoch: 57
99
+ 2024-07-31 11:24:43,266 ljs_base INFO ====> Epoch: 58
100
+ 2024-07-31 11:25:44,892 ljs_base INFO ====> Epoch: 59
101
+ 2024-07-31 11:26:47,388 ljs_base INFO ====> Epoch: 60
102
+ 2024-07-31 11:27:28,596 ljs_base INFO Train Epoch: 61 [38%]
103
+ 2024-07-31 11:27:28,597 ljs_base INFO [2.8034749031066895, 1.7416338920593262, 2.009138345718384, 21.6916561126709, 0.4841323792934418, 2.2971346378326416, 3200, 0.000198505517906589]
104
+ 2024-07-31 11:27:50,263 ljs_base INFO ====> Epoch: 61
105
+ 2024-07-31 11:28:54,897 ljs_base INFO ====> Epoch: 62
106
+ 2024-07-31 11:29:57,444 ljs_base INFO ====> Epoch: 63
107
+ 2024-07-31 11:30:59,332 ljs_base INFO ====> Epoch: 64
108
+ 2024-07-31 11:31:34,040 ljs_base INFO Train Epoch: 65 [15%]
109
+ 2024-07-31 11:31:34,041 ljs_base INFO [2.693572998046875, 1.7075475454330444, 2.0371503829956055, 21.750768661499023, 0.45888733863830566, 2.4769959449768066, 3400, 0.0001984062837559772]
110
+ 2024-07-31 11:32:13,627 ljs_base INFO ====> Epoch: 65
111
+ 2024-07-31 11:33:17,407 ljs_base INFO ====> Epoch: 66
112
+ 2024-07-31 11:34:22,839 ljs_base INFO ====> Epoch: 67
113
+ 2024-07-31 11:35:28,375 ljs_base INFO Train Epoch: 68 [92%]
114
+ 2024-07-31 11:35:28,376 ljs_base INFO [2.708404779434204, 1.8079609870910645, 2.1660284996032715, 21.539203643798828, 0.4587753415107727, 2.27506160736084, 3600, 0.00019833189069947573]
115
+ 2024-07-31 11:35:31,667 ljs_base INFO ====> Epoch: 68
116
+ 2024-07-31 11:36:33,008 ljs_base INFO ====> Epoch: 69
117
+ 2024-07-31 11:37:34,254 ljs_base INFO ====> Epoch: 70
118
+ 2024-07-31 11:38:36,451 ljs_base INFO ====> Epoch: 71
119
+ 2024-07-31 11:39:28,929 ljs_base INFO Train Epoch: 72 [70%]
120
+ 2024-07-31 11:39:28,929 ljs_base INFO [2.8704917430877686, 1.8068981170654297, 2.167020559310913, 22.63511848449707, 0.451168417930603, 2.418726921081543, 3800, 0.0001982327433461913]
121
+ 2024-07-31 11:39:39,517 ljs_base INFO ====> Epoch: 72
122
+ 2024-07-31 11:40:41,606 ljs_base INFO ====> Epoch: 73
123
+ 2024-07-31 11:41:45,331 ljs_base INFO ====> Epoch: 74
124
+ 2024-07-31 11:42:47,728 ljs_base INFO ====> Epoch: 75
125
+ 2024-07-31 11:43:32,921 ljs_base INFO Train Epoch: 76 [47%]
126
+ 2024-07-31 11:43:32,922 ljs_base INFO [2.747440814971924, 2.02388858795166, 2.1200990676879883, 21.184219360351562, 0.437663197517395, 2.3966639041900635, 4000, 0.00019813364555728923]
127
+ 2024-07-31 11:43:38,491 ljs_base INFO Saving model and optimizer state at iteration 76 to ./logs/ljs_base/G_4000.pth
128
+ 2024-07-31 11:43:38,918 ljs_base INFO Saving model and optimizer state at iteration 76 to ./logs/ljs_base/D_4000.pth
129
+ 2024-07-31 11:43:57,300 ljs_base INFO ====> Epoch: 76
130
+ 2024-07-31 11:44:59,722 ljs_base INFO ====> Epoch: 77
131
+ 2024-07-31 11:46:02,966 ljs_base INFO ====> Epoch: 78
132
+ 2024-07-31 11:47:05,990 ljs_base INFO ====> Epoch: 79
133
+ 2024-07-31 11:47:42,829 ljs_base INFO Train Epoch: 80 [25%]
134
+ 2024-07-31 11:47:42,830 ljs_base INFO [2.7974162101745605, 1.8035444021224976, 2.198517084121704, 21.88411521911621, 0.4499097466468811, 2.2326645851135254, 4200, 0.00019803459730799195]
135
+ 2024-07-31 11:48:09,648 ljs_base INFO ====> Epoch: 80
136
+ 2024-07-31 11:49:23,873 ljs_base INFO ====> Epoch: 81
137
+ 2024-07-31 11:50:25,973 ljs_base INFO ====> Epoch: 82
138
+ 2024-07-31 11:51:27,354 ljs_base INFO ====> Epoch: 83
139
+ 2024-07-31 11:51:56,318 ljs_base INFO Train Epoch: 84 [2%]
140
+ 2024-07-31 11:51:56,318 ljs_base INFO [2.6844029426574707, 1.9635779857635498, 2.2988109588623047, 21.776737213134766, 0.4187493920326233, 2.271404981613159, 4400, 0.00019793559857353432]
141
+ 2024-07-31 11:52:29,314 ljs_base INFO ====> Epoch: 84
142
+ 2024-07-31 11:53:32,549 ljs_base INFO ====> Epoch: 85
143
+ 2024-07-31 11:54:33,814 ljs_base INFO ====> Epoch: 86
144
+ 2024-07-31 11:55:27,756 ljs_base INFO Train Epoch: 87 [79%]
145
+ 2024-07-31 11:55:27,757 ljs_base INFO [2.8255152702331543, 1.739126443862915, 2.0086638927459717, 20.5821475982666, 0.41625258326530457, 2.2604920864105225, 4600, 0.0001978613820019138]
146
+ 2024-07-31 11:55:35,560 ljs_base INFO ====> Epoch: 87
147
+ 2024-07-31 11:56:37,377 ljs_base INFO ====> Epoch: 88
148
+ 2024-07-31 11:57:38,661 ljs_base INFO ====> Epoch: 89
149
+ 2024-07-31 11:58:40,527 ljs_base INFO ====> Epoch: 90
150
+ 2024-07-31 11:59:27,048 ljs_base INFO Train Epoch: 91 [57%]
151
+ 2024-07-31 11:59:27,050 ljs_base INFO [2.766979932785034, 1.9306485652923584, 2.136624813079834, 20.867385864257812, 0.43413394689559937, 2.1930770874023438, 4800, 0.00019776246985887165]
152
+ 2024-07-31 11:59:42,173 ljs_base INFO ====> Epoch: 91
153
+ 2024-07-31 12:00:44,170 ljs_base INFO ====> Epoch: 92
154
+ 2024-07-31 12:01:45,512 ljs_base INFO ====> Epoch: 93
155
+ 2024-07-31 12:02:48,124 ljs_base INFO ====> Epoch: 94
156
+ 2024-07-31 12:03:27,495 ljs_base INFO Train Epoch: 95 [34%]
157
+ 2024-07-31 12:03:27,496 ljs_base INFO [2.7606403827667236, 2.2221240997314453, 2.3432023525238037, 21.488948822021484, 0.37125933170318604, 2.430724620819092, 5000, 0.00019766360716262876]
158
+ 2024-07-31 12:03:36,059 ljs_base INFO Saving model and optimizer state at iteration 95 to ./logs/ljs_base/G_5000.pth
159
+ 2024-07-31 12:03:36,633 ljs_base INFO Saving model and optimizer state at iteration 95 to ./logs/ljs_base/D_5000.pth
160
+ 2024-07-31 12:03:59,519 ljs_base INFO ====> Epoch: 95
161
+ 2024-07-31 12:05:00,964 ljs_base INFO ====> Epoch: 96
162
+ 2024-07-31 12:06:03,040 ljs_base INFO ====> Epoch: 97
163
+ 2024-07-31 12:07:04,382 ljs_base INFO ====> Epoch: 98
164
+ 2024-07-31 12:07:37,351 ljs_base INFO Train Epoch: 99 [11%]
165
+ 2024-07-31 12:07:37,352 ljs_base INFO [2.767786979675293, 1.9554927349090576, 2.2976443767547607, 20.838756561279297, 0.4236322045326233, 2.402434825897217, 5200, 0.0001975647938884664]
166
+ 2024-07-31 12:08:07,278 ljs_base INFO ====> Epoch: 99
167
+ 2024-07-31 12:09:10,057 ljs_base INFO ====> Epoch: 100
168
+ 2024-07-31 12:10:12,083 ljs_base INFO ====> Epoch: 101
169
+ 2024-07-31 12:11:08,575 ljs_base INFO Train Epoch: 102 [89%]
170
+ 2024-07-31 12:11:08,576 ljs_base INFO [2.8373074531555176, 2.237924337387085, 2.3840789794921875, 20.795927047729492, 0.4319656193256378, 2.258962392807007, 5400, 0.00019749071635122203]
171
+ 2024-07-31 12:11:13,697 ljs_base INFO ====> Epoch: 102
172
+ 2024-07-31 12:12:17,164 ljs_base INFO ====> Epoch: 103
173
+ 2024-07-31 12:13:18,565 ljs_base INFO ====> Epoch: 104
174
+ 2024-07-31 12:14:20,460 ljs_base INFO ====> Epoch: 105
175
+ 2024-07-31 12:15:09,699 ljs_base INFO Train Epoch: 106 [66%]
176
+ 2024-07-31 12:15:09,700 ljs_base INFO [2.696894645690918, 1.746582269668579, 2.6326632499694824, 20.629104614257812, 0.4177781343460083, 2.228642463684082, 5600, 0.0001973919895062582]
177
+ 2024-07-31 12:15:21,717 ljs_base INFO ====> Epoch: 106
178
+ 2024-07-31 12:16:24,476 ljs_base INFO ====> Epoch: 107
179
+ 2024-07-31 12:17:26,008 ljs_base INFO ====> Epoch: 108
180
+ 2024-07-31 12:18:27,205 ljs_base INFO ====> Epoch: 109
181
+ 2024-07-31 12:19:10,028 ljs_base INFO Train Epoch: 110 [43%]
182
+ 2024-07-31 12:19:10,029 ljs_base INFO [2.7918827533721924, 1.988091230392456, 2.3572394847869873, 20.86115074157715, 0.40455320477485657, 2.3338770866394043, 5800, 0.00019729331201546197]
183
+ 2024-07-31 12:19:29,434 ljs_base INFO ====> Epoch: 110
184
+ 2024-07-31 12:20:30,488 ljs_base INFO ====> Epoch: 111
185
+ 2024-07-31 12:21:32,240 ljs_base INFO ====> Epoch: 112
186
+ 2024-07-31 12:22:33,769 ljs_base INFO ====> Epoch: 113
187
+ 2024-07-31 12:23:09,058 ljs_base INFO Train Epoch: 114 [21%]
188
+ 2024-07-31 12:23:09,060 ljs_base INFO [2.6772844791412354, 1.786861538887024, 2.5001933574676514, 20.281513214111328, 0.39335358142852783, 2.241015672683716, 6000, 0.0001971946838541609]
189
+ 2024-07-31 12:23:14,725 ljs_base INFO Saving model and optimizer state at iteration 114 to ./logs/ljs_base/G_6000.pth
190
+ 2024-07-31 12:23:15,171 ljs_base INFO Saving model and optimizer state at iteration 114 to ./logs/ljs_base/D_6000.pth
191
+ 2024-07-31 12:23:42,075 ljs_base INFO ====> Epoch: 114
192
+ 2024-07-31 12:24:44,107 ljs_base INFO ====> Epoch: 115
193
+ 2024-07-31 12:25:44,974 ljs_base INFO ====> Epoch: 116
194
+ 2024-07-31 12:26:44,585 ljs_base INFO Train Epoch: 117 [98%]
195
+ 2024-07-31 12:26:44,586 ljs_base INFO [2.7479491233825684, 2.18731951713562, 2.457127571105957, 20.635019302368164, 0.4147069454193115, 2.334766149520874, 6200, 0.0001971207450908312]
196
+ 2024-07-31 12:26:46,068 ljs_base INFO ====> Epoch: 117
197
+ 2024-07-31 12:27:47,504 ljs_base INFO ====> Epoch: 118
198
+ 2024-07-31 12:28:48,601 ljs_base INFO ====> Epoch: 119
199
+ 2024-07-31 12:29:49,136 ljs_base INFO ====> Epoch: 120
200
+ 2024-07-31 12:30:42,311 ljs_base INFO Train Epoch: 121 [75%]
201
+ 2024-07-31 12:30:42,313 ljs_base INFO [2.6838886737823486, 1.8781787157058716, 2.6389756202697754, 20.808956146240234, 0.40973609685897827, 2.3969216346740723, 6400, 0.00019702220319681561]
202
+ 2024-07-31 12:30:50,927 ljs_base INFO ====> Epoch: 121
203
+ 2024-07-31 12:31:53,232 ljs_base INFO ====> Epoch: 122
204
+ 2024-07-31 12:32:54,451 ljs_base INFO ====> Epoch: 123
205
+ 2024-07-31 12:33:55,390 ljs_base INFO ====> Epoch: 124
206
+ 2024-07-31 12:34:41,890 ljs_base INFO Train Epoch: 125 [53%]
207
+ 2024-07-31 12:34:41,891 ljs_base INFO [2.6501541137695312, 1.824973702430725, 2.7520337104797363, 21.05621337890625, 0.4201490879058838, 2.323972463607788, 6600, 0.00019692371056450955]
208
+ 2024-07-31 12:34:57,684 ljs_base INFO ====> Epoch: 125
209
+ 2024-07-31 12:35:58,868 ljs_base INFO ====> Epoch: 126
210
+ 2024-07-31 12:37:00,315 ljs_base INFO ====> Epoch: 127
211
+ 2024-07-31 12:38:01,164 ljs_base INFO ====> Epoch: 128
212
+ 2024-07-31 12:38:38,578 ljs_base INFO Train Epoch: 129 [30%]
213
+ 2024-07-31 12:38:38,580 ljs_base INFO [2.73425555229187, 2.007103204727173, 2.546501636505127, 20.194446563720703, 0.3779953122138977, 2.3183279037475586, 6800, 0.00019682526716928672]
214
+ 2024-07-31 12:39:02,921 ljs_base INFO ====> Epoch: 129
215
+ 2024-07-31 12:40:05,642 ljs_base INFO ====> Epoch: 130
216
+ 2024-07-31 12:41:08,137 ljs_base INFO ====> Epoch: 131
217
+ 2024-07-31 12:42:09,517 ljs_base INFO ====> Epoch: 132
218
+ 2024-07-31 12:42:40,509 ljs_base INFO Train Epoch: 133 [8%]
219
+ 2024-07-31 12:42:40,509 ljs_base INFO [2.690202474594116, 1.8971304893493652, 2.877265691757202, 20.277921676635742, 0.3907316327095032, 2.2887628078460693, 7000, 0.00019672687298653317]
220
+ 2024-07-31 12:42:48,862 ljs_base INFO Saving model and optimizer state at iteration 133 to ./logs/ljs_base/G_7000.pth
221
+ 2024-07-31 12:42:49,272 ljs_base INFO Saving model and optimizer state at iteration 133 to ./logs/ljs_base/D_7000.pth
222
+ 2024-07-31 12:43:20,839 ljs_base INFO ====> Epoch: 133
223
+ 2024-07-31 12:44:22,330 ljs_base INFO ====> Epoch: 134
224
+ 2024-07-31 12:45:23,274 ljs_base INFO ====> Epoch: 135
225
+ 2024-07-31 12:46:18,894 ljs_base INFO Train Epoch: 136 [85%]
226
+ 2024-07-31 12:46:18,895 ljs_base INFO [2.8278086185455322, 1.9972014427185059, 2.480008363723755, 20.03356170654297, 0.3904039263725281, 2.4505960941314697, 7200, 0.00019665310963035113]
227
+ 2024-07-31 12:46:25,741 ljs_base INFO ====> Epoch: 136
228
+ 2024-07-31 12:47:27,951 ljs_base INFO ====> Epoch: 137
229
+ 2024-07-31 12:48:28,681 ljs_base INFO ====> Epoch: 138
230
+ 2024-07-31 12:49:29,181 ljs_base INFO ====> Epoch: 139
231
+ 2024-07-31 12:50:17,078 ljs_base INFO Train Epoch: 140 [62%]
232
+ 2024-07-31 12:50:17,079 ljs_base INFO [2.733610153198242, 1.769425630569458, 2.431081771850586, 19.766481399536133, 0.39650705456733704, 2.296481132507324, 7400, 0.00019655480151022865]
233
+ 2024-07-31 12:50:30,951 ljs_base INFO ====> Epoch: 140
234
+ 2024-07-31 12:51:32,029 ljs_base INFO ====> Epoch: 141
235
+ 2024-07-31 12:52:32,471 ljs_base INFO ====> Epoch: 142
236
+ 2024-07-31 12:53:34,077 ljs_base INFO ====> Epoch: 143
237
+ 2024-07-31 12:54:15,006 ljs_base INFO Train Epoch: 144 [40%]
238
+ 2024-07-31 12:54:15,008 ljs_base INFO [2.697082042694092, 1.9279816150665283, 3.054624319076538, 20.100099563598633, 0.3916524648666382, 2.503857374191284, 7600, 0.00019645654253495058]
239
+ 2024-07-31 12:54:35,623 ljs_base INFO ====> Epoch: 144
240
+ 2024-07-31 12:55:37,341 ljs_base INFO ====> Epoch: 145
241
+ 2024-07-31 12:56:38,715 ljs_base INFO ====> Epoch: 146
242
+ 2024-07-31 12:57:39,964 ljs_base INFO ====> Epoch: 147
243
+ 2024-07-31 12:58:14,380 ljs_base INFO Train Epoch: 148 [17%]
244
+ 2024-07-31 12:58:14,382 ljs_base INFO [2.718626022338867, 1.992788314819336, 2.7710390090942383, 20.087230682373047, 0.3715357482433319, 2.3527145385742188, 7800, 0.00019635833267994917]
245
+ 2024-07-31 12:58:41,985 ljs_base INFO ====> Epoch: 148
246
+ 2024-07-31 12:59:42,979 ljs_base INFO ====> Epoch: 149
247
+ 2024-07-31 13:00:42,114 ljs_base INFO ====> Epoch: 150
248
+ 2024-07-31 13:01:42,852 ljs_base INFO Train Epoch: 151 [94%]
249
+ 2024-07-31 13:01:42,853 ljs_base INFO [2.7227392196655273, 1.8883345127105713, 2.29677152633667, 18.63558006286621, 0.39265352487564087, 2.289888381958008, 8000, 0.0001962847075091075]
250
+ 2024-07-31 13:01:48,529 ljs_base INFO Saving model and optimizer state at iteration 151 to ./logs/ljs_base/G_8000.pth
251
+ 2024-07-31 13:01:48,986 ljs_base INFO Saving model and optimizer state at iteration 151 to ./logs/ljs_base/D_8000.pth
252
+ 2024-07-31 13:01:52,375 ljs_base INFO ====> Epoch: 151
253
+ 2024-07-31 13:02:55,727 ljs_base INFO ====> Epoch: 152
254
+ 2024-07-31 13:03:56,546 ljs_base INFO ====> Epoch: 153
255
+ 2024-07-31 13:04:58,594 ljs_base INFO ====> Epoch: 154
256
+ 2024-07-31 13:05:50,586 ljs_base INFO Train Epoch: 155 [72%]
257
+ 2024-07-31 13:05:50,586 ljs_base INFO [2.736520528793335, 1.5712916851043701, 2.663147449493408, 20.486469268798828, 0.4086706042289734, 2.454172134399414, 8200, 0.00019618658355551082]
258
+ 2024-07-31 13:06:00,711 ljs_base INFO ====> Epoch: 155
259
+ 2024-07-31 13:07:01,503 ljs_base INFO ====> Epoch: 156
260
+ 2024-07-31 13:08:01,990 ljs_base INFO ====> Epoch: 157
261
+ 2024-07-31 13:09:11,841 ljs_base INFO ====> Epoch: 158
262
+ 2024-07-31 13:09:56,961 ljs_base INFO Train Epoch: 159 [49%]
263
+ 2024-07-31 13:09:56,962 ljs_base INFO [2.6040573120117188, 2.048781156539917, 3.060547351837158, 20.133474349975586, 0.3825329840183258, 2.4331347942352295, 8400, 0.00019608850865469258]
264
+ 2024-07-31 13:10:14,089 ljs_base INFO ====> Epoch: 159
265
+ 2024-07-31 13:11:15,069 ljs_base INFO ====> Epoch: 160
266
+ 2024-07-31 13:12:16,981 ljs_base INFO ====> Epoch: 161
267
+ 2024-07-31 13:13:18,490 ljs_base INFO ====> Epoch: 162
268
+ 2024-07-31 13:13:56,195 ljs_base INFO Train Epoch: 163 [26%]
269
+ 2024-07-31 13:13:56,195 ljs_base INFO [2.592172622680664, 2.093310594558716, 3.0670385360717773, 19.600814819335938, 0.38214850425720215, 2.352736234664917, 8600, 0.000195990482782131]
270
+ 2024-07-31 13:14:20,769 ljs_base INFO ====> Epoch: 163
271
+ 2024-07-31 13:15:21,570 ljs_base INFO ====> Epoch: 164
272
+ 2024-07-31 13:16:23,715 ljs_base INFO ====> Epoch: 165
273
+ 2024-07-31 13:17:25,378 ljs_base INFO ====> Epoch: 166
274
+ 2024-07-31 13:17:54,149 ljs_base INFO Train Epoch: 167 [4%]
275
+ 2024-07-31 13:17:54,149 ljs_base INFO [2.6090869903564453, 2.0218873023986816, 3.1224050521850586, 20.05098533630371, 0.34174489974975586, 2.4697113037109375, 8800, 0.00019589250591331656]
276
+ 2024-07-31 13:18:27,402 ljs_base INFO ====> Epoch: 167
277
+ 2024-07-31 13:19:29,519 ljs_base INFO ====> Epoch: 168
278
+ 2024-07-31 13:20:31,724 ljs_base INFO ====> Epoch: 169
279
+ 2024-07-31 13:21:26,992 ljs_base INFO Train Epoch: 170 [81%]
280
+ 2024-07-31 13:21:26,993 ljs_base INFO [2.571429967880249, 1.9023668766021729, 2.8853037357330322, 19.971351623535156, 0.38279369473457336, 2.413917064666748, 9000, 0.00019581905540567768]
281
+ 2024-07-31 13:21:32,310 ljs_base INFO Saving model and optimizer state at iteration 170 to ./logs/ljs_base/G_9000.pth
282
+ 2024-07-31 13:21:32,742 ljs_base INFO Saving model and optimizer state at iteration 170 to ./logs/ljs_base/D_9000.pth
283
+ 2024-07-31 13:21:39,623 ljs_base INFO ====> Epoch: 170
284
+ 2024-07-31 13:22:40,912 ljs_base INFO ====> Epoch: 171
285
+ 2024-07-31 13:23:41,863 ljs_base INFO ====> Epoch: 172
286
+ 2024-07-31 13:24:43,399 ljs_base INFO ====> Epoch: 173
287
+ 2024-07-31 13:25:30,628 ljs_base INFO Train Epoch: 174 [58%]
288
+ 2024-07-31 13:25:30,630 ljs_base INFO [2.697964906692505, 1.9248664379119873, 2.778087854385376, 19.78999137878418, 0.3748275637626648, 2.4526729583740234, 9200, 0.00019572116423448148]
289
+ 2024-07-31 13:25:45,038 ljs_base INFO ====> Epoch: 174
290
+ 2024-07-31 13:26:56,768 ljs_base INFO ====> Epoch: 175
291
+ 2024-07-31 13:27:58,225 ljs_base INFO ====> Epoch: 176
292
+ 2024-07-31 13:29:01,041 ljs_base INFO ====> Epoch: 177
293
+ 2024-07-31 13:29:42,321 ljs_base INFO Train Epoch: 178 [36%]
294
+ 2024-07-31 13:29:42,322 ljs_base INFO [2.6142427921295166, 2.272185802459717, 3.0849814414978027, 19.250341415405273, 0.40428391098976135, 2.2674968242645264, 9400, 0.00019562332199969432]
295
+ 2024-07-31 13:30:03,418 ljs_base INFO ====> Epoch: 178
296
+ 2024-07-31 13:31:04,653 ljs_base INFO ====> Epoch: 179
297
+ 2024-07-31 13:32:06,883 ljs_base INFO ====> Epoch: 180
298
+ 2024-07-31 13:33:07,873 ljs_base INFO ====> Epoch: 181
299
+ 2024-07-31 13:33:40,591 ljs_base INFO Train Epoch: 182 [13%]
300
+ 2024-07-31 13:33:40,593 ljs_base INFO [2.715524673461914, 1.9258995056152344, 3.167515277862549, 20.08539390563965, 0.3926645517349243, 2.3616135120391846, 9600, 0.00019552552867685262]
301
+ 2024-07-31 13:34:09,747 ljs_base INFO ====> Epoch: 182
302
+ 2024-07-31 13:35:10,692 ljs_base INFO ====> Epoch: 183
303
+ 2024-07-31 13:36:11,393 ljs_base INFO ====> Epoch: 184
304
+ 2024-07-31 13:37:08,923 ljs_base INFO Train Epoch: 185 [91%]
305
+ 2024-07-31 13:37:08,923 ljs_base INFO [2.7231550216674805, 1.767869234085083, 2.9239516258239746, 19.44363784790039, 0.37466052174568176, 2.4596214294433594, 9800, 0.00019545221576847604]
306
+ 2024-07-31 13:37:12,874 ljs_base INFO ====> Epoch: 185
307
+ 2024-07-31 13:38:14,011 ljs_base INFO ====> Epoch: 186
308
+ 2024-07-31 13:39:14,543 ljs_base INFO ====> Epoch: 187
309
+ 2024-07-31 13:40:23,162 ljs_base INFO ====> Epoch: 188
310
+ 2024-07-31 13:41:24,083 ljs_base INFO Train Epoch: 189 [68%]
311
+ 2024-07-31 13:41:24,083 ljs_base INFO [2.5794930458068848, 1.907576560974121, 2.964963674545288, 19.269968032836914, 0.38736486434936523, 2.2458813190460205, 10000, 0.00019535450798271008]
312
+ 2024-07-31 13:41:29,972 ljs_base INFO Saving model and optimizer state at iteration 189 to ./logs/ljs_base/G_10000.pth
313
+ 2024-07-31 13:41:30,392 ljs_base INFO Saving model and optimizer state at iteration 189 to ./logs/ljs_base/D_10000.pth
314
+ 2024-07-31 13:41:41,959 ljs_base INFO ====> Epoch: 189
315
+ 2024-07-31 13:42:43,135 ljs_base INFO ====> Epoch: 190
316
+ 2024-07-31 13:43:43,413 ljs_base INFO ====> Epoch: 191
317
+ 2024-07-31 13:44:44,913 ljs_base INFO ====> Epoch: 192
318
+ 2024-07-31 13:45:28,356 ljs_base INFO Train Epoch: 193 [45%]
319
+ 2024-07-31 13:45:28,358 ljs_base INFO [2.6886351108551025, 1.918583631515503, 2.8772008419036865, 19.55173110961914, 0.3469145894050598, 2.3171496391296387, 10200, 0.00019525684904167766]
320
+ 2024-07-31 13:45:47,044 ljs_base INFO ====> Epoch: 193
321
+ 2024-07-31 13:46:49,175 ljs_base INFO ====> Epoch: 194
322
+ 2024-07-31 13:47:49,660 ljs_base INFO ====> Epoch: 195
323
+ 2024-07-31 13:48:50,945 ljs_base INFO ====> Epoch: 196
324
+ 2024-07-31 13:49:26,824 ljs_base INFO Train Epoch: 197 [23%]
325
+ 2024-07-31 13:49:26,825 ljs_base INFO [2.6843950748443604, 1.9798598289489746, 2.903687000274658, 19.730886459350586, 0.3818723261356354, 2.300738573074341, 10400, 0.00019515923892096098]
326
+ 2024-07-31 13:49:53,061 ljs_base INFO ====> Epoch: 197
327
+ 2024-07-31 13:50:55,553 ljs_base INFO ====> Epoch: 198
328
+ 2024-07-31 13:51:57,919 ljs_base INFO ====> Epoch: 199
329
+ 2024-07-31 13:52:59,481 ljs_base INFO ====> Epoch: 200
330
+ 2024-07-31 13:53:28,106 ljs_base INFO Train Epoch: 201 [0%]
331
+ 2024-07-31 13:53:28,107 ljs_base INFO [2.6738975048065186, 2.142892599105835, 3.4255268573760986, 19.580678939819336, 0.38902825117111206, 2.2738254070281982, 10600, 0.00019506167759615451]
332
+ 2024-07-31 13:54:01,344 ljs_base INFO ====> Epoch: 201
333
+ 2024-07-31 13:55:02,011 ljs_base INFO ====> Epoch: 202
334
+ 2024-07-31 13:56:03,339 ljs_base INFO ====> Epoch: 203
335
+ 2024-07-31 13:56:56,990 ljs_base INFO Train Epoch: 204 [77%]
336
+ 2024-07-31 13:56:56,991 ljs_base INFO [2.727489709854126, 2.2826640605926514, 3.1736278533935547, 19.286169052124023, 0.392184853553772, 2.2134885787963867, 10800, 0.0001949885386101911]
337
+ 2024-07-31 13:57:06,391 ljs_base INFO ====> Epoch: 204
338
+ 2024-07-31 13:58:07,308 ljs_base INFO ====> Epoch: 205
339
+ 2024-07-31 13:59:08,291 ljs_base INFO ====> Epoch: 206
340
+ 2024-07-31 14:00:08,389 ljs_base INFO ====> Epoch: 207
341
+ 2024-07-31 14:00:54,913 ljs_base INFO Train Epoch: 208 [55%]
342
+ 2024-07-31 14:00:54,913 ljs_base INFO [2.688708782196045, 2.0635128021240234, 3.0084340572357178, 19.054336547851562, 0.3826081156730652, 2.2725555896759033, 11000, 0.00019489106261953815]
343
+ 2024-07-31 14:01:00,782 ljs_base INFO Saving model and optimizer state at iteration 208 to ./logs/ljs_base/G_11000.pth
344
+ 2024-07-31 14:01:01,397 ljs_base INFO Saving model and optimizer state at iteration 208 to ./logs/ljs_base/D_11000.pth
345
+ 2024-07-31 14:01:17,623 ljs_base INFO ====> Epoch: 208
346
+ 2024-07-31 14:02:19,195 ljs_base INFO ====> Epoch: 209
347
+ 2024-07-31 14:03:21,791 ljs_base INFO ====> Epoch: 210
348
+ 2024-07-31 14:04:23,602 ljs_base INFO ====> Epoch: 211
349
+ 2024-07-31 14:05:01,918 ljs_base INFO Train Epoch: 212 [32%]
350
+ 2024-07-31 14:05:01,919 ljs_base INFO [2.6519405841827393, 1.9010465145111084, 3.4010229110717773, 19.839054107666016, 0.3316868245601654, 2.306504964828491, 11200, 0.00019479363535774292]
351
+ 2024-07-31 14:05:25,185 ljs_base INFO ====> Epoch: 212
352
+ 2024-07-31 14:06:26,936 ljs_base INFO ====> Epoch: 213
353
+ 2024-07-31 14:07:28,499 ljs_base INFO ====> Epoch: 214
354
+ 2024-07-31 14:08:30,092 ljs_base INFO ====> Epoch: 215
355
+ 2024-07-31 14:09:01,410 ljs_base INFO Train Epoch: 216 [9%]
356
+ 2024-07-31 14:09:01,411 ljs_base INFO [2.5793371200561523, 2.007523536682129, 3.5202882289886475, 19.615947723388672, 0.3394443392753601, 2.2847044467926025, 11400, 0.00019469625680044555]
357
+ 2024-07-31 14:09:31,811 ljs_base INFO ====> Epoch: 216
358
+ 2024-07-31 14:10:32,668 ljs_base INFO ====> Epoch: 217
359
+ 2024-07-31 14:11:34,071 ljs_base INFO ====> Epoch: 218
360
+ 2024-07-31 14:12:30,644 ljs_base INFO Train Epoch: 219 [87%]
361
+ 2024-07-31 14:12:30,645 ljs_base INFO [2.6956515312194824, 2.1208302974700928, 2.9711387157440186, 19.217050552368164, 0.3882138431072235, 2.339681625366211, 11600, 0.00019462325483015215]
362
+ 2024-07-31 14:12:35,788 ljs_base INFO ====> Epoch: 219
363
+ 2024-07-31 14:13:38,482 ljs_base INFO ====> Epoch: 220
364
+ 2024-07-31 14:14:40,825 ljs_base INFO ====> Epoch: 221
365
+ 2024-07-31 14:15:42,441 ljs_base INFO ====> Epoch: 222
366
+ 2024-07-31 14:16:31,768 ljs_base INFO Train Epoch: 223 [64%]
367
+ 2024-07-31 14:16:31,769 ljs_base INFO [2.7277979850769043, 2.0454678535461426, 3.1746811866760254, 18.723020553588867, 0.35787707567214966, 2.336127758026123, 11800, 0.00019452596144714675]
368
+ 2024-07-31 14:16:44,285 ljs_base INFO ====> Epoch: 223
369
+ 2024-07-31 14:17:44,836 ljs_base INFO ====> Epoch: 224
370
+ 2024-07-31 14:18:45,625 ljs_base INFO ====> Epoch: 225
371
+ 2024-07-31 14:19:47,202 ljs_base INFO ====> Epoch: 226
372
+ 2024-07-31 14:20:28,989 ljs_base INFO Train Epoch: 227 [42%]
373
+ 2024-07-31 14:20:28,990 ljs_base INFO [2.650608777999878, 1.9724624156951904, 3.0053796768188477, 18.978097915649414, 0.3906213641166687, 2.413555145263672, 12000, 0.00019442871670171237]
374
+ 2024-07-31 14:20:34,726 ljs_base INFO Saving model and optimizer state at iteration 227 to ./logs/ljs_base/G_12000.pth
375
+ 2024-07-31 14:20:35,169 ljs_base INFO Saving model and optimizer state at iteration 227 to ./logs/ljs_base/D_12000.pth
376
+ 2024-07-31 14:20:55,434 ljs_base INFO ====> Epoch: 227
377
+ 2024-07-31 14:21:56,661 ljs_base INFO ====> Epoch: 228
378
+ 2024-07-31 14:22:57,635 ljs_base INFO ====> Epoch: 229
379
+ 2024-07-31 14:23:59,339 ljs_base INFO ====> Epoch: 230
380
+ 2024-07-31 14:24:33,346 ljs_base INFO Train Epoch: 231 [19%]
381
+ 2024-07-31 14:24:33,347 ljs_base INFO [2.6945128440856934, 1.935408592224121, 3.1295275688171387, 19.233083724975586, 0.38724976778030396, 2.312448024749756, 12200, 0.00019433152056953475]
382
+ 2024-07-31 14:25:00,658 ljs_base INFO ====> Epoch: 231
383
+ 2024-07-31 14:26:03,052 ljs_base INFO ====> Epoch: 232
384
+ 2024-07-31 14:27:05,802 ljs_base INFO ====> Epoch: 233
385
+ 2024-07-31 14:28:05,363 ljs_base INFO Train Epoch: 234 [96%]
386
+ 2024-07-31 14:28:05,364 ljs_base INFO [2.5830931663513184, 1.956726312637329, 3.283010721206665, 19.306346893310547, 0.36510634422302246, 2.262772798538208, 12400, 0.0001942586553582316]
387
+ 2024-07-31 14:28:07,355 ljs_base INFO ====> Epoch: 234
388
+ 2024-07-31 14:29:08,125 ljs_base INFO ====> Epoch: 235
389
+ 2024-07-31 14:30:09,213 ljs_base INFO ====> Epoch: 236
390
+ 2024-07-31 14:31:10,882 ljs_base INFO ====> Epoch: 237
391
+ 2024-07-31 14:32:04,109 ljs_base INFO Train Epoch: 238 [74%]
392
+ 2024-07-31 14:32:04,109 ljs_base INFO [2.6957156658172607, 1.9041168689727783, 3.2582807540893555, 18.825563430786133, 0.3776545524597168, 2.361882448196411, 12600, 0.0001941615442407838]
393
+ 2024-07-31 14:32:13,590 ljs_base INFO ====> Epoch: 238
394
+ 2024-07-31 14:33:14,448 ljs_base INFO ====> Epoch: 239
395
+ 2024-07-31 14:34:15,811 ljs_base INFO ====> Epoch: 240
396
+ 2024-07-31 14:35:17,344 ljs_base INFO ====> Epoch: 241
397
+ 2024-07-31 14:36:02,729 ljs_base INFO Train Epoch: 242 [51%]
398
+ 2024-07-31 14:36:02,730 ljs_base INFO [2.711752414703369, 2.0284745693206787, 3.282557487487793, 19.581058502197266, 0.35345321893692017, 2.2635228633880615, 12800, 0.0001940644816697913]
399
+ 2024-07-31 14:36:19,649 ljs_base INFO ====> Epoch: 242
400
+ 2024-07-31 14:37:22,941 ljs_base INFO ====> Epoch: 243
401
+ 2024-07-31 14:38:24,108 ljs_base INFO ====> Epoch: 244
402
+ 2024-07-31 14:39:25,518 ljs_base INFO ====> Epoch: 245
403
+ 2024-07-31 14:40:01,947 ljs_base INFO Train Epoch: 246 [28%]
404
+ 2024-07-31 14:40:01,947 ljs_base INFO [2.693770170211792, 1.7381986379623413, 3.3295540809631348, 18.838062286376953, 0.35705989599227905, 2.4090120792388916, 13000, 0.00019396746762098544]
405
+ 2024-07-31 14:40:07,371 ljs_base INFO Saving model and optimizer state at iteration 246 to ./logs/ljs_base/G_13000.pth
406
+ 2024-07-31 14:40:07,805 ljs_base INFO Saving model and optimizer state at iteration 246 to ./logs/ljs_base/D_13000.pth
407
+ 2024-07-31 14:40:33,435 ljs_base INFO ====> Epoch: 246
408
+ 2024-07-31 14:41:35,298 ljs_base INFO ====> Epoch: 247
vctk_base/.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 10000,
7
+ "learning_rate": 2e-4,
8
+ "betas": [0.8, 0.99],
9
+ "eps": 1e-9,
10
+ "batch_size": 64,
11
+ "fp16_run": false,
12
+ "lr_decay": 0.999875,
13
+ "segment_size": 8192,
14
+ "init_lr_ratio": 1,
15
+ "warmup_epochs": 0,
16
+ "c_mel": 45,
17
+ "c_kl": 1.0
18
+ },
19
+ "data": {
20
+ "use_mel_posterior_encoder": true,
21
+ "training_files":"filelists/vctk_audio_sid_text_train_filelist.txt.cleaned",
22
+ "validation_files":"filelists/vctk_audio_sid_text_val_filelist.txt.cleaned",
23
+ "text_cleaners":["english_cleaners2"],
24
+ "max_wav_value": 32768.0,
25
+ "sampling_rate": 22050,
26
+ "filter_length": 1024,
27
+ "hop_length": 256,
28
+ "win_length": 1024,
29
+ "n_mel_channels": 80,
30
+ "mel_fmin": 0.0,
31
+ "mel_fmax": null,
32
+ "add_blank": false,
33
+ "n_speakers": 109,
34
+ "cleaned_text": true
35
+ },
36
+ "model": {
37
+ "use_mel_posterior_encoder": true,
38
+ "use_transformer_flows": true,
39
+ "transformer_flow_type": "pre_conv",
40
+ "use_spk_conditioned_encoder": true,
41
+ "use_noise_scaled_mas": true,
42
+ "use_duration_discriminator": false,
43
+ "inter_channels": 192,
44
+ "hidden_channels": 192,
45
+ "filter_channels": 768,
46
+ "n_heads": 2,
47
+ "n_layers": 6,
48
+ "kernel_size": 3,
49
+ "p_dropout": 0.1,
50
+ "resblock": "1",
51
+ "resblock_kernel_sizes": [3,7,11],
52
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
53
+ "upsample_rates": [8,8,2,2],
54
+ "upsample_initial_channel": 512,
55
+ "upsample_kernel_sizes": [16,16,4,4],
56
+ "n_layers_q": 3,
57
+ "use_spectral_norm": false,
58
+ "use_sdp": false,
59
+ "gin_channels": 256
60
+ }
61
+ }
62
+
vctk_base/.ipynb_checkpoints/train-checkpoint.log ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-07-31 15:58:37,415 vctk_base INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0002, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 64, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 8192, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0}, 'data': {'use_mel_posterior_encoder': True, 'training_files': 'filelists/vctk_audio_sid_text_train_filelist.txt.cleaned', 'validation_files': 'filelists/vctk_audio_sid_text_val_filelist.txt.cleaned', 'text_cleaners': ['english_cleaners2'], 'max_wav_value': 32768.0, 'sampling_rate': 22050, 'filter_length': 1024, 'hop_length': 256, 'win_length': 1024, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None, 'add_blank': False, 'n_speakers': 109, 'cleaned_text': True}, 'model': {'use_mel_posterior_encoder': True, 'use_transformer_flows': True, 'transformer_flow_type': 'pre_conv', 'use_spk_conditioned_encoder': True, 'use_noise_scaled_mas': True, 'use_duration_discriminator': False, 'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [8, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'use_sdp': False, 'gin_channels': 256}, 'model_dir': './logs/vctk_base'}
2
+ 2024-07-31 15:59:46,748 vctk_base INFO Train Epoch: 1 [0%]
3
+ 2024-07-31 15:59:46,749 vctk_base INFO [5.95834493637085, 4.447598457336426, 0.5945128798484802, 80.78205871582031, 1.6755988597869873, 382.7722473144531, 0, 0.0002]
4
+ 2024-07-31 15:59:54,153 vctk_base INFO Saving model and optimizer state at iteration 1 to ./logs/vctk_base/G_0.pth
5
+ 2024-07-31 15:59:54,649 vctk_base INFO Saving model and optimizer state at iteration 1 to ./logs/vctk_base/D_0.pth
6
+ 2024-07-31 16:10:35,610 vctk_base INFO ====> Epoch: 1
7
+ 2024-07-31 16:11:43,492 vctk_base INFO Train Epoch: 2 [15%]
8
+ 2024-07-31 16:11:43,493 vctk_base INFO [2.9280824661254883, 2.1821279525756836, 1.6974865198135376, 34.330299377441406, 0.9032274484634399, 2.9195711612701416, 200, 0.000199975]
9
+ 2024-07-31 16:16:21,857 vctk_base INFO ====> Epoch: 2
10
+ 2024-07-31 16:17:41,687 vctk_base INFO Train Epoch: 3 [30%]
11
+ 2024-07-31 16:17:41,688 vctk_base INFO [2.600605010986328, 1.9394288063049316, 1.9603354930877686, 31.105972290039062, 0.8931986093521118, 2.5727241039276123, 400, 0.000199950003125]
12
+ 2024-07-31 16:20:27,208 vctk_base INFO ====> Epoch: 3
13
+ 2024-07-31 16:22:27,692 vctk_base INFO Train Epoch: 4 [45%]
14
+ 2024-07-31 16:22:27,693 vctk_base INFO [2.38808536529541, 2.2886123657226562, 3.368988513946533, 28.492382049560547, 0.8514041900634766, 2.7287182807922363, 600, 0.00019992500937460937]
15
+ 2024-07-31 16:23:36,964 vctk_base INFO ====> Epoch: 4
16
+ 2024-07-31 16:26:27,606 vctk_base INFO Train Epoch: 5 [60%]
17
+ 2024-07-31 16:26:27,607 vctk_base INFO [2.7892253398895264, 1.5592807531356812, 1.797709345817566, 26.904010772705078, 0.8325083255767822, 2.5772321224212646, 800, 0.00019990001874843754]
18
+ 2024-07-31 16:27:22,899 vctk_base INFO ====> Epoch: 5
19
+ 2024-07-31 16:30:35,911 vctk_base INFO Train Epoch: 6 [75%]
20
+ 2024-07-31 16:30:35,912 vctk_base INFO [2.77366042137146, 1.8799923658370972, 1.9885990619659424, 25.85253143310547, 0.8241404294967651, 2.4684202671051025, 1000, 0.00019987503124609398]
21
+ 2024-07-31 16:30:41,251 vctk_base INFO Saving model and optimizer state at iteration 6 to ./logs/vctk_base/G_1000.pth
22
+ 2024-07-31 16:30:41,707 vctk_base INFO Saving model and optimizer state at iteration 6 to ./logs/vctk_base/D_1000.pth
23
+ 2024-07-31 16:31:33,847 vctk_base INFO ====> Epoch: 6
24
+ 2024-07-31 16:34:17,687 vctk_base INFO Train Epoch: 7 [90%]
25
+ 2024-07-31 16:34:17,687 vctk_base INFO [2.719902753829956, 1.8916287422180176, 2.0821564197540283, 25.40392303466797, 0.8367151021957397, 2.7618155479431152, 1200, 0.0001998500468671882]
26
+ 2024-07-31 16:34:29,055 vctk_base INFO ====> Epoch: 7
27
+ 2024-07-31 16:37:52,347 vctk_base INFO ====> Epoch: 8
28
+ 2024-07-31 16:38:25,670 vctk_base INFO Train Epoch: 9 [5%]
29
+ 2024-07-31 16:38:25,670 vctk_base INFO [2.730032205581665, 1.8405590057373047, 2.0475995540618896, 23.66701889038086, 0.8283747434616089, 2.5974247455596924, 1400, 0.00019980008747812837]
30
+ 2024-07-31 16:40:41,621 vctk_base INFO ====> Epoch: 9
31
+ 2024-07-31 16:41:39,657 vctk_base INFO Train Epoch: 10 [20%]
32
+ 2024-07-31 16:41:39,658 vctk_base INFO [2.7427616119384766, 1.9437215328216553, 2.390969753265381, 24.00310707092285, 0.8053104877471924, 2.6501286029815674, 1600, 0.0001997751124671936]
33
+ 2024-07-31 16:43:20,386 vctk_base INFO ====> Epoch: 10
34
+ 2024-07-31 16:44:44,701 vctk_base INFO Train Epoch: 11 [34%]
35
+ 2024-07-31 16:44:44,702 vctk_base INFO [2.70190167427063, 2.2952077388763428, 2.174717664718628, 23.466419219970703, 2.423936367034912, 2.5556368827819824, 1800, 0.00019975014057813518]
36
+ 2024-07-31 16:46:23,443 vctk_base INFO ====> Epoch: 11
37
+ 2024-07-31 16:47:52,773 vctk_base INFO Train Epoch: 12 [49%]
38
+ 2024-07-31 16:47:52,773 vctk_base INFO [2.6355438232421875, 1.834970235824585, 2.192290782928467, 22.52155876159668, 0.6051994562149048, 2.3109006881713867, 2000, 0.00019972517181056292]
39
+ 2024-07-31 16:47:57,936 vctk_base INFO Saving model and optimizer state at iteration 12 to ./logs/vctk_base/G_2000.pth
40
+ 2024-07-31 16:47:58,384 vctk_base INFO Saving model and optimizer state at iteration 12 to ./logs/vctk_base/D_2000.pth
41
+ 2024-07-31 16:48:45,264 vctk_base INFO ====> Epoch: 12
42
+ 2024-07-31 16:50:14,281 vctk_base INFO Train Epoch: 13 [64%]
43
+ 2024-07-31 16:50:14,281 vctk_base INFO [2.8553051948547363, 1.9873125553131104, 2.307190418243408, 23.34893798828125, 0.5854883193969727, 2.556208848953247, 2200, 0.0001997002061640866]
44
+ 2024-07-31 16:50:58,237 vctk_base INFO ====> Epoch: 13
vctk_base/G_23000.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dd6d9a353ea2c296e4e489b2a7c43a2b688e077280f5f20fd4349480e7d61cd3
3
+ size 479757513
vctk_base/config.json ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "train": {
3
+ "log_interval": 200,
4
+ "eval_interval": 1000,
5
+ "seed": 1234,
6
+ "epochs": 10000,
7
+ "learning_rate": 2e-4,
8
+ "betas": [0.8, 0.99],
9
+ "eps": 1e-9,
10
+ "batch_size": 64,
11
+ "fp16_run": false,
12
+ "lr_decay": 0.999875,
13
+ "segment_size": 8192,
14
+ "init_lr_ratio": 1,
15
+ "warmup_epochs": 0,
16
+ "c_mel": 45,
17
+ "c_kl": 1.0
18
+ },
19
+ "data": {
20
+ "use_mel_posterior_encoder": true,
21
+ "training_files":"filelists/vctk_audio_sid_text_train_filelist.txt.cleaned",
22
+ "validation_files":"filelists/vctk_audio_sid_text_val_filelist.txt.cleaned",
23
+ "text_cleaners":["english_cleaners2"],
24
+ "max_wav_value": 32768.0,
25
+ "sampling_rate": 22050,
26
+ "filter_length": 1024,
27
+ "hop_length": 256,
28
+ "win_length": 1024,
29
+ "n_mel_channels": 80,
30
+ "mel_fmin": 0.0,
31
+ "mel_fmax": null,
32
+ "add_blank": false,
33
+ "n_speakers": 109,
34
+ "cleaned_text": true
35
+ },
36
+ "model": {
37
+ "use_mel_posterior_encoder": true,
38
+ "use_transformer_flows": true,
39
+ "transformer_flow_type": "pre_conv",
40
+ "use_spk_conditioned_encoder": true,
41
+ "use_noise_scaled_mas": true,
42
+ "use_duration_discriminator": false,
43
+ "inter_channels": 192,
44
+ "hidden_channels": 192,
45
+ "filter_channels": 768,
46
+ "n_heads": 2,
47
+ "n_layers": 6,
48
+ "kernel_size": 3,
49
+ "p_dropout": 0.1,
50
+ "resblock": "1",
51
+ "resblock_kernel_sizes": [3,7,11],
52
+ "resblock_dilation_sizes": [[1,3,5], [1,3,5], [1,3,5]],
53
+ "upsample_rates": [8,8,2,2],
54
+ "upsample_initial_channel": 512,
55
+ "upsample_kernel_sizes": [16,16,4,4],
56
+ "n_layers_q": 3,
57
+ "use_spectral_norm": false,
58
+ "use_sdp": false,
59
+ "gin_channels": 256
60
+ }
61
+ }
62
+
vctk_base/eval/events.out.tfevents.1722412717.autodl-container-ae2d4d944b-5e0c94ae.180281.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5f9d3f43ae2e177454014ca7f4e143b35c992c8c70e1e46d90336738551bb10b
3
+ size 7617324
vctk_base/events.out.tfevents.1722412717.autodl-container-ae2d4d944b-5e0c94ae.180281.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8ca36ed5741c3590beb837251eabda6ed52a6f88a3c21624d4d1f7e757b9a849
3
+ size 15404695
vctk_base/githash ADDED
@@ -0,0 +1 @@
 
 
1
+ 1f4f3790568180f8dec4419d5cad5d0877b034bb
vctk_base/train.log ADDED
@@ -0,0 +1,423 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2024-07-31 15:58:37,415 vctk_base INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0002, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 64, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 8192, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0}, 'data': {'use_mel_posterior_encoder': True, 'training_files': 'filelists/vctk_audio_sid_text_train_filelist.txt.cleaned', 'validation_files': 'filelists/vctk_audio_sid_text_val_filelist.txt.cleaned', 'text_cleaners': ['english_cleaners2'], 'max_wav_value': 32768.0, 'sampling_rate': 22050, 'filter_length': 1024, 'hop_length': 256, 'win_length': 1024, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None, 'add_blank': False, 'n_speakers': 109, 'cleaned_text': True}, 'model': {'use_mel_posterior_encoder': True, 'use_transformer_flows': True, 'transformer_flow_type': 'pre_conv', 'use_spk_conditioned_encoder': True, 'use_noise_scaled_mas': True, 'use_duration_discriminator': False, 'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [8, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'use_sdp': False, 'gin_channels': 256}, 'model_dir': './logs/vctk_base'}
2
+ 2024-07-31 15:59:46,748 vctk_base INFO Train Epoch: 1 [0%]
3
+ 2024-07-31 15:59:46,749 vctk_base INFO [5.95834493637085, 4.447598457336426, 0.5945128798484802, 80.78205871582031, 1.6755988597869873, 382.7722473144531, 0, 0.0002]
4
+ 2024-07-31 15:59:54,153 vctk_base INFO Saving model and optimizer state at iteration 1 to ./logs/vctk_base/G_0.pth
5
+ 2024-07-31 15:59:54,649 vctk_base INFO Saving model and optimizer state at iteration 1 to ./logs/vctk_base/D_0.pth
6
+ 2024-07-31 16:10:35,610 vctk_base INFO ====> Epoch: 1
7
+ 2024-07-31 16:11:43,492 vctk_base INFO Train Epoch: 2 [15%]
8
+ 2024-07-31 16:11:43,493 vctk_base INFO [2.9280824661254883, 2.1821279525756836, 1.6974865198135376, 34.330299377441406, 0.9032274484634399, 2.9195711612701416, 200, 0.000199975]
9
+ 2024-07-31 16:16:21,857 vctk_base INFO ====> Epoch: 2
10
+ 2024-07-31 16:17:41,687 vctk_base INFO Train Epoch: 3 [30%]
11
+ 2024-07-31 16:17:41,688 vctk_base INFO [2.600605010986328, 1.9394288063049316, 1.9603354930877686, 31.105972290039062, 0.8931986093521118, 2.5727241039276123, 400, 0.000199950003125]
12
+ 2024-07-31 16:20:27,208 vctk_base INFO ====> Epoch: 3
13
+ 2024-07-31 16:22:27,692 vctk_base INFO Train Epoch: 4 [45%]
14
+ 2024-07-31 16:22:27,693 vctk_base INFO [2.38808536529541, 2.2886123657226562, 3.368988513946533, 28.492382049560547, 0.8514041900634766, 2.7287182807922363, 600, 0.00019992500937460937]
15
+ 2024-07-31 16:23:36,964 vctk_base INFO ====> Epoch: 4
16
+ 2024-07-31 16:26:27,606 vctk_base INFO Train Epoch: 5 [60%]
17
+ 2024-07-31 16:26:27,607 vctk_base INFO [2.7892253398895264, 1.5592807531356812, 1.797709345817566, 26.904010772705078, 0.8325083255767822, 2.5772321224212646, 800, 0.00019990001874843754]
18
+ 2024-07-31 16:27:22,899 vctk_base INFO ====> Epoch: 5
19
+ 2024-07-31 16:30:35,911 vctk_base INFO Train Epoch: 6 [75%]
20
+ 2024-07-31 16:30:35,912 vctk_base INFO [2.77366042137146, 1.8799923658370972, 1.9885990619659424, 25.85253143310547, 0.8241404294967651, 2.4684202671051025, 1000, 0.00019987503124609398]
21
+ 2024-07-31 16:30:41,251 vctk_base INFO Saving model and optimizer state at iteration 6 to ./logs/vctk_base/G_1000.pth
22
+ 2024-07-31 16:30:41,707 vctk_base INFO Saving model and optimizer state at iteration 6 to ./logs/vctk_base/D_1000.pth
23
+ 2024-07-31 16:31:33,847 vctk_base INFO ====> Epoch: 6
24
+ 2024-07-31 16:34:17,687 vctk_base INFO Train Epoch: 7 [90%]
25
+ 2024-07-31 16:34:17,687 vctk_base INFO [2.719902753829956, 1.8916287422180176, 2.0821564197540283, 25.40392303466797, 0.8367151021957397, 2.7618155479431152, 1200, 0.0001998500468671882]
26
+ 2024-07-31 16:34:29,055 vctk_base INFO ====> Epoch: 7
27
+ 2024-07-31 16:37:52,347 vctk_base INFO ====> Epoch: 8
28
+ 2024-07-31 16:38:25,670 vctk_base INFO Train Epoch: 9 [5%]
29
+ 2024-07-31 16:38:25,670 vctk_base INFO [2.730032205581665, 1.8405590057373047, 2.0475995540618896, 23.66701889038086, 0.8283747434616089, 2.5974247455596924, 1400, 0.00019980008747812837]
30
+ 2024-07-31 16:40:41,621 vctk_base INFO ====> Epoch: 9
31
+ 2024-07-31 16:41:39,657 vctk_base INFO Train Epoch: 10 [20%]
32
+ 2024-07-31 16:41:39,658 vctk_base INFO [2.7427616119384766, 1.9437215328216553, 2.390969753265381, 24.00310707092285, 0.8053104877471924, 2.6501286029815674, 1600, 0.0001997751124671936]
33
+ 2024-07-31 16:43:20,386 vctk_base INFO ====> Epoch: 10
34
+ 2024-07-31 16:44:44,701 vctk_base INFO Train Epoch: 11 [34%]
35
+ 2024-07-31 16:44:44,702 vctk_base INFO [2.70190167427063, 2.2952077388763428, 2.174717664718628, 23.466419219970703, 2.423936367034912, 2.5556368827819824, 1800, 0.00019975014057813518]
36
+ 2024-07-31 16:46:23,443 vctk_base INFO ====> Epoch: 11
37
+ 2024-07-31 16:47:52,773 vctk_base INFO Train Epoch: 12 [49%]
38
+ 2024-07-31 16:47:52,773 vctk_base INFO [2.6355438232421875, 1.834970235824585, 2.192290782928467, 22.52155876159668, 0.6051994562149048, 2.3109006881713867, 2000, 0.00019972517181056292]
39
+ 2024-07-31 16:47:57,936 vctk_base INFO Saving model and optimizer state at iteration 12 to ./logs/vctk_base/G_2000.pth
40
+ 2024-07-31 16:47:58,384 vctk_base INFO Saving model and optimizer state at iteration 12 to ./logs/vctk_base/D_2000.pth
41
+ 2024-07-31 16:48:45,264 vctk_base INFO ====> Epoch: 12
42
+ 2024-07-31 16:50:14,281 vctk_base INFO Train Epoch: 13 [64%]
43
+ 2024-07-31 16:50:14,281 vctk_base INFO [2.8553051948547363, 1.9873125553131104, 2.307190418243408, 23.34893798828125, 0.5854883193969727, 2.556208848953247, 2200, 0.0001997002061640866]
44
+ 2024-07-31 16:50:58,237 vctk_base INFO ====> Epoch: 13
45
+ 2024-07-31 16:53:05,544 vctk_base INFO Train Epoch: 14 [79%]
46
+ 2024-07-31 16:53:05,545 vctk_base INFO [2.777721405029297, 2.1381800174713135, 2.501556634902954, 22.20928382873535, 0.5601696968078613, 2.460447072982788, 2400, 0.00019967524363831608]
47
+ 2024-07-31 16:53:25,548 vctk_base INFO ====> Epoch: 14
48
+ 2024-07-31 16:55:57,871 vctk_base INFO Train Epoch: 15 [94%]
49
+ 2024-07-31 16:55:57,872 vctk_base INFO [2.714642286300659, 1.659013032913208, 1.8553208112716675, 21.232280731201172, 0.5373649597167969, 2.3354811668395996, 2600, 0.0001996502842328613]
50
+ 2024-07-31 16:56:04,386 vctk_base INFO ====> Epoch: 15
51
+ 2024-07-31 16:58:07,670 vctk_base INFO ====> Epoch: 16
52
+ 2024-07-31 16:58:52,366 vctk_base INFO Train Epoch: 17 [9%]
53
+ 2024-07-31 16:58:52,367 vctk_base INFO [2.713298797607422, 1.9323736429214478, 2.303877115249634, 21.44766616821289, 0.5438215136528015, 2.3254811763763428, 2800, 0.00019960037478133875]
54
+ 2024-07-31 17:00:29,660 vctk_base INFO ====> Epoch: 17
55
+ 2024-07-31 17:01:21,922 vctk_base INFO Train Epoch: 18 [24%]
56
+ 2024-07-31 17:01:21,923 vctk_base INFO [2.713764190673828, 1.9357024431228638, 2.4548184871673584, 21.915802001953125, 0.5274988412857056, 2.5186212062835693, 3000, 0.00019957542473449108]
57
+ 2024-07-31 17:01:27,806 vctk_base INFO Saving model and optimizer state at iteration 18 to ./logs/vctk_base/G_3000.pth
58
+ 2024-07-31 17:01:28,295 vctk_base INFO Saving model and optimizer state at iteration 18 to ./logs/vctk_base/D_3000.pth
59
+ 2024-07-31 17:02:38,729 vctk_base INFO ====> Epoch: 18
60
+ 2024-07-31 17:03:45,550 vctk_base INFO Train Epoch: 19 [39%]
61
+ 2024-07-31 17:03:45,551 vctk_base INFO [2.7419161796569824, 1.8765175342559814, 2.3707008361816406, 21.559480667114258, 0.4915613532066345, 2.187140941619873, 3200, 0.00019955047780639926]
62
+ 2024-07-31 17:05:39,644 vctk_base INFO ====> Epoch: 19
63
+ 2024-07-31 17:07:12,157 vctk_base INFO Train Epoch: 20 [54%]
64
+ 2024-07-31 17:07:12,158 vctk_base INFO [2.785828113555908, 1.8576685190200806, 2.308138132095337, 21.249027252197266, 0.512744665145874, 2.4672529697418213, 3400, 0.00019952553399667344]
65
+ 2024-07-31 17:07:54,529 vctk_base INFO ====> Epoch: 20
66
+ 2024-07-31 17:10:15,272 vctk_base INFO Train Epoch: 21 [69%]
67
+ 2024-07-31 17:10:15,272 vctk_base INFO [2.7413244247436523, 1.9046604633331299, 2.3721301555633545, 20.25397491455078, 0.4737487733364105, 2.3318686485290527, 3600, 0.00019950059330492385]
68
+ 2024-07-31 17:10:56,593 vctk_base INFO ====> Epoch: 21
69
+ 2024-07-31 17:12:46,075 vctk_base INFO Train Epoch: 22 [84%]
70
+ 2024-07-31 17:12:46,076 vctk_base INFO [2.5955231189727783, 2.276843547821045, 3.819395065307617, 20.602304458618164, 0.47186678647994995, 2.5826566219329834, 3800, 0.00019947565573076072]
71
+ 2024-07-31 17:13:02,656 vctk_base INFO ====> Epoch: 22
72
+ 2024-07-31 17:15:20,996 vctk_base INFO Train Epoch: 23 [99%]
73
+ 2024-07-31 17:15:20,998 vctk_base INFO [2.704430341720581, 2.234173059463501, 2.6887426376342773, 20.148422241210938, 0.4825696349143982, 2.396641969680786, 4000, 0.00019945072127379438]
74
+ 2024-07-31 17:15:27,019 vctk_base INFO Saving model and optimizer state at iteration 23 to ./logs/vctk_base/G_4000.pth
75
+ 2024-07-31 17:15:27,535 vctk_base INFO Saving model and optimizer state at iteration 23 to ./logs/vctk_base/D_4000.pth
76
+ 2024-07-31 17:15:29,595 vctk_base INFO ====> Epoch: 23
77
+ 2024-07-31 17:17:56,377 vctk_base INFO ====> Epoch: 24
78
+ 2024-07-31 17:18:37,733 vctk_base INFO Train Epoch: 25 [14%]
79
+ 2024-07-31 17:18:37,735 vctk_base INFO [2.9282643795013428, 2.020925521850586, 3.2645044326782227, 20.934186935424805, 0.46214795112609863, 2.4720187187194824, 4200, 0.00019940086170989343]
80
+ 2024-07-31 17:20:25,072 vctk_base INFO ====> Epoch: 25
81
+ 2024-07-31 17:21:20,556 vctk_base INFO Train Epoch: 26 [29%]
82
+ 2024-07-31 17:21:20,557 vctk_base INFO [2.8570451736450195, 1.7384707927703857, 2.3261985778808594, 19.840103149414062, 0.4487919211387634, 2.424896717071533, 4400, 0.0001993759366021797]
83
+ 2024-07-31 17:22:39,767 vctk_base INFO ====> Epoch: 26
84
+ 2024-07-31 17:23:50,501 vctk_base INFO Train Epoch: 27 [44%]
85
+ 2024-07-31 17:23:50,502 vctk_base INFO [2.910148859024048, 1.8480138778686523, 2.381047010421753, 19.552705764770508, 0.4701864719390869, 2.50093150138855, 4600, 0.00019935101461010442]
86
+ 2024-07-31 17:24:54,124 vctk_base INFO ====> Epoch: 27
87
+ 2024-07-31 17:26:18,050 vctk_base INFO Train Epoch: 28 [59%]
88
+ 2024-07-31 17:26:18,051 vctk_base INFO [2.853848457336426, 1.8373521566390991, 2.225456714630127, 19.835281372070312, 0.44014596939086914, 2.3965091705322266, 4800, 0.00019932609573327815]
89
+ 2024-07-31 17:27:06,532 vctk_base INFO ====> Epoch: 28
90
+ 2024-07-31 17:28:46,402 vctk_base INFO Train Epoch: 29 [74%]
91
+ 2024-07-31 17:28:46,403 vctk_base INFO [2.7545416355133057, 1.818556785583496, 2.3945600986480713, 19.29508399963379, 0.4671099781990051, 2.5171539783477783, 5000, 0.0001993011799713115]
92
+ 2024-07-31 17:28:52,415 vctk_base INFO Saving model and optimizer state at iteration 29 to ./logs/vctk_base/G_5000.pth
93
+ 2024-07-31 17:28:52,881 vctk_base INFO Saving model and optimizer state at iteration 29 to ./logs/vctk_base/D_5000.pth
94
+ 2024-07-31 17:29:17,866 vctk_base INFO ====> Epoch: 29
95
+ 2024-07-31 17:31:08,121 vctk_base INFO Train Epoch: 30 [89%]
96
+ 2024-07-31 17:31:08,121 vctk_base INFO [2.73783016204834, 1.8461756706237793, 2.4771246910095215, 19.831769943237305, 3.0953614711761475, 2.4404149055480957, 5200, 0.00019927626732381507]
97
+ 2024-07-31 17:31:19,487 vctk_base INFO ====> Epoch: 30
98
+ 2024-07-31 17:33:34,514 vctk_base INFO ====> Epoch: 31
99
+ 2024-07-31 17:34:07,166 vctk_base INFO Train Epoch: 32 [3%]
100
+ 2024-07-31 17:34:07,167 vctk_base INFO [2.757707357406616, 1.7589588165283203, 2.5125648975372314, 19.629941940307617, 0.46072179079055786, 2.5100460052490234, 5400, 0.00019922645137067577]
101
+ 2024-07-31 17:35:58,830 vctk_base INFO ====> Epoch: 32
102
+ 2024-07-31 17:36:46,267 vctk_base INFO Train Epoch: 33 [18%]
103
+ 2024-07-31 17:36:46,268 vctk_base INFO [3.017284870147705, 1.6329749822616577, 2.330310821533203, 19.94306182861328, 0.4459061324596405, 2.4063453674316406, 5600, 0.00019920154806425444]
104
+ 2024-07-31 17:38:11,295 vctk_base INFO ====> Epoch: 33
105
+ 2024-07-31 17:39:09,958 vctk_base INFO Train Epoch: 34 [33%]
106
+ 2024-07-31 17:39:09,959 vctk_base INFO [2.6629722118377686, 1.7911596298217773, 2.727849006652832, 19.163135528564453, 0.4723818302154541, 2.4707844257354736, 5800, 0.0001991766478707464]
107
+ 2024-07-31 17:40:11,655 vctk_base INFO ====> Epoch: 34
108
+ 2024-07-31 17:41:26,722 vctk_base INFO Train Epoch: 35 [48%]
109
+ 2024-07-31 17:41:26,723 vctk_base INFO [2.6895318031311035, 1.7457455396652222, 2.81209659576416, 19.937545776367188, 0.44610169529914856, 2.459444046020508, 6000, 0.00019915175078976256]
110
+ 2024-07-31 17:41:32,288 vctk_base INFO Saving model and optimizer state at iteration 35 to ./logs/vctk_base/G_6000.pth
111
+ 2024-07-31 17:41:32,746 vctk_base INFO Saving model and optimizer state at iteration 35 to ./logs/vctk_base/D_6000.pth
112
+ 2024-07-31 17:42:21,196 vctk_base INFO ====> Epoch: 35
113
+ 2024-07-31 17:43:47,425 vctk_base INFO Train Epoch: 36 [63%]
114
+ 2024-07-31 17:43:47,425 vctk_base INFO [2.7982497215270996, 1.7803000211715698, 2.534562826156616, 19.3641414642334, 0.4583313465118408, 2.3200626373291016, 6200, 0.00019912685682091382]
115
+ 2024-07-31 17:44:34,599 vctk_base INFO ====> Epoch: 36
116
+ 2024-07-31 17:46:31,164 vctk_base INFO Train Epoch: 37 [78%]
117
+ 2024-07-31 17:46:31,165 vctk_base INFO [2.7090446949005127, 1.7237282991409302, 2.4443578720092773, 19.533061981201172, 0.460343599319458, 2.3446848392486572, 6400, 0.0001991019659638112]
118
+ 2024-07-31 17:46:52,133 vctk_base INFO ====> Epoch: 37
119
+ 2024-07-31 17:48:46,189 vctk_base INFO Train Epoch: 38 [93%]
120
+ 2024-07-31 17:48:46,190 vctk_base INFO [2.9845643043518066, 1.7032246589660645, 2.5558934211730957, 19.15768814086914, 0.42551785707473755, 2.2799932956695557, 6600, 0.0001990770782180657]
121
+ 2024-07-31 17:48:53,446 vctk_base INFO ====> Epoch: 38
122
+ 2024-07-31 17:50:54,859 vctk_base INFO ====> Epoch: 39
123
+ 2024-07-31 17:51:31,377 vctk_base INFO Train Epoch: 40 [8%]
124
+ 2024-07-31 17:51:31,378 vctk_base INFO [2.733673572540283, 2.0628349781036377, 2.7443742752075195, 18.71280288696289, 0.42990636825561523, 2.4215052127838135, 6800, 0.0001990273120590905]
125
+ 2024-07-31 17:53:12,442 vctk_base INFO ====> Epoch: 40
126
+ 2024-07-31 17:54:02,927 vctk_base INFO Train Epoch: 41 [23%]
127
+ 2024-07-31 17:54:02,928 vctk_base INFO [2.718684673309326, 1.8519021272659302, 2.749854564666748, 19.13753890991211, 0.46087250113487244, 2.498363733291626, 7000, 0.00019900243364508313]
128
+ 2024-07-31 17:54:08,424 vctk_base INFO Saving model and optimizer state at iteration 41 to ./logs/vctk_base/G_7000.pth
129
+ 2024-07-31 17:54:08,885 vctk_base INFO Saving model and optimizer state at iteration 41 to ./logs/vctk_base/D_7000.pth
130
+ 2024-07-31 17:55:32,261 vctk_base INFO ====> Epoch: 41
131
+ 2024-07-31 17:56:35,835 vctk_base INFO Train Epoch: 42 [38%]
132
+ 2024-07-31 17:56:35,836 vctk_base INFO [2.674175262451172, 1.8392747640609741, 2.8282971382141113, 19.2322998046875, 0.438440203666687, 2.52997088432312, 7200, 0.0001989775583408775]
133
+ 2024-07-31 17:57:33,092 vctk_base INFO ====> Epoch: 42
134
+ 2024-07-31 17:58:50,688 vctk_base INFO Train Epoch: 43 [53%]
135
+ 2024-07-31 17:58:50,689 vctk_base INFO [2.7889764308929443, 1.8127435445785522, 2.6087982654571533, 18.935815811157227, 0.43870314955711365, 2.329326868057251, 7400, 0.00019895268614608487]
136
+ 2024-07-31 17:59:39,428 vctk_base INFO ====> Epoch: 43
137
+ 2024-07-31 18:01:10,686 vctk_base INFO Train Epoch: 44 [68%]
138
+ 2024-07-31 18:01:10,687 vctk_base INFO [2.6796553134918213, 2.1222262382507324, 2.874722957611084, 18.804826736450195, 0.4366087317466736, 2.4347615242004395, 7600, 0.0001989278170603166]
139
+ 2024-07-31 18:01:41,218 vctk_base INFO ====> Epoch: 44
140
+ 2024-07-31 18:03:47,975 vctk_base INFO Train Epoch: 45 [83%]
141
+ 2024-07-31 18:03:47,975 vctk_base INFO [2.8028101921081543, 1.9076577425003052, 2.8301918506622314, 19.087753295898438, 0.4097292423248291, 2.47856068611145, 7800, 0.00019890295108318404]
142
+ 2024-07-31 18:04:05,841 vctk_base INFO ====> Epoch: 45
143
+ 2024-07-31 18:06:22,022 vctk_base INFO Train Epoch: 46 [98%]
144
+ 2024-07-31 18:06:22,024 vctk_base INFO [2.7154791355133057, 1.8352608680725098, 2.966320276260376, 19.369640350341797, 0.4112851619720459, 2.2912449836730957, 8000, 0.00019887808821429862]
145
+ 2024-07-31 18:06:27,450 vctk_base INFO Saving model and optimizer state at iteration 46 to ./logs/vctk_base/G_8000.pth
146
+ 2024-07-31 18:06:27,908 vctk_base INFO Saving model and optimizer state at iteration 46 to ./logs/vctk_base/D_8000.pth
147
+ 2024-07-31 18:06:31,812 vctk_base INFO ====> Epoch: 46
148
+ 2024-07-31 18:08:33,179 vctk_base INFO ====> Epoch: 47
149
+ 2024-07-31 18:09:13,506 vctk_base INFO Train Epoch: 48 [13%]
150
+ 2024-07-31 18:09:13,507 vctk_base INFO [2.8093514442443848, 1.9022984504699707, 2.909862518310547, 19.05031394958496, 0.41173529624938965, 2.644400119781494, 8200, 0.00019882837179971516]
151
+ 2024-07-31 18:10:34,301 vctk_base INFO ====> Epoch: 48
152
+ 2024-07-31 18:11:28,206 vctk_base INFO Train Epoch: 49 [28%]
153
+ 2024-07-31 18:11:28,207 vctk_base INFO [2.8446223735809326, 1.7263821363449097, 2.8976407051086426, 19.01997184753418, 0.43137192726135254, 2.574247121810913, 8400, 0.00019880351825324018]
154
+ 2024-07-31 18:12:35,651 vctk_base INFO ====> Epoch: 49
155
+ 2024-07-31 18:13:43,369 vctk_base INFO Train Epoch: 50 [43%]
156
+ 2024-07-31 18:13:43,370 vctk_base INFO [2.7802820205688477, 1.9549651145935059, 2.9765007495880127, 18.892009735107422, 0.39091944694519043, 2.554434061050415, 8600, 0.00019877866781345852]
157
+ 2024-07-31 18:14:36,700 vctk_base INFO ====> Epoch: 50
158
+ 2024-07-31 18:16:03,299 vctk_base INFO Train Epoch: 51 [57%]
159
+ 2024-07-31 18:16:03,299 vctk_base INFO [2.679659366607666, 2.0002799034118652, 2.909229278564453, 18.65703773498535, 0.4002780616283417, 2.3429698944091797, 8800, 0.00019875382047998183]
160
+ 2024-07-31 18:16:53,189 vctk_base INFO ====> Epoch: 51
161
+ 2024-07-31 18:18:40,145 vctk_base INFO Train Epoch: 52 [72%]
162
+ 2024-07-31 18:18:40,146 vctk_base INFO [2.744574785232544, 1.7724623680114746, 3.0610110759735107, 18.913209915161133, 0.43122178316116333, 2.6649796962738037, 9000, 0.00019872897625242182]
163
+ 2024-07-31 18:18:45,958 vctk_base INFO Saving model and optimizer state at iteration 52 to ./logs/vctk_base/G_9000.pth
164
+ 2024-07-31 18:18:46,429 vctk_base INFO Saving model and optimizer state at iteration 52 to ./logs/vctk_base/D_9000.pth
165
+ 2024-07-31 18:19:13,109 vctk_base INFO ====> Epoch: 52
166
+ 2024-07-31 18:21:12,690 vctk_base INFO Train Epoch: 53 [87%]
167
+ 2024-07-31 18:21:12,690 vctk_base INFO [2.7237436771392822, 1.8348126411437988, 2.9269063472747803, 18.7817325592041, 0.43582791090011597, 2.491278886795044, 9200, 0.00019870413513039026]
168
+ 2024-07-31 18:21:29,229 vctk_base INFO ====> Epoch: 53
169
+ 2024-07-31 18:23:51,252 vctk_base INFO ====> Epoch: 54
170
+ 2024-07-31 18:24:22,522 vctk_base INFO Train Epoch: 55 [2%]
171
+ 2024-07-31 18:24:22,523 vctk_base INFO [2.9167683124542236, 2.416536331176758, 3.396906852722168, 18.938705444335938, 0.3908268213272095, 2.4303977489471436, 9400, 0.00019865446220135974]
172
+ 2024-07-31 18:25:53,277 vctk_base INFO ====> Epoch: 55
173
+ 2024-07-31 18:26:37,862 vctk_base INFO Train Epoch: 56 [17%]
174
+ 2024-07-31 18:26:37,862 vctk_base INFO [2.735896110534668, 1.9681804180145264, 3.235273599624634, 18.697275161743164, 0.4266441762447357, 2.480555534362793, 9600, 0.00019862963039358455]
175
+ 2024-07-31 18:27:53,828 vctk_base INFO ====> Epoch: 56
176
+ 2024-07-31 18:28:52,024 vctk_base INFO Train Epoch: 57 [32%]
177
+ 2024-07-31 18:28:52,024 vctk_base INFO [2.6695876121520996, 1.9072425365447998, 3.20603084564209, 18.753311157226562, 0.38820958137512207, 2.5629220008850098, 9800, 0.00019860480168978534]
178
+ 2024-07-31 18:29:54,259 vctk_base INFO ====> Epoch: 57
179
+ 2024-07-31 18:31:05,689 vctk_base INFO Train Epoch: 58 [47%]
180
+ 2024-07-31 18:31:05,690 vctk_base INFO [2.720654010772705, 1.8098275661468506, 3.111680507659912, 18.800527572631836, 0.43307945132255554, 2.684324264526367, 10000, 0.0001985799760895741]
181
+ 2024-07-31 18:31:11,461 vctk_base INFO Saving model and optimizer state at iteration 58 to ./logs/vctk_base/G_10000.pth
182
+ 2024-07-31 18:31:11,918 vctk_base INFO Saving model and optimizer state at iteration 58 to ./logs/vctk_base/D_10000.pth
183
+ 2024-07-31 18:32:00,640 vctk_base INFO ====> Epoch: 58
184
+ 2024-07-31 18:33:25,835 vctk_base INFO Train Epoch: 59 [62%]
185
+ 2024-07-31 18:33:25,836 vctk_base INFO [2.6345467567443848, 1.9701354503631592, 3.4606759548187256, 18.609193801879883, 0.3796922266483307, 2.5039894580841064, 10200, 0.0001985551535925629]
186
+ 2024-07-31 18:34:00,761 vctk_base INFO ====> Epoch: 59
187
+ 2024-07-31 18:35:51,278 vctk_base INFO Train Epoch: 60 [77%]
188
+ 2024-07-31 18:35:51,280 vctk_base INFO [2.7545344829559326, 1.9002240896224976, 3.353100061416626, 19.198869705200195, 0.41377776861190796, 2.5617709159851074, 10400, 0.00019853033419836382]
189
+ 2024-07-31 18:36:16,825 vctk_base INFO ====> Epoch: 60
190
+ 2024-07-31 18:38:10,598 vctk_base INFO Train Epoch: 61 [92%]
191
+ 2024-07-31 18:38:10,598 vctk_base INFO [2.6513590812683105, 1.9980324506759644, 3.5149335861206055, 18.710575103759766, 0.4296843409538269, 2.5778968334198, 10600, 0.000198505517906589]
192
+ 2024-07-31 18:38:18,843 vctk_base INFO ====> Epoch: 61
193
+ 2024-07-31 18:40:36,983 vctk_base INFO ====> Epoch: 62
194
+ 2024-07-31 18:41:13,086 vctk_base INFO Train Epoch: 63 [7%]
195
+ 2024-07-31 18:41:13,087 vctk_base INFO [2.76898455619812, 1.8472405672073364, 3.501081705093384, 18.66193962097168, 0.39157161116600037, 2.4212586879730225, 10800, 0.00019845589462876104]
196
+ 2024-07-31 18:42:49,489 vctk_base INFO ====> Epoch: 63
197
+ 2024-07-31 18:43:38,483 vctk_base INFO Train Epoch: 64 [22%]
198
+ 2024-07-31 18:43:38,483 vctk_base INFO [2.82553768157959, 2.144855499267578, 3.4176788330078125, 18.932954788208008, 0.4165416955947876, 2.766864538192749, 11000, 0.00019843108764193245]
199
+ 2024-07-31 18:43:44,162 vctk_base INFO Saving model and optimizer state at iteration 64 to ./logs/vctk_base/G_11000.pth
200
+ 2024-07-31 18:43:44,624 vctk_base INFO Saving model and optimizer state at iteration 64 to ./logs/vctk_base/D_11000.pth
201
+ 2024-07-31 18:45:07,232 vctk_base INFO ====> Epoch: 64
202
+ 2024-07-31 18:46:09,868 vctk_base INFO Train Epoch: 65 [37%]
203
+ 2024-07-31 18:46:09,868 vctk_base INFO [2.7012858390808105, 1.8674659729003906, 3.3445403575897217, 18.40689468383789, 0.410780131816864, 2.5117475986480713, 11200, 0.0001984062837559772]
204
+ 2024-07-31 18:47:20,815 vctk_base INFO ====> Epoch: 65
205
+ 2024-07-31 18:48:42,310 vctk_base INFO Train Epoch: 66 [52%]
206
+ 2024-07-31 18:48:42,310 vctk_base INFO [2.723456859588623, 1.8933134078979492, 3.5873687267303467, 18.62954330444336, 0.39162886142730713, 2.4643847942352295, 11400, 0.00019838148297050769]
207
+ 2024-07-31 18:49:37,614 vctk_base INFO ====> Epoch: 66
208
+ 2024-07-31 18:51:17,608 vctk_base INFO Train Epoch: 67 [67%]
209
+ 2024-07-31 18:51:17,608 vctk_base INFO [2.738039970397949, 1.7691630125045776, 3.2700178623199463, 18.368764877319336, 0.4393700659275055, 2.663144588470459, 11600, 0.00019835668528513637]
210
+ 2024-07-31 18:51:48,636 vctk_base INFO ====> Epoch: 67
211
+ 2024-07-31 18:53:37,519 vctk_base INFO Train Epoch: 68 [82%]
212
+ 2024-07-31 18:53:37,520 vctk_base INFO [2.6336379051208496, 1.8234792947769165, 3.431884527206421, 19.009965896606445, 0.3866386413574219, 2.5979223251342773, 11800, 0.00019833189069947573]
213
+ 2024-07-31 18:53:56,007 vctk_base INFO ====> Epoch: 68
214
+ 2024-07-31 18:55:57,272 vctk_base INFO Train Epoch: 69 [97%]
215
+ 2024-07-31 18:55:57,273 vctk_base INFO [2.6835806369781494, 1.9457542896270752, 3.31473970413208, 18.56977653503418, 0.3988074064254761, 2.4251718521118164, 12000, 0.0001983070992131383]
216
+ 2024-07-31 18:56:02,212 vctk_base INFO Saving model and optimizer state at iteration 69 to ./logs/vctk_base/G_12000.pth
217
+ 2024-07-31 18:56:02,672 vctk_base INFO Saving model and optimizer state at iteration 69 to ./logs/vctk_base/D_12000.pth
218
+ 2024-07-31 18:56:06,950 vctk_base INFO ====> Epoch: 69
219
+ 2024-07-31 18:58:07,049 vctk_base INFO ====> Epoch: 70
220
+ 2024-07-31 18:58:46,471 vctk_base INFO Train Epoch: 71 [11%]
221
+ 2024-07-31 18:58:46,472 vctk_base INFO [2.6941089630126953, 1.8600449562072754, 3.345460891723633, 18.61273193359375, 0.4168451726436615, 2.5507454872131348, 12200, 0.00019825752553688343]
222
+ 2024-07-31 19:00:16,725 vctk_base INFO ====> Epoch: 71
223
+ 2024-07-31 19:01:20,576 vctk_base INFO Train Epoch: 72 [26%]
224
+ 2024-07-31 19:01:20,576 vctk_base INFO [2.712427854537964, 1.9201195240020752, 3.475613832473755, 18.09619140625, 0.37365442514419556, 2.4617998600006104, 12400, 0.0001982327433461913]
225
+ 2024-07-31 19:02:29,327 vctk_base INFO ====> Epoch: 72
226
+ 2024-07-31 19:03:37,051 vctk_base INFO Train Epoch: 73 [41%]
227
+ 2024-07-31 19:03:37,052 vctk_base INFO [2.725606918334961, 1.8301725387573242, 3.4666659832000732, 18.19718360900879, 0.4112148880958557, 2.5959408283233643, 12600, 0.00019820796425327303]
228
+ 2024-07-31 19:04:31,900 vctk_base INFO ====> Epoch: 73
229
+ 2024-07-31 19:05:54,065 vctk_base INFO Train Epoch: 74 [56%]
230
+ 2024-07-31 19:05:54,065 vctk_base INFO [2.6599438190460205, 1.9059048891067505, 3.6940693855285645, 18.52634620666504, 0.4418574571609497, 2.5023603439331055, 12800, 0.00019818318825774137]
231
+ 2024-07-31 19:06:34,085 vctk_base INFO ====> Epoch: 74
232
+ 2024-07-31 19:08:09,011 vctk_base INFO Train Epoch: 75 [71%]
233
+ 2024-07-31 19:08:09,012 vctk_base INFO [2.7365143299102783, 1.9194236993789673, 3.6305205821990967, 18.415456771850586, 0.37235331535339355, 2.4268441200256348, 13000, 0.00019815841535920914]
234
+ 2024-07-31 19:08:14,314 vctk_base INFO Saving model and optimizer state at iteration 75 to ./logs/vctk_base/G_13000.pth
235
+ 2024-07-31 19:08:14,766 vctk_base INFO Saving model and optimizer state at iteration 75 to ./logs/vctk_base/D_13000.pth
236
+ 2024-07-31 19:08:42,194 vctk_base INFO ====> Epoch: 75
237
+ 2024-07-31 19:10:34,482 vctk_base INFO Train Epoch: 76 [86%]
238
+ 2024-07-31 19:10:34,482 vctk_base INFO [2.7580180168151855, 1.7200636863708496, 3.0227560997009277, 18.448945999145508, 0.4506934583187103, 2.471876382827759, 13200, 0.00019813364555728923]
239
+ 2024-07-31 19:10:47,823 vctk_base INFO ====> Epoch: 76
240
+ 2024-07-31 19:12:48,412 vctk_base INFO ====> Epoch: 77
241
+ 2024-07-31 19:13:18,639 vctk_base INFO Train Epoch: 78 [1%]
242
+ 2024-07-31 19:13:18,639 vctk_base INFO [2.7148354053497314, 2.165894031524658, 3.313800096511841, 18.442676544189453, 0.3986698389053345, 2.6134207248687744, 13400, 0.0001980841152417381]
243
+ 2024-07-31 19:14:49,447 vctk_base INFO ====> Epoch: 78
244
+ 2024-07-31 19:15:33,041 vctk_base INFO Train Epoch: 79 [16%]
245
+ 2024-07-31 19:15:33,042 vctk_base INFO [2.730719566345215, 1.9327192306518555, 3.405902862548828, 18.174877166748047, 0.3740883469581604, 2.6147971153259277, 13600, 0.00019805935472733287]
246
+ 2024-07-31 19:16:52,860 vctk_base INFO ====> Epoch: 79
247
+ 2024-07-31 19:17:51,205 vctk_base INFO Train Epoch: 80 [31%]
248
+ 2024-07-31 19:17:51,206 vctk_base INFO [2.660062551498413, 1.9964848756790161, 3.5999867916107178, 18.233510971069336, 0.3824460208415985, 2.401052951812744, 13800, 0.00019803459730799195]
249
+ 2024-07-31 19:18:53,647 vctk_base INFO ====> Epoch: 80
250
+ 2024-07-31 19:20:04,166 vctk_base INFO Train Epoch: 81 [46%]
251
+ 2024-07-31 19:20:04,166 vctk_base INFO [2.673189401626587, 2.026869058609009, 3.6645166873931885, 18.407241821289062, 0.4021156132221222, 2.4716899394989014, 14000, 0.00019800984298332845]
252
+ 2024-07-31 19:20:09,928 vctk_base INFO Saving model and optimizer state at iteration 81 to ./logs/vctk_base/G_14000.pth
253
+ 2024-07-31 19:20:10,391 vctk_base INFO Saving model and optimizer state at iteration 81 to ./logs/vctk_base/D_14000.pth
254
+ 2024-07-31 19:21:00,725 vctk_base INFO ====> Epoch: 81
255
+ 2024-07-31 19:22:32,425 vctk_base INFO Train Epoch: 82 [61%]
256
+ 2024-07-31 19:22:32,426 vctk_base INFO [2.745089054107666, 1.9575862884521484, 2.985320568084717, 18.152193069458008, 0.4198072552680969, 2.5089948177337646, 14200, 0.00019798509175295552]
257
+ 2024-07-31 19:23:08,248 vctk_base INFO ====> Epoch: 82
258
+ 2024-07-31 19:24:45,801 vctk_base INFO Train Epoch: 83 [76%]
259
+ 2024-07-31 19:24:45,802 vctk_base INFO [2.650749444961548, 2.090334892272949, 3.849977970123291, 18.16591453552246, 0.43277591466903687, 2.506842613220215, 14400, 0.0001979603436164864]
260
+ 2024-07-31 19:25:08,552 vctk_base INFO ====> Epoch: 83
261
+ 2024-07-31 19:26:59,928 vctk_base INFO Train Epoch: 84 [91%]
262
+ 2024-07-31 19:26:59,929 vctk_base INFO [2.6522724628448486, 2.0180118083953857, 3.4131879806518555, 18.145984649658203, 0.3974105715751648, 2.351032257080078, 14600, 0.00019793559857353432]
263
+ 2024-07-31 19:27:08,992 vctk_base INFO ====> Epoch: 84
264
+ 2024-07-31 19:29:20,189 vctk_base INFO ====> Epoch: 85
265
+ 2024-07-31 19:29:55,450 vctk_base INFO Train Epoch: 86 [6%]
266
+ 2024-07-31 19:29:55,450 vctk_base INFO [2.629013776779175, 2.1479477882385254, 3.935757875442505, 18.07757568359375, 0.42243802547454834, 2.5991103649139404, 14800, 0.00019788611776663464]
267
+ 2024-07-31 19:31:28,775 vctk_base INFO ====> Epoch: 86
268
+ 2024-07-31 19:32:17,425 vctk_base INFO Train Epoch: 87 [21%]
269
+ 2024-07-31 19:32:17,426 vctk_base INFO [2.673011064529419, 2.0242648124694824, 3.6163387298583984, 18.361595153808594, 0.40812259912490845, 2.448956251144409, 15000, 0.0001978613820019138]
270
+ 2024-07-31 19:32:23,051 vctk_base INFO Saving model and optimizer state at iteration 87 to ./logs/vctk_base/G_15000.pth
271
+ 2024-07-31 19:32:23,521 vctk_base INFO Saving model and optimizer state at iteration 87 to ./logs/vctk_base/D_15000.pth
272
+ 2024-07-31 19:33:36,473 vctk_base INFO ====> Epoch: 87
273
+ 2024-07-31 19:34:37,444 vctk_base INFO Train Epoch: 88 [36%]
274
+ 2024-07-31 19:34:37,445 vctk_base INFO [2.6237926483154297, 2.0044920444488525, 3.7436463832855225, 18.609411239624023, 0.3930659294128418, 2.507913827896118, 15200, 0.00019783664932916355]
275
+ 2024-07-31 19:35:37,264 vctk_base INFO ====> Epoch: 88
276
+ 2024-07-31 19:36:52,500 vctk_base INFO Train Epoch: 89 [51%]
277
+ 2024-07-31 19:36:52,500 vctk_base INFO [2.6626670360565186, 1.960605502128601, 3.8561346530914307, 18.106037139892578, 0.40538784861564636, 2.5277953147888184, 15400, 0.0001978119197479974]
278
+ 2024-07-31 19:37:48,375 vctk_base INFO ====> Epoch: 89
279
+ 2024-07-31 19:39:16,687 vctk_base INFO Train Epoch: 90 [66%]
280
+ 2024-07-31 19:39:16,687 vctk_base INFO [2.626413583755493, 1.977683424949646, 3.6834330558776855, 18.307308197021484, 0.3809516429901123, 2.5798938274383545, 15600, 0.0001977871932580289]
281
+ 2024-07-31 19:40:00,551 vctk_base INFO ====> Epoch: 90
282
+ 2024-07-31 19:41:43,411 vctk_base INFO Train Epoch: 91 [80%]
283
+ 2024-07-31 19:41:43,412 vctk_base INFO [2.6558823585510254, 2.2454681396484375, 4.032089710235596, 18.333227157592773, 3.2295820713043213, 2.48962140083313, 15800, 0.00019776246985887165]
284
+ 2024-07-31 19:42:02,955 vctk_base INFO ====> Epoch: 91
285
+ 2024-07-31 19:44:08,745 vctk_base INFO Train Epoch: 92 [95%]
286
+ 2024-07-31 19:44:08,745 vctk_base INFO [2.807577133178711, 2.388918161392212, 3.9953489303588867, 17.991065979003906, 0.4019758701324463, 2.4862077236175537, 16000, 0.0001977377495501393]
287
+ 2024-07-31 19:44:14,552 vctk_base INFO Saving model and optimizer state at iteration 92 to ./logs/vctk_base/G_16000.pth
288
+ 2024-07-31 19:44:15,084 vctk_base INFO Saving model and optimizer state at iteration 92 to ./logs/vctk_base/D_16000.pth
289
+ 2024-07-31 19:44:20,135 vctk_base INFO ====> Epoch: 92
290
+ 2024-07-31 19:46:31,716 vctk_base INFO ====> Epoch: 93
291
+ 2024-07-31 19:47:09,602 vctk_base INFO Train Epoch: 94 [10%]
292
+ 2024-07-31 19:47:09,602 vctk_base INFO [2.5804083347320557, 1.930048942565918, 3.9812726974487305, 18.1994571685791, 0.39961379766464233, 2.4620413780212402, 16200, 0.00019768831820240408]
293
+ 2024-07-31 19:48:32,377 vctk_base INFO ====> Epoch: 94
294
+ 2024-07-31 19:49:24,730 vctk_base INFO Train Epoch: 95 [25%]
295
+ 2024-07-31 19:49:24,730 vctk_base INFO [2.650141477584839, 2.316222667694092, 3.9717085361480713, 18.382537841796875, 0.3675995171070099, 2.589385747909546, 16400, 0.00019766360716262876]
296
+ 2024-07-31 19:50:32,909 vctk_base INFO ====> Epoch: 95
297
+ 2024-07-31 19:51:50,693 vctk_base INFO Train Epoch: 96 [40%]
298
+ 2024-07-31 19:51:50,693 vctk_base INFO [2.7319822311401367, 1.9869420528411865, 3.8629825115203857, 18.101194381713867, 0.3979954123497009, 2.40463924407959, 16600, 0.00019763889921173343]
299
+ 2024-07-31 19:52:45,636 vctk_base INFO ====> Epoch: 96
300
+ 2024-07-31 19:54:09,514 vctk_base INFO Train Epoch: 97 [55%]
301
+ 2024-07-31 19:54:09,514 vctk_base INFO [2.993732452392578, 1.927037239074707, 3.5611751079559326, 18.721088409423828, 0.3966422975063324, 2.5762922763824463, 16800, 0.00019761419434933197]
302
+ 2024-07-31 19:54:53,354 vctk_base INFO ====> Epoch: 97
303
+ 2024-07-31 19:56:27,624 vctk_base INFO Train Epoch: 98 [70%]
304
+ 2024-07-31 19:56:27,624 vctk_base INFO [2.7979679107666016, 1.9905132055282593, 3.3851537704467773, 18.267126083374023, 0.4097334146499634, 2.5044915676116943, 17000, 0.0001975894925750383]
305
+ 2024-07-31 19:56:34,879 vctk_base INFO Saving model and optimizer state at iteration 98 to ./logs/vctk_base/G_17000.pth
306
+ 2024-07-31 19:56:35,343 vctk_base INFO Saving model and optimizer state at iteration 98 to ./logs/vctk_base/D_17000.pth
307
+ 2024-07-31 19:57:14,133 vctk_base INFO ====> Epoch: 98
308
+ 2024-07-31 19:59:00,391 vctk_base INFO Train Epoch: 99 [85%]
309
+ 2024-07-31 19:59:00,392 vctk_base INFO [2.625796318054199, 2.0272037982940674, 3.654994010925293, 18.05194854736328, 0.4289575517177582, 2.430048704147339, 17200, 0.0001975647938884664]
310
+ 2024-07-31 19:59:15,687 vctk_base INFO ====> Epoch: 99
311
+ 2024-07-31 20:01:15,565 vctk_base INFO ====> Epoch: 100
312
+ 2024-07-31 20:01:45,079 vctk_base INFO Train Epoch: 101 [0%]
313
+ 2024-07-31 20:01:45,079 vctk_base INFO [2.744537830352783, 1.9861705303192139, 3.6682450771331787, 17.999156951904297, 0.37846389412879944, 2.4116063117980957, 17400, 0.00019751540577694416]
314
+ 2024-07-31 20:03:27,506 vctk_base INFO ====> Epoch: 101
315
+ 2024-07-31 20:04:10,687 vctk_base INFO Train Epoch: 102 [15%]
316
+ 2024-07-31 20:04:10,688 vctk_base INFO [2.6672444343566895, 2.0421199798583984, 3.8352768421173096, 17.990806579589844, 0.35318225622177124, 2.6415133476257324, 17600, 0.00019749071635122203]
317
+ 2024-07-31 20:05:29,759 vctk_base INFO ====> Epoch: 102
318
+ 2024-07-31 20:06:25,131 vctk_base INFO Train Epoch: 103 [30%]
319
+ 2024-07-31 20:06:25,132 vctk_base INFO [2.644667148590088, 2.2691452503204346, 4.24054479598999, 18.062803268432617, 0.38615739345550537, 2.5140414237976074, 17800, 0.00019746603001167813]
320
+ 2024-07-31 20:07:41,554 vctk_base INFO ====> Epoch: 103
321
+ 2024-07-31 20:08:51,264 vctk_base INFO Train Epoch: 104 [45%]
322
+ 2024-07-31 20:08:51,265 vctk_base INFO [2.699317693710327, 1.8191018104553223, 3.5548336505889893, 17.90565299987793, 0.3904575705528259, 2.443887233734131, 18000, 0.00019744134675792665]
323
+ 2024-07-31 20:08:56,962 vctk_base INFO Saving model and optimizer state at iteration 104 to ./logs/vctk_base/G_18000.pth
324
+ 2024-07-31 20:08:57,421 vctk_base INFO Saving model and optimizer state at iteration 104 to ./logs/vctk_base/D_18000.pth
325
+ 2024-07-31 20:09:49,436 vctk_base INFO ====> Epoch: 104
326
+ 2024-07-31 20:11:13,916 vctk_base INFO Train Epoch: 105 [60%]
327
+ 2024-07-31 20:11:13,917 vctk_base INFO [2.660168170928955, 2.0587971210479736, 3.775674343109131, 17.767053604125977, 0.39086776971817017, 2.3856089115142822, 18200, 0.0001974166665895819]
328
+ 2024-07-31 20:12:01,750 vctk_base INFO ====> Epoch: 105
329
+ 2024-07-31 20:13:39,356 vctk_base INFO Train Epoch: 106 [75%]
330
+ 2024-07-31 20:13:39,356 vctk_base INFO [2.699890613555908, 2.124408721923828, 3.9402413368225098, 17.910781860351562, 0.39350926876068115, 2.651435375213623, 18400, 0.0001973919895062582]
331
+ 2024-07-31 20:14:03,664 vctk_base INFO ====> Epoch: 106
332
+ 2024-07-31 20:15:54,572 vctk_base INFO Train Epoch: 107 [90%]
333
+ 2024-07-31 20:15:54,573 vctk_base INFO [2.67771053314209, 2.0905795097351074, 4.052742958068848, 17.98907470703125, 0.3509306311607361, 2.398362398147583, 18600, 0.0001973673155075699]
334
+ 2024-07-31 20:16:05,036 vctk_base INFO ====> Epoch: 107
335
+ 2024-07-31 20:18:05,085 vctk_base INFO ====> Epoch: 108
336
+ 2024-07-31 20:18:39,133 vctk_base INFO Train Epoch: 109 [5%]
337
+ 2024-07-31 20:18:39,133 vctk_base INFO [2.7124345302581787, 1.8752793073654175, 3.624152183532715, 17.89871597290039, 0.4105195999145508, 2.451591730117798, 18800, 0.0001973179767625573]
338
+ 2024-07-31 20:20:14,249 vctk_base INFO ====> Epoch: 109
339
+ 2024-07-31 20:21:01,189 vctk_base INFO Train Epoch: 110 [20%]
340
+ 2024-07-31 20:21:01,189 vctk_base INFO [2.5812158584594727, 2.0104386806488037, 4.494281768798828, 18.009235382080078, 0.3752250373363495, 2.570329189300537, 19000, 0.00019729331201546197]
341
+ 2024-07-31 20:21:06,679 vctk_base INFO Saving model and optimizer state at iteration 110 to ./logs/vctk_base/G_19000.pth
342
+ 2024-07-31 20:21:07,142 vctk_base INFO Saving model and optimizer state at iteration 110 to ./logs/vctk_base/D_19000.pth
343
+ 2024-07-31 20:22:21,754 vctk_base INFO ====> Epoch: 110
344
+ 2024-07-31 20:23:22,271 vctk_base INFO Train Epoch: 111 [34%]
345
+ 2024-07-31 20:23:22,272 vctk_base INFO [2.7855498790740967, 2.018437147140503, 3.8236453533172607, 18.002897262573242, 0.37773826718330383, 2.3465638160705566, 19200, 0.00019726865035146003]
346
+ 2024-07-31 20:24:22,767 vctk_base INFO ====> Epoch: 111
347
+ 2024-07-31 20:25:35,906 vctk_base INFO Train Epoch: 112 [49%]
348
+ 2024-07-31 20:25:35,906 vctk_base INFO [2.6502349376678467, 2.0667502880096436, 4.026083946228027, 17.9472713470459, 0.3715100884437561, 2.469026565551758, 19400, 0.0001972439917701661]
349
+ 2024-07-31 20:26:24,235 vctk_base INFO ====> Epoch: 112
350
+ 2024-07-31 20:27:53,845 vctk_base INFO Train Epoch: 113 [64%]
351
+ 2024-07-31 20:27:53,845 vctk_base INFO [2.6136341094970703, 1.9647881984710693, 4.162574768066406, 18.27005958557129, 0.37126827239990234, 2.4823293685913086, 19600, 0.0001972193362711948]
352
+ 2024-07-31 20:28:26,995 vctk_base INFO ====> Epoch: 113
353
+ 2024-07-31 20:30:09,743 vctk_base INFO Train Epoch: 114 [79%]
354
+ 2024-07-31 20:30:09,744 vctk_base INFO [2.7493088245391846, 1.8560041189193726, 3.7089788913726807, 18.18035888671875, 0.42066124081611633, 2.472740411758423, 19800, 0.0001971946838541609]
355
+ 2024-07-31 20:30:29,149 vctk_base INFO ====> Epoch: 114
356
+ 2024-07-31 20:32:25,643 vctk_base INFO Train Epoch: 115 [94%]
357
+ 2024-07-31 20:32:25,644 vctk_base INFO [2.6841301918029785, 1.905001163482666, 3.7393548488616943, 17.803064346313477, 0.4300490617752075, 2.464689254760742, 20000, 0.0001971700345186791]
358
+ 2024-07-31 20:32:31,243 vctk_base INFO Saving model and optimizer state at iteration 115 to ./logs/vctk_base/G_20000.pth
359
+ 2024-07-31 20:32:31,734 vctk_base INFO Saving model and optimizer state at iteration 115 to ./logs/vctk_base/D_20000.pth
360
+ 2024-07-31 20:32:38,032 vctk_base INFO ====> Epoch: 115
361
+ 2024-07-31 20:34:37,825 vctk_base INFO ====> Epoch: 116
362
+ 2024-07-31 20:35:15,546 vctk_base INFO Train Epoch: 117 [9%]
363
+ 2024-07-31 20:35:15,547 vctk_base INFO [2.6695685386657715, 2.032029151916504, 3.838996171951294, 18.040416717529297, 0.34701502323150635, 2.619849443435669, 20200, 0.0001971207450908312]
364
+ 2024-07-31 20:36:38,301 vctk_base INFO ====> Epoch: 117
365
+ 2024-07-31 20:37:28,369 vctk_base INFO Train Epoch: 118 [24%]
366
+ 2024-07-31 20:37:28,370 vctk_base INFO [2.705043315887451, 1.9148461818695068, 3.983602285385132, 17.786283493041992, 0.3931027948856354, 2.4227640628814697, 20400, 0.00019709610499769482]
367
+ 2024-07-31 20:38:49,274 vctk_base INFO ====> Epoch: 118
368
+ 2024-07-31 20:39:54,522 vctk_base INFO Train Epoch: 119 [39%]
369
+ 2024-07-31 20:39:54,524 vctk_base INFO [2.645965814590454, 1.8875361680984497, 4.101812839508057, 17.905902862548828, 0.39274656772613525, 2.404343605041504, 20600, 0.0001970714679845701]
370
+ 2024-07-31 20:40:49,896 vctk_base INFO ====> Epoch: 119
371
+ 2024-07-31 20:42:09,640 vctk_base INFO Train Epoch: 120 [54%]
372
+ 2024-07-31 20:42:09,641 vctk_base INFO [2.6330080032348633, 2.045253276824951, 3.788076400756836, 17.881996154785156, 0.41952621936798096, 2.3749547004699707, 20800, 0.000197046834051072]
373
+ 2024-07-31 20:42:52,092 vctk_base INFO ====> Epoch: 120
374
+ 2024-07-31 20:44:23,971 vctk_base INFO Train Epoch: 121 [69%]
375
+ 2024-07-31 20:44:23,972 vctk_base INFO [2.6938228607177734, 1.939889907836914, 3.769407272338867, 17.86324119567871, 0.4170839190483093, 2.463261127471924, 21000, 0.00019702220319681561]
376
+ 2024-07-31 20:44:28,996 vctk_base INFO Saving model and optimizer state at iteration 121 to ./logs/vctk_base/G_21000.pth
377
+ 2024-07-31 20:44:29,455 vctk_base INFO Saving model and optimizer state at iteration 121 to ./logs/vctk_base/D_21000.pth
378
+ 2024-07-31 20:44:58,578 vctk_base INFO ====> Epoch: 121
379
+ 2024-07-31 20:46:44,290 vctk_base INFO Train Epoch: 122 [84%]
380
+ 2024-07-31 20:46:44,290 vctk_base INFO [2.709864616394043, 1.9592831134796143, 3.756845474243164, 18.03701400756836, 0.39926597476005554, 2.622509002685547, 21200, 0.000196997575421416]
381
+ 2024-07-31 20:46:59,985 vctk_base INFO ====> Epoch: 122
382
+ 2024-07-31 20:49:20,160 vctk_base INFO Train Epoch: 123 [99%]
383
+ 2024-07-31 20:49:20,160 vctk_base INFO [2.817477226257324, 2.1873228549957275, 3.75394868850708, 17.743040084838867, 0.42045527696609497, 2.5293900966644287, 21400, 0.00019697295072448832]
384
+ 2024-07-31 20:49:22,121 vctk_base INFO ====> Epoch: 123
385
+ 2024-07-31 20:51:22,671 vctk_base INFO ====> Epoch: 124
386
+ 2024-07-31 20:52:04,656 vctk_base INFO Train Epoch: 125 [14%]
387
+ 2024-07-31 20:52:04,657 vctk_base INFO [2.6281521320343018, 1.832892656326294, 4.041123390197754, 17.598670959472656, 0.3512026071548462, 2.428828001022339, 21600, 0.00019692371056450955]
388
+ 2024-07-31 20:53:23,937 vctk_base INFO ====> Epoch: 125
389
+ 2024-07-31 20:54:19,876 vctk_base INFO Train Epoch: 126 [29%]
390
+ 2024-07-31 20:54:19,877 vctk_base INFO [2.6689653396606445, 1.92620050907135, 3.872020721435547, 17.63397789001465, 0.3861956000328064, 2.3920764923095703, 21800, 0.000196899095100689]
391
+ 2024-07-31 20:55:36,466 vctk_base INFO ====> Epoch: 126
392
+ 2024-07-31 20:56:44,448 vctk_base INFO Train Epoch: 127 [44%]
393
+ 2024-07-31 20:56:44,450 vctk_base INFO [2.772686004638672, 1.9362666606903076, 3.4218716621398926, 17.814638137817383, 0.3949701488018036, 2.619955062866211, 22000, 0.0001968744827138014]
394
+ 2024-07-31 20:56:50,451 vctk_base INFO Saving model and optimizer state at iteration 127 to ./logs/vctk_base/G_22000.pth
395
+ 2024-07-31 20:56:50,919 vctk_base INFO Saving model and optimizer state at iteration 127 to ./logs/vctk_base/D_22000.pth
396
+ 2024-07-31 20:57:43,440 vctk_base INFO ====> Epoch: 127
397
+ 2024-07-31 20:59:16,322 vctk_base INFO Train Epoch: 128 [59%]
398
+ 2024-07-31 20:59:16,322 vctk_base INFO [2.660921335220337, 1.985673427581787, 4.1565656661987305, 17.873605728149414, 0.4015752971172333, 2.616567373275757, 22200, 0.00019684987340346216]
399
+ 2024-07-31 20:59:54,387 vctk_base INFO ====> Epoch: 128
400
+ 2024-07-31 21:01:30,981 vctk_base INFO Train Epoch: 129 [74%]
401
+ 2024-07-31 21:01:30,981 vctk_base INFO [2.7021560668945312, 1.995058536529541, 3.9681735038757324, 17.80048370361328, 0.4086448550224304, 2.188969612121582, 22400, 0.00019682526716928672]
402
+ 2024-07-31 21:01:55,922 vctk_base INFO ====> Epoch: 129
403
+ 2024-07-31 21:03:45,631 vctk_base INFO Train Epoch: 130 [89%]
404
+ 2024-07-31 21:03:45,632 vctk_base INFO [2.8997881412506104, 1.8227810859680176, 3.796173572540283, 17.967849731445312, 0.38954436779022217, 2.628443479537964, 22600, 0.00019680066401089056]
405
+ 2024-07-31 21:03:56,753 vctk_base INFO ====> Epoch: 130
406
+ 2024-07-31 21:05:57,679 vctk_base INFO ====> Epoch: 131
407
+ 2024-07-31 21:06:29,911 vctk_base INFO Train Epoch: 132 [3%]
408
+ 2024-07-31 21:06:29,912 vctk_base INFO [2.7665562629699707, 1.877068042755127, 3.7602651119232178, 17.78942108154297, 0.38983428478240967, 2.37406587600708, 22800, 0.00019675146691989817]
409
+ 2024-07-31 21:07:58,224 vctk_base INFO ====> Epoch: 132
410
+ 2024-07-31 21:08:44,403 vctk_base INFO Train Epoch: 133 [18%]
411
+ 2024-07-31 21:08:44,403 vctk_base INFO [2.7101268768310547, 2.0062737464904785, 4.191795825958252, 17.891399383544922, 0.4161204993724823, 2.507888078689575, 23000, 0.00019672687298653317]
412
+ 2024-07-31 21:08:50,247 vctk_base INFO Saving model and optimizer state at iteration 133 to ./logs/vctk_base/G_23000.pth
413
+ 2024-07-31 21:08:50,706 vctk_base INFO Saving model and optimizer state at iteration 133 to ./logs/vctk_base/D_23000.pth
414
+ 2024-07-31 21:10:06,136 vctk_base INFO ====> Epoch: 133
415
+ 2024-07-31 21:11:05,733 vctk_base INFO Train Epoch: 134 [33%]
416
+ 2024-07-31 21:11:05,734 vctk_base INFO [2.710785150527954, 2.1154394149780273, 4.949193477630615, 18.15772247314453, 0.3854277729988098, 2.756701707839966, 23200, 0.00019670228212740986]
417
+ 2024-07-31 21:12:06,626 vctk_base INFO ====> Epoch: 134
418
+ 2024-07-31 21:13:19,062 vctk_base INFO Train Epoch: 135 [48%]
419
+ 2024-07-31 21:13:19,062 vctk_base INFO [2.8089773654937744, 1.9395824670791626, 3.958655595779419, 17.819007873535156, 0.35090792179107666, 2.6560781002044678, 23400, 0.00019667769434214392]
420
+ 2024-07-31 21:14:06,886 vctk_base INFO ====> Epoch: 135
421
+ 2024-07-31 21:15:33,539 vctk_base INFO Train Epoch: 136 [63%]
422
+ 2024-07-31 21:15:33,540 vctk_base INFO [2.692657709121704, 1.9194254875183105, 3.9680192470550537, 17.422657012939453, 0.3957674205303192, 2.5080041885375977, 23600, 0.00019665310963035113]
423
+ 2024-07-31 21:16:08,067 vctk_base INFO ====> Epoch: 136