# model / config_nsf.yaml
# qwp's picture
# Upload config_nsf.yaml
# 3fc3c5b
K_step: 1000
accumulate_grad_batches: 1
audio_num_mel_bins: 128
audio_sample_rate: 44100
binarization_args:
shuffle: false
with_align: true
with_f0: true
with_hubert: true
with_spk_embed: false
with_wav: false
binarizer_cls: preprocessing.SVCpre.SVCBinarizer
binary_data_dir: data/binary/rewrite
check_val_every_n_epoch: 10
choose_test_manually: false
clip_grad_norm: 1
config_path: training/config_nsf.yaml
content_cond_steps: []
cwt_add_f0_loss: false
cwt_hidden_size: 128
cwt_layers: 2
cwt_loss: l1
cwt_std_scale: 0.8
datasets:
- opencpop
debug: false
dec_ffn_kernel_size: 9
dec_layers: 4
decay_steps: 40000
decoder_type: fft
dict_dir: ''
diff_decoder_type: wavenet
diff_loss_type: l2
dilation_cycle_length: 4
dropout: 0.1
ds_workers: 4
dur_enc_hidden_stride_kernel:
- 0,2,3
- 0,2,3
- 0,1,3
dur_loss: mse
dur_predictor_kernel: 3
dur_predictor_layers: 5
enc_ffn_kernel_size: 9
enc_layers: 4
encoder_K: 8
encoder_type: fft
endless_ds: false
f0_bin: 256
f0_max: 1100.0
f0_min: 40.0
ffn_act: gelu
ffn_padding: SAME
fft_size: 2048
fmax: 16000
fmin: 40
fs2_ckpt: ''
gaussian_start: true
gen_dir_name: ''
gen_tgt_spk_id: -1
hidden_size: 256
hop_size: 512
hubert_gpu: true
hubert_path: checkpoints/hubert/hubert_soft.pt
infer: false
keep_bins: 128
lambda_commit: 0.25
lambda_energy: 0.0
lambda_f0: 1.0
lambda_ph_dur: 0.3
lambda_sent_dur: 1.0
lambda_uv: 1.0
lambda_word_dur: 1.0
load_ckpt: ''
log_interval: 100
loud_norm: false
lr: 0.0012
max_beta: 0.02
max_epochs: 3000
max_eval_sentences: 1
max_eval_tokens: 60000
max_frames: 42000
max_input_tokens: 60000
max_sentences: 80
max_tokens: 128000
max_updates: 1000000
mel_loss: ssim:0.5|l1:0.5
mel_vmax: 1.5
mel_vmin: -6.0
min_level_db: -120
no_fs2: true
norm_type: gn
num_ckpt_keep: 10
num_heads: 2
num_sanity_val_steps: 1
num_spk: 1
num_test_samples: 0
num_valid_plots: 10
optimizer_adam_beta1: 0.9
optimizer_adam_beta2: 0.98
out_wav_norm: false
pe_ckpt: checkpoints/0102_xiaoma_pe/model_ckpt_steps_60000.ckpt
pe_enable: false
perform_enhance: true
pitch_ar: false
pitch_enc_hidden_stride_kernel:
- 0,2,5
- 0,2,5
- 0,2,5
pitch_extractor: parselmouth
pitch_loss: l2
pitch_norm: log
pitch_type: frame
pndm_speedup: 10
pre_align_args:
allow_no_txt: false
denoise: false
forced_align: mfa
txt_processor: zh_g2pM
use_sox: true
use_tone: false
pre_align_cls: data_gen.singing.pre_align.SingingPreAlign
predictor_dropout: 0.5
predictor_grad: 0.1
predictor_hidden: -1
predictor_kernel: 5
predictor_layers: 5
prenet_dropout: 0.5
prenet_hidden_size: 256
pretrain_fs_ckpt: ''
processed_data_dir: xxx
profile_infer: false
raw_data_dir: data/raw/rewrite
ref_norm_layer: bn
rel_pos: true
reset_phone_dict: true
residual_channels: 384
residual_layers: 20
save_best: false
save_ckpt: true
save_codes:
- configs
- modules
- src
- utils
save_f0: true
save_gt: false
schedule_type: linear
seed: 1234
sort_by_len: true
speaker_id: rewrite
spec_max:
- -0.013646591454744339
- -0.03492093086242676
- -0.023477505892515182
- 0.2240731418132782
- 0.5360018610954285
- 0.5375948548316956
- 0.7070308327674866
- 0.6859050989151001
- 0.6795750856399536
- 0.7403997778892517
- 0.6734148263931274
- 0.6695026755332947
- 0.7384544610977173
- 0.7818752527236938
- 0.8294143676757812
- 0.8030390739440918
- 0.8277796506881714
- 0.7842329740524292
- 0.7762189507484436
- 0.8058144450187683
- 0.7267101407051086
- 0.6962370276451111
- 0.7303498983383179
- 0.6225879788398743
- 0.6888490319252014
- 0.7364280223846436
- 0.6153574585914612
- 0.707781195640564
- 0.759908139705658
- 0.6678041219711304
- 0.6939466595649719
- 0.7447654008865356
- 0.7110893130302429
- 0.7236943244934082
- 0.7394376397132874
- 0.6850912570953369
- 0.6462180614471436
- 0.6484248042106628
- 0.7520641088485718
- 0.616317093372345
- 0.6223531365394592
- 0.6242066621780396
- 0.584526538848877
- 0.5907259583473206
- 0.5729789137840271
- 0.6080450415611267
- 0.5875643491744995
- 0.5783573985099792
- 0.6163155436515808
- 0.5643542408943176
- 0.509316623210907
- 0.5033355355262756
- 0.5178366303443909
- 0.435567170381546
- 0.5041572451591492
- 0.4857909083366394
- 0.441276878118515
- 0.45493438839912415
- 0.48246824741363525
- 0.39058661460876465
- 0.3842998743057251
- 0.38674700260162354
- 0.35111427307128906
- 0.3330616056919098
- 0.3221135139465332
- 0.25813382863998413
- 0.2895975708961487
- 0.25998613238334656
- 0.28663650155067444
- 0.2345530092716217
- 0.14880014955997467
- 0.08881813287734985
- 0.08260829001665115
- 0.2093590795993805
- 0.0817270427942276
- 0.15079361200332642
- 0.14103257656097412
- 0.08314933627843857
- 0.12403995543718338
- 0.11101309210062027
- 0.03476237133145332
- 0.05545520782470703
- 0.09279946237802505
- 0.14412818849086761
- 0.05606376752257347
- 0.08092702925205231
- 0.08070675283670425
- 0.04966948926448822
- 0.06069479137659073
- 0.023000987246632576
- -0.013332114554941654
- -0.10289011895656586
- -0.13337981700897217
- 0.027412841096520424
- -0.11937981098890305
- -0.11094077676534653
- -0.14477558434009552
- -0.10545164346694946
- -0.24568623304367065
- -0.24235208332538605
- -0.34665119647979736
- -0.2769622206687927
- -0.27357393503189087
- -0.35512396693229675
- -0.2698093354701996
- -0.17397552728652954
- -0.19787748157978058
- -0.26635631918907166
- -0.16491562128067017
- -0.3416293263435364
- -0.2779384255409241
- -0.46012505888938904
- -0.37809446454048157
- -0.3575749099254608
- -0.19189275801181793
- -0.14034831523895264
- -0.3368651270866394
- -0.42712533473968506
- -0.37624332308769226
- -0.544425368309021
- -0.5833626389503479
- -0.5845077633857727
- -0.7097123265266418
- -0.7711118459701538
- -0.8836023211479187
- -0.9094525575637817
- -0.928654134273529
- -0.8916470408439636
spec_min:
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
- -4.999994277954102
spk_cond_steps: []
stop_token_weight: 5.0
task_cls: training.task.SVC_task.SVCTask
test_ids: []
test_input_dir: ''
test_num: 0
test_prefixes:
- test
test_set_name: test
timesteps: 1000
train_set_name: train
use_crepe: true
use_denoise: false
use_energy_embed: false
use_gt_dur: false
use_gt_f0: false
use_midi: false
use_nsf: true
use_pitch_embed: true
use_pos_embed: true
use_spk_embed: false
use_spk_id: false
use_split_spk_id: false
use_uv: false
use_var_enc: false
use_vec: false
val_check_interval: 2000
valid_num: 0
valid_set_name: valid
vocoder: network.vocoders.nsf_hifigan.NsfHifiGAN
vocoder_ckpt: checkpoints/nsf_hifigan/model
warmup_updates: 2000
wav2spec_eps: 1.0e-6
weight_decay: 0
win_size: 2048
work_dir: checkpoints/rewrite