{
  "_aggregator_pretrained_model_name_or_path": "./work/pretrained_models/distil-whisper-large-v3",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "aggregator_attention_heads": 20,
  "aggregator_d_model": 1280,
  "aggregator_ffn_dim": 5120,
  "aggregator_init_cross_attn_v_proj_as_identity": false,
  "aggregator_layerdrop": 0.0,
  "aggregator_layers": 2,
  "aggregator_max_source_positions": 1500,
  "aggregator_max_target_positions": 448,
  "aggregator_pad_token_id": 50256,
  "aggregator_prefix_tokens": [
    50258,
    50259,
    50360,
    50364
  ],
  "aggregator_pretrained_model_name_or_path": null,
  "aggregator_scale_embedding": false,
  "architectures": [
    "TasteTokenizer"
  ],
  "asr_config": {
    "_name_or_path": "./distil-large-v3",
    "activation_dropout": 0.0,
    "activation_function": "gelu",
    "apply_spec_augment": false,
    "architectures": [
      "WhisperForConditionalGeneration"
    ],
    "attention_dropout": 0.0,
    "begin_suppress_tokens": [
      220,
      50257
    ],
    "bos_token_id": 50257,
    "classifier_proj_size": 256,
    "d_model": 1280,
    "decoder_attention_heads": 20,
    "decoder_ffn_dim": 5120,
    "decoder_layerdrop": 0.0,
    "decoder_layers": 2,
    "decoder_start_token_id": 50258,
    "dropout": 0.0,
    "encoder_attention_heads": 20,
    "encoder_ffn_dim": 5120,
    "encoder_layerdrop": 0.0,
    "encoder_layers": 32,
    "eos_token_id": 50257,
    "init_std": 0.02,
    "mask_feature_length": 10,
    "mask_feature_min_masks": 0,
    "mask_feature_prob": 0.0,
    "mask_time_length": 10,
    "mask_time_min_masks": 2,
    "mask_time_prob": 0.05,
    "max_length": 448,
    "max_source_positions": 1500,
    "max_target_positions": 448,
    "median_filter_width": 7,
    "model_type": "whisper",
    "num_hidden_layers": 32,
    "num_mel_bins": 128,
    "scale_embedding": false,
    "torch_dtype": "float32",
    "transformers.js_config": {
      "use_external_data_format": {
        "encoder_model.onnx": true
      }
    },
    "use_cache": true,
    "use_weighted_layer_sum": false,
    "vocab_size": 51866
  },
  "asr_encoder_shallow_hidden_index": 6,
  "attention_dropout": 0.0,
  "dropout": 0.0,
  "has_quantizer": true,
  "ignore_index": -1,
  "model_type": "taste",
  "num_hidden_layers": 32,
  "quantizer_codebook_dim": 256,
  "quantizer_codebook_size": 512,
  "quantizer_decay": 0.99,
  "quantizer_input_dim": 1280,
  "quantizer_kmeans_init": true,
  "quantizer_num_quantizers": 4,
  "quantizer_quantize_dropout": false,
  "quantizer_type": "residual_vq",
  "quantizer_use_implicit_neural_codebook": true,
  "torch_dtype": "float32",
  "training_config": {
    "accumulate_grad_batches": 12,
    "aggregator_partial_freeze": true,
    "apply_quantization_prob": 1.0,
    "ckpt_every_n_steps": 1000,
    "delete_asr_decoder": false,
    "delete_vocoder": false,
    "devices": [
      0
    ],
    "freeze_aggregator": false,
    "freeze_asr_decoder": true,
    "freeze_asr_encoder": true,
    "gradient_clip_val": 4.0,
    "log_every_n_steps": 10,
    "lr_scheduler": "warmup_cosine_decay",
    "lr_scheduler_config": {
      "warmup_steps": 2000
    },
    "max_epochs": 3,
    "optimizer": "adam",
    "optimizer_config": {
      "lr": 0.0001
    },
    "output_dir": "/home/andybi7676/projects/IntelliGen/src/taste/egs/emilia_libritts/exp/taste_finetune",
    "precision": "bf16",
    "return_quantization_loss": true,
    "strategy": "ddp_find_unused_parameters_true",
    "val_check_interval": 0.2
  },
  "transformers_version": "4.55.0",
  "unit_decoder_adopt_early_fusion": false,
  "unit_decoder_encoders_common_kwargs": {
    "attention_dropout_rate": 0.0,
    "attention_heads": 8,
    "dropout_rate": 0.1,
    "input_layer": "linear",
    "linear_units": 2048,
    "macaron_style": false,
    "normalize_before": true,
    "pos_enc_layer_type": "rel_pos_espnet",
    "positional_dropout_rate": 0.1,
    "selfattention_layer_type": "rel_selfattn",
    "static_chunk_size": 1,
    "use_cnn_module": false,
    "use_dynamic_chunk": false,
    "use_dynamic_left_chunk": false
  },
  "unit_decoder_fuse_encoded_text_speech_kwargs": {
    "normalize": true,
    "use_layer_norm": false,
    "use_trainable_weights": true,
    "weights_init_type": "balanced"
  },
  "unit_decoder_fuse_encoded_text_speech_type": "weighted_sum",
  "unit_decoder_lm_decoder_input_size": 1024,
  "unit_decoder_lm_decoder_kwargs": {
    "attention_dropout_rate": 0,
    "attention_heads": 8,
    "dropout_rate": 0.1,
    "input_layer": "linear_legacy",
    "linear_units": 2048,
    "pos_enc_layer_type": "rel_pos_espnet",
    "positional_dropout_rate": 0.1,
    "selfattention_layer_type": "rel_selfattn",
    "static_chunk_size": 1
  },
  "unit_decoder_lm_decoder_num_blocks": 7,
  "unit_decoder_lm_decoder_output_size": 1024,
  "unit_decoder_lm_decoder_units_vocab_size": 4096,
  "unit_decoder_skip_text_affine_layer": false,
  "unit_decoder_speaker_embed_size": 192,
  "unit_decoder_speech_embed_encoder_input_size": 512,
  "unit_decoder_speech_embed_encoder_num_blocks": 2,
  "unit_decoder_speech_embed_encoder_output_size": 1024,
  "unit_decoder_speech_embedding_size": 1280,
  "unit_decoder_text_token_encoder_input_size": 512,
  "unit_decoder_text_token_encoder_num_blocks": 3,
  "unit_decoder_text_token_encoder_output_size": 1024,
  "unit_decoder_text_token_vocab_size": 51866,
  "unit_decoder_use_speaker_embedding": true,
  "use_cache": true,
  "vocab_size": 51866
}