{
"_name_or_path": "",
"accept_hidden_layer": -1,
"acoustic_delay": null,
"add_cross_attention": false,
"architectures": [
"RaonDuplexModel"
],
"audio_encoder_config": {
"_name_or_path": "",
"activation_function": "gelu",
"add_cross_attention": false,
"architectures": null,
"attention_dropout": 0.0,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"downsample_factor": 4,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_attention_heads": 32,
"encoder_layers": 32,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"head_dim": 64,
"hidden_act": "silu",
"hidden_size": 1280,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 5120,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"length_penalty": 1.0,
"max_length": 20,
"max_position_embeddings": 1500,
"min_length": 0,
"model_type": "voxtral_realtime_encoder",
"no_repeat_ngram_size": 0,
"num_attention_heads": 32,
"num_beam_groups": 1,
"num_beams": 1,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"num_mel_bins": 128,
"num_return_sequences": 1,
"output_attentions": false,
"output_embedding_scale": 1.0,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": 151679,
"prefix": null,
"problem_type": null,
"projector_hidden_act": "gelu",
"projector_output_size": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"rms_norm_eps": 1e-05,
"rope_theta": 1000000.0,
"sampling_rate": 24000,
"sep_token_id": null,
"skip_projector": true,
"sliding_window": 750,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": false
},
"audio_tokenizer_config": {
"_frame_rate": 12.5,
"_name_or_path": "kyutai/mimi",
"add_cross_attention": false,
"architectures": [
"MimiModel"
],
"attention_bias": false,
"attention_dropout": 0.0,
"audio_channels": 1,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"codebook_dim": 256,
"codebook_size": 2048,
"compress": 2,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"dilation_growth_rate": 2,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"head_dim": 64,
"hidden_act": "gelu",
"hidden_size": 512,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 2048,
"is_decoder": false,
"is_encoder_decoder": false,
"kernel_size": 7,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"last_kernel_size": 3,
"layer_scale_initial_scale": 0.01,
"length_penalty": 1.0,
"max_length": 20,
"max_position_embeddings": 8000,
"min_length": 0,
"model_type": "mimi",
"no_repeat_ngram_size": 0,
"norm_eps": 1e-05,
"normalize": false,
"num_attention_heads": 8,
"num_beam_groups": 1,
"num_beams": 1,
"num_filters": 64,
"num_hidden_layers": 8,
"num_key_value_heads": 8,
"num_quantizers": 32,
"num_residual_layers": 1,
"num_return_sequences": 1,
"num_semantic_quantizers": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_mode": "constant",
"pad_token_id": 151679,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"residual_kernel_size": 3,
"return_dict": true,
"return_dict_in_generate": false,
"rope_theta": 10000.0,
"sampling_rate": 24000,
"sep_token_id": null,
"sliding_window": 250,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"trim_right_ratio": 1.0,
"typical_p": 1.0,
"upsample_groups": 512,
"upsampling_ratios": [
8,
6,
5,
4
],
"use_bfloat16": true,
"use_cache": false,
"use_causal_conv": true,
"use_conv_shortcut": false,
"use_streaming": false,
"vector_quantization_hidden_dimension": 256
},
"aut_is_causal": true,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"code_predictor_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": null,
"attention_bias": false,
"attention_dropout": 0,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 1024,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"layer_types": [
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention"
],
"length_penalty": 1.0,
"max_length": 20,
"max_position_embeddings": 32768,
"max_window_layers": 28,
"min_length": 0,
"model_type": "qwen3_omni_moe_talker_code_predictor",
"no_repeat_ngram_size": 0,
"num_attention_heads": 16,
"num_beam_groups": 1,
"num_beams": 1,
"num_code_groups": 16,
"num_hidden_layers": 5,
"num_key_value_heads": 8,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": 151679,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 1000000,
"sep_token_id": null,
"sliding_window": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": false,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": true,
"use_cache": false,
"use_sliding_window": false,
"vocab_size": 2048
},
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"depth_loss_weight": 0.25,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"duplex_bc_token_id": 151673,
"duplex_end_pad_token_id": 151678,
"duplex_pad_token_id": 151677,
"duplex_sil_token_id": 151672,
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"hidden_size": null,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"input_adaptor_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": null,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_config": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"hidden_size": 4096,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"input_size": 5120,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"length_penalty": 1.0,
"max_length": 20,
"min_length": 0,
"model_type": "embedding_adaptor",
"no_repeat_ngram_size": 0,
"norm_eps": 1e-06,
"num_beam_groups": 1,
"num_beams": 1,
"num_layers": 2,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"output_size": 4096,
"output_time_scale": 1.0,
"pad_token_id": null,
"post_norm_init_scale": 0.02,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"sep_token_id": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": false,
"use_post_norm": true
},
"input_num_code_groups": null,
"is_decoder": false,
"is_encoder_decoder": false,
"keys_to_ignore_at_inference": [
"past_key_values"
],
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"length_penalty": 1.0,
"max_length": 20,
"min_length": 0,
"model_type": "raon_duplex",
"no_audio_in_sil": false,
"no_repeat_ngram_size": 0,
"num_beam_groups": 1,
"num_beams": 1,
"num_return_sequences": 1,
"num_talker_layers": 4,
"output_adaptor_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": null,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_config": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"hidden_size": null,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"input_size": 512,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"length_penalty": 1.0,
"max_length": 20,
"min_length": 0,
"model_type": "embedding_adaptor",
"no_repeat_ngram_size": 0,
"norm_eps": 1e-06,
"num_beam_groups": 1,
"num_beams": 1,
"num_layers": 2,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"output_size": 4096,
"output_time_scale": 1,
"pad_token_id": null,
"post_norm_init_scale": 0.02,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"sep_token_id": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": false,
"use_post_norm": true
},
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": 151679,
"prefix": null,
"problem_type": null,
"proj_code_bias": true,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"sep_token_id": null,
"sequence_mode": "uta",
"speaker_encoder_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": null,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": null,
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"encoder_type": "ecapa_tdnn",
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"frame_rate": 12.5,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"input_size": 512,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"length_penalty": 1.0,
"max_length": 20,
"max_seconds": 10.0,
"min_length": 0,
"min_seconds": 2.0,
"model_type": "speaker_encoder",
"no_repeat_ngram_size": 0,
"num_beam_groups": 1,
"num_beams": 1,
"num_heads": 8,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"output_size": 4096,
"pad_token_id": null,
"prefix": null,
"pretrained_dim": 192,
"pretrained_model_id": "speechbrain/spkrec-ecapa-voxceleb",
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"sep_token_id": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": false
},
"supports_audio_input": true,
"supports_audio_output": true,
"suppress_tokens": null,
"talker_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": null,
"attention_bias": false,
"attention_dropout": 0.0,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": null,
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 2048,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 6144,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"layer_types": [
"full_attention",
"full_attention",
"full_attention",
"full_attention"
],
"length_penalty": 1.0,
"max_length": 20,
"max_position_embeddings": 32768,
"max_window_layers": 28,
"min_length": 0,
"model_type": "qwen3",
"no_repeat_ngram_size": 0,
"num_attention_heads": 16,
"num_beam_groups": 1,
"num_beams": 1,
"num_hidden_layers": 4,
"num_key_value_heads": 8,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": 151679,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 10000.0,
"sep_token_id": null,
"sliding_window": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": false,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": false,
"use_cache": true,
"use_sliding_window": false,
"vocab_size": 153723
},
"task_specific_params": null,
"temperature": 1.0,
"text_model_config": {
"_name_or_path": "",
"add_cross_attention": false,
"architectures": [
"Qwen3ForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"bad_words_ids": null,
"begin_suppress_tokens": null,
"bos_token_id": null,
"chunk_size_feed_forward": 0,
"cross_attention_hidden_size": null,
"decoder_start_token_id": null,
"diversity_penalty": 0.0,
"do_sample": false,
"dtype": "bfloat16",
"early_stopping": false,
"encoder_no_repeat_ngram_size": 0,
"eos_token_id": null,
"exponential_decay_length_penalty": null,
"finetuning_task": null,
"forced_bos_token_id": null,
"forced_eos_token_id": null,
"head_dim": 128,
"hidden_act": "silu",
"hidden_size": 4096,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1"
},
"initializer_range": 0.02,
"intermediate_size": 12288,
"is_decoder": false,
"is_encoder_decoder": false,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1
},
"layer_types": [
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention",
"full_attention"
],
"length_penalty": 1.0,
"max_length": 20,
"max_position_embeddings": 262144,
"max_window_layers": 36,
"min_length": 0,
"model_type": "qwen3",
"no_repeat_ngram_size": 0,
"num_attention_heads": 32,
"num_beam_groups": 1,
"num_beams": 1,
"num_hidden_layers": 36,
"num_key_value_heads": 8,
"num_return_sequences": 1,
"output_attentions": false,
"output_hidden_states": false,
"output_scores": false,
"pad_token_id": null,
"prefix": null,
"problem_type": null,
"pruned_heads": {},
"remove_invalid_values": false,
"repetition_penalty": 1.0,
"return_dict": true,
"return_dict_in_generate": false,
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 5000000,
"sep_token_id": null,
"sliding_window": null,
"suppress_tokens": null,
"task_specific_params": null,
"temperature": 1.0,
"tf_legacy_loss": false,
"tie_encoder_decoder": false,
"tie_word_embeddings": false,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"typical_p": 1.0,
"use_bfloat16": true,
"use_cache": true,
"use_sliding_window": false,
"vocab_size": 153723
},
"tf_legacy_loss": false,
"thinker_to_talker_intermediate_size": 6144,
"thinker_to_talker_pre_norm": false,
"thinker_to_talker_projection_mode": "mlp",
"tie_encoder_decoder": false,
"tie_word_embeddings": true,
"tokenizer_class": null,
"top_k": 50,
"top_p": 1.0,
"torchscript": false,
"training_metadata": {
"args": {
"adam_beta1": 0.9,
"adam_beta2": 0.95,
"adam_eps": 1e-08,
"adapter_load": null,
"adapters": [],
"add_qkv_bias": false,
"add_version": true,
"agent_template": null,
"apply_query_key_layer_scaling": true,
"architectures": "Qwen3ForCausalLM",
"attention_backend": "flash",
"attention_dropout": 0.0,
"attention_softmax_in_fp32": true,
"attn_impl": null,
"auto_detect_ckpt_format": true,
"beta": 0.1,
"bf16": true,
"bnb_4bit_compute_dtype": "bfloat16",
"bnb_4bit_quant_storage": null,
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": true,
"cached_dataset": [
"dataset-is-passed-in-train-data-path"
],
"calculate_KL": null,
"calculate_per_token_loss": true,
"center_rewards_coefficient": null,
"ckpt_dir": null,
"ckpt_format": "torch_dist",
"clip_grad": 1.0,
"columns": {},
"context_parallel_size": 1,
"cross_entropy_fusion_impl": "native",
"cross_entropy_loss_fusion": true,
"custom_dataset_info": [],
"custom_register_path": [],
"data_seed": 42,
"dataloader_persistent_workers": false,
"dataloader_prefetch_factor": 64,
"dataloader_type": "external",
"dataset": [],
"dataset_num_proc": 1,
"dataset_shuffle": true,
"ddp_backend": null,
"ddp_timeout": 18000000,
"decoder_first_pipeline_num_layers": null,
"decoder_last_pipeline_num_layers": null,
"desirable_weight": 1.0,
"deterministic_mode": false,
"device_map": null,
"disable_bias_linear": true,
"distributed_backend": "nccl",
"distributed_timeout_minutes": 300000,
"download_mode": "reuse_dataset_if_exists",
"enable_channel_loss": false,
"enable_dft_loss": false,
"eval_interval": 1000,
"eval_iters": -1,
"exit_on_missing_checkpoint": true,
"exp_avg_dtype": "fp32",
"exp_avg_sq_dtype": "fp32",
"expert_model_parallel_size": 1,
"expert_tensor_parallel_size": 1,
"external_plugins": [],
"extra_args": {
"adapter_load": null,
"adapters": [],
"add_version": true,
"agent_template": null,
"architectures": "Qwen3ForCausalLM",
"attn_impl": null,
"beta": 0.1,
"bnb_4bit_compute_dtype": "bfloat16",
"bnb_4bit_quant_storage": null,
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": true,
"cached_dataset": [
"dataset-is-passed-in-train-data-path"
],
"calculate_KL": null,
"center_rewards_coefficient": null,
"ckpt_dir": null,
"columns": {},
"custom_dataset_info": [],
"custom_register_path": [],
"data_seed": 42,
"dataloader_persistent_workers": false,
"dataloader_prefetch_factor": 64,
"dataset": [],
"dataset_num_proc": 1,
"dataset_shuffle": true,
"ddp_backend": null,
"ddp_timeout": 18000000,
"desirable_weight": 1.0,
"device_map": null,
"download_mode": "reuse_dataset_if_exists",
"enable_channel_loss": false,
"enable_dft_loss": false,
"external_plugins": [],
"f_divergence_type": "reverse_kl",
"freeze_aligner": true,
"freeze_llm": false,
"freeze_parameters": [],
"freeze_parameters_ratio": 0.0,
"freeze_parameters_regex": null,
"freeze_vit": true,
"gradient_checkpointing_kwargs": null,
"hqq_axis": null,
"hub_token": null,
"ignore_args_error": false,
"init_strategy": null,
"initialize_embedding": false,
"interleave_prob": null,
"label_smoothing": 0.0,
"layer_types": null,
"lazy_tokenize": false,
"linear_conv_kernel_dim": null,
"linear_key_head_dim": null,
"linear_num_key_heads": null,
"linear_num_value_heads": null,
"linear_value_head_dim": null,
"llm_architectures": null,
"load_args": false,
"load_data_args": false,
"load_from_cache_file": false,
"local_repo_path": null,
"logprobs": false,
"lora_alpha": 32,
"lora_bias": "none",
"lora_dropout": 0.05,
"lora_dtype": null,
"lora_modules": [],
"lora_rank": 8,
"loss_type": null,
"max_epochs": 1,
"max_length": 4096,
"max_memory": {},
"max_model_len": null,
"max_new_tokens": null,
"max_pixels": null,
"megatron_model_meta": "MMGPTMegatronModelMeta(megatron_model_type='qwen_duplex_mcore', model_types=['qwen_duplex'], convert_mcore2hf=<function convert_mcore2hf_qwen_duplex at 0x785ff60e5800>, convert_hf2mcore=<function convert_hf2mcore_qwen_duplex at 0x785ff6077060>, model_cls=<class 'duplex_model.megatron_model.QwenDuplexGPTModel'>, convert_hf_config=<function convert_mcore2hf_qwen_duplex_config at 0x785ff60e58a0>, get_transformer_layer_spec=None, model_provider=<function model_provider at 0x785ff6058860>, visual_cls=<class 'duplex_model.megatron_model.QwenDuplexModelWrapper'>, extra_args_provider=functools.partial(<function extra_args_provider at 0x785ff60e5940>, override_kwargs={'freeze_parameters_regex': None, 'use_speaker_embedding': True}))",
"mlp_padding_free": false,
"model": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_hf",
"model_author": null,
"model_info": "ModelInfo(model_type='qwen_duplex', model_dir='/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_hf', torch_dtype=torch.bfloat16, max_model_len=None, quant_method=None, quant_bits=None, rope_scaling=None, is_moe_model=False, config=None, task_type='causal_lm', num_labels=None)",
"model_kwargs": {},
"model_meta": "ModelMeta(model_type='qwen_duplex', model_groups=[], template='qwen3', get_function=<function get_qwen_duplex_model_and_tokenizer at 0x785ff86ec180>, model_arch=MultiModelKeys(arch_name='qwen_duplex', embedding=None, module_list=None, lm_head=None, q_proj=None, k_proj=None, v_proj=None, o_proj=None, attention=None, mlp=None, down_proj=None, qkv_proj=None, qk_proj=None, qa_proj=None, qb_proj=None, kv_proj=None, kva_proj=None, kvb_proj=None, language_model=['text_model'], aligner=['input_adaptor', 'output_adaptor', 'proj_code', 'audio_lm_head', 'code_predictor', 'speaker_encoder'], vision_tower=['audio_encoder', 'speaker_encoder'], generator=[]), architectures=['QwenDuplexModel'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, task_type=None, ignore_patterns=None, requires=[], tags=[])",
"model_name": null,
"model_revision": null,
"model_type": "qwen_duplex",
"modules_to_save": [],
"mrope_interleaved": false,
"new_special_tokens": [],
"norm_bbox": null,
"num_beams": 1,
"num_labels": null,
"original_max_position_embeddings": null,
"packing": true,
"packing_length": 4096,
"padded_vocab_size": 153723,
"padding_free": true,
"padding_side": "right",
"partial_rotary_factor": null,
"problem_type": null,
"quant_bits": null,
"quant_method": null,
"ref_adapter_load": null,
"ref_load": null,
"reference_free": false,
"remove_unused_columns": false,
"repetition_penalty": null,
"response_prefix": null,
"rlhf_type": null,
"rope_scaling": null,
"rpo_alpha": null,
"sequence_parallel_size": 1,
"shuffle_buffer_size": 1000,
"split_dataset_ratio": 0.0,
"stop_words": [],
"stopping_strategy": "first_exhausted",
"stream": false,
"streaming": false,
"strict": false,
"system": null,
"target_modules": [
"all-linear"
],
"target_regex": null,
"task_type": "causal_lm",
"temperature": null,
"template": "qwen3",
"template_backend": "swift",
"top_k": null,
"top_logprobs": null,
"top_p": null,
"torch_dtype": "bfloat16",
"train_type": "full",
"trainable_parameters": [],
"trainable_parameters_regex": null,
"truncation_strategy": "delete",
"tuner_backend": "peft",
"undesirable_weight": 1.0,
"use_chat_template": true,
"use_hf": false,
"use_rslora": false,
"use_shared_expert_gate": false,
"use_swift_lora": false,
"val_dataset": [],
"val_dataset_shuffle": false,
"vit_gradient_checkpointing": false
},
"extra_megatron_kwargs": {},
"f_divergence_type": "reverse_kl",
"ffn_hidden_size": 12288,
"finetune": true,
"fp16": false,
"fp8_amax_compute_algo": "max",
"fp8_amax_history_len": 1024,
"fp8_format": null,
"fp8_param_gather": false,
"fp8_recipe": "delayed",
"freeze_aligner": true,
"freeze_llm": false,
"freeze_parameters": [],
"freeze_parameters_ratio": 0.0,
"freeze_parameters_regex": null,
"freeze_vit": true,
"global_batch_size": 64,
"global_world_size": 64,
"gradient_checkpointing_kwargs": null,
"group_query_attention": true,
"hidden_dropout": 0.0,
"hidden_size": 4096,
"hqq_axis": null,
"hub": "<class 'swift.hub.hub.MSHub'>",
"hub_token": null,
"ignore_args_error": false,
"init_strategy": null,
"initialize_embedding": false,
"interleave_prob": null,
"kv_channels": 128,
"kv_lora_rank": 32,
"label_smoothing": 0.0,
"layer_types": null,
"lazy_tokenize": false,
"linear_conv_kernel_dim": null,
"linear_key_head_dim": null,
"linear_num_key_heads": null,
"linear_num_value_heads": null,
"linear_value_head_dim": null,
"llm_architectures": null,
"load": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_mcore",
"load_args": false,
"load_data_args": false,
"load_from_cache_file": false,
"local_rank": 0,
"local_repo_path": null,
"local_world_size": 8,
"log_interval": 1,
"log_memory_to_tensorboard": true,
"log_params_norm": false,
"log_throughput": true,
"log_timers_to_tensorboard": true,
"log_validation_ppl_to_tensorboard": false,
"logging_level": null,
"logprobs": false,
"lora_alpha": 32,
"lora_bias": "none",
"lora_dropout": 0.05,
"lora_dtype": null,
"lora_modules": [],
"lora_rank": 8,
"loss_scale": "default",
"loss_type": null,
"lr": 3e-06,
"lr_decay_iters": null,
"lr_decay_style": "cosine",
"lr_warmup_fraction": 0.05,
"lr_warmup_iters": 0,
"main_grads_dtype": "fp32",
"main_params_dtype": "fp32",
"manual_gc": false,
"manual_gc_interval": 0,
"max_epochs": 1,
"max_length": 4096,
"max_memory": {},
"max_model_len": null,
"max_new_tokens": null,
"max_pixels": null,
"max_position_embeddings": 262144,
"megatron_model_meta": "MMGPTMegatronModelMeta(megatron_model_type='qwen_duplex_mcore', model_types=['qwen_duplex'], convert_mcore2hf=<function convert_mcore2hf_qwen_duplex at 0x785ff60e5800>, convert_hf2mcore=<function convert_hf2mcore_qwen_duplex at 0x785ff6077060>, model_cls=<class 'duplex_model.megatron_model.QwenDuplexGPTModel'>, convert_hf_config=<function convert_mcore2hf_qwen_duplex_config at 0x785ff60e58a0>, get_transformer_layer_spec=None, model_provider=<function model_provider at 0x785ff6058860>, visual_cls=<class 'duplex_model.megatron_model.QwenDuplexModelWrapper'>, extra_args_provider=functools.partial(<function extra_args_provider at 0x785ff60e5940>, override_kwargs={'freeze_parameters_regex': None, 'use_speaker_embedding': True}))",
"micro_batch_size": 1,
"microbatch_group_size_per_virtual_pipeline_stage": null,
"min_lr": 3e-07,
"mlp_padding_free": false,
"model": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_hf",
"model_author": null,
"model_dir": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_hf",
"model_info": "ModelInfo(model_type='qwen_duplex', model_dir='/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_hf', torch_dtype=torch.bfloat16, max_model_len=None, quant_method=None, quant_bits=None, rope_scaling=None, is_moe_model=False, config=None, task_type='causal_lm', num_labels=None)",
"model_kwargs": {},
"model_meta": "ModelMeta(model_type='qwen_duplex', model_groups=[], template='qwen3', get_function=<function get_qwen_duplex_model_and_tokenizer at 0x785ff86ec180>, model_arch=MultiModelKeys(arch_name='qwen_duplex', embedding=None, module_list=None, lm_head=None, q_proj=None, k_proj=None, v_proj=None, o_proj=None, attention=None, mlp=None, down_proj=None, qkv_proj=None, qk_proj=None, qa_proj=None, qb_proj=None, kv_proj=None, kva_proj=None, kvb_proj=None, language_model=['text_model'], aligner=['input_adaptor', 'output_adaptor', 'proj_code', 'audio_lm_head', 'code_predictor', 'speaker_encoder'], vision_tower=['audio_encoder', 'speaker_encoder'], generator=[]), architectures=['QwenDuplexModel'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, task_type=None, ignore_patterns=None, requires=[], tags=[])",
"model_name": null,
"model_revision": null,
"model_suffix": "iter_0025000_hf",
"model_type": "qwen_duplex",
"modules_to_save": [],
"moe_aux_loss_coeff": 0.0,
"moe_enable_deepep": false,
"moe_expert_capacity_factor": null,
"moe_ffn_hidden_size": null,
"moe_grouped_gemm": false,
"moe_layer_freq": "1",
"moe_layer_recompute": false,
"moe_pad_expert_input_to_capacity": false,
"moe_permute_fusion": false,
"moe_router_bias_update_rate": 0.001,
"moe_router_dtype": "fp32",
"moe_router_enable_expert_bias": false,
"moe_router_load_balancing_type": "aux_loss",
"moe_router_pre_softmax": false,
"moe_router_score_function": "softmax",
"moe_router_topk": 2,
"moe_router_topk_scaling_factor": null,
"moe_shared_expert_intermediate_size": null,
"moe_shared_expert_overlap": false,
"moe_token_dispatcher_type": null,
"moe_token_drop_policy": null,
"moe_z_loss_coeff": null,
"mrope_interleaved": false,
"mrope_section": null,
"multi_latent_attention": false,
"new_special_tokens": [],
"no_bias_dropout_fusion": false,
"no_bias_swiglu_fusion": false,
"no_gradient_accumulation_fusion": false,
"no_initialization": true,
"no_load_optim": false,
"no_load_rng": false,
"no_log_learning_rate_to_tensorboard": false,
"no_masked_softmax_fusion": false,
"no_rope_fusion": false,
"no_save_optim": false,
"no_save_rng": false,
"norm_bbox": null,
"norm_epsilon": 1e-06,
"normalization": "RMSNorm",
"num_attention_heads": 32,
"num_beams": 1,
"num_experts": null,
"num_labels": null,
"num_layers": 36,
"num_layers_per_virtual_pipeline_stage": null,
"num_query_groups": 8,
"num_virtual_stages_per_pipeline_rank": null,
"num_workers": 16,
"optimizer": "adam",
"optimizer_cpu_offload": false,
"optimizer_offload_fraction": 1.0,
"original_max_position_embeddings": null,
"overlap_grad_reduce": false,
"overlap_param_gather": false,
"packing": true,
"packing_length": 4096,
"padded_vocab_size": 153723,
"padding_free": true,
"padding_side": "right",
"partial_rotary_factor": null,
"pipeline_model_parallel_layout": null,
"pipeline_model_parallel_size": 1,
"position_embedding_type": "rope",
"problem_type": null,
"q_lora_rank": null,
"qk_head_dim": 128,
"qk_layernorm": true,
"qk_pos_emb_head_dim": 64,
"quant_bits": null,
"quant_method": null,
"rank": 0,
"recompute_granularity": "selective",
"recompute_method": null,
"recompute_modules": [
"core_attn"
],
"recompute_num_layers": null,
"ref_adapter_load": null,
"ref_load": null,
"reference_free": false,
"remove_unused_columns": false,
"repetition_penalty": null,
"response_prefix": null,
"rlhf_type": null,
"rope_scaling": null,
"rotary_base": 5000000,
"rotary_interleaved": false,
"rotary_percent": 1.0,
"rpo_alpha": null,
"save": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-ft-v4-s1-fa-res25k-20260401/v0-20260401-135515",
"save_interval": 1000,
"seed": 7,
"seq_length": 4096,
"sequence_parallel": false,
"sequence_parallel_size": 1,
"sgd_momentum": 0.9,
"shuffle_buffer_size": 1000,
"split_dataset_ratio": 0.0,
"stop_words": [],
"stopping_strategy": "first_exhausted",
"stream": false,
"streaming": false,
"strict": false,
"swiglu": true,
"system": null,
"target_modules": [
"all-linear"
],
"target_regex": null,
"task_type": "causal_lm",
"temperature": null,
"template": "qwen3",
"template_backend": "swift",
"tensor_model_parallel_size": 1,
"tensorboard_dir": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-ft-v4-s1-fa-res25k-20260401/v0-20260401-135515/runs",
"tensorboard_log_interval": 1,
"tensorboard_queue_size": 50,
"top_k": null,
"top_logprobs": null,
"top_p": null,
"torch_dtype": "bfloat16",
"tp_comm_overlap": false,
"train_iters": 5000,
"train_type": "full",
"trainable_parameters": [],
"trainable_parameters_regex": null,
"transformer_impl": "transformer_engine",
"truncation_strategy": "delete",
"tuner_backend": "peft",
"undesirable_weight": 1.0,
"untie_embeddings_and_output_weights": true,
"use_chat_template": true,
"use_cpu_initialization": false,
"use_distributed_optimizer": true,
"use_flash_attn": true,
"use_hf": false,
"use_precision_aware_optimizer": false,
"use_rslora": false,
"use_shared_expert_gate": false,
"use_swift_lora": false,
"val_dataset": [],
"val_dataset_shuffle": false,
"vit_gradient_checkpointing": false,
"wandb_exp_name": null,
"wandb_project": null,
"wandb_save_dir": null,
"weight_decay": 0.1
},
"checkpoint_iter": 5000,
"recipe_trace": "00 [current] /usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-ft-v4-s1-fa-res25k-20260401/v0-20260401-135515\n01 [load] /usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-duplex-resume-20k-20260401/v0-20260401-045855/iter_0025000_mcore",
"training_run_path": "/usrs/model-team/exps/sehun/duplex/8B-V2-voxtral-ft-v4-s1-fa-res25k-20260401/v0-20260401-135515"
},
"transformers_version": "4.57.3",
"typical_p": 1.0,
"use_backchannel_token": true,
"use_bfloat16": false,
"use_duplex_end_pad": true,
"use_inline_text_prediction": false,
"use_sil_token": true,
"auto_map": {
"AutoConfig": "configuration_raon.RaonDuplexConfig",
"AutoModel": "modeling_raon.RaonDuplexModel"
}
}