{
  "architectures": [
    "InternVLChatModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_internvl_chat.InternVLChatConfig",
    "AutoModel": "modeling_internvl_chat.InternVLChatModel",
    "AutoModelForCausalLM": "modeling_internvl_chat.InternVLChatModel"
  },
  "downsample_ratio": 0.5,
  "dynamic_image_size": true,
  "eos_token_id": 151645,
  "force_image_size": 448,
  "llm_config": {
    "architectures": [
      "Qwen3ForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "bos_token_id": 151643,
    "debug": false,
    "eos_token_id": 151645,
    "ep_size": 1,
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 2048,
    "initializer_range": 0.02,
    "intermediate_size": 6144,
    "max_position_embeddings": 40960,
    "max_window_layers": 28,
    "micro_forward": false,
    "model_type": "qwen3",
    "num_attention_heads": 16,
    "num_hidden_layers": 28,
    "num_key_value_heads": 8,
    "rms_norm_eps": 1e-06,
    "rope_scaling": null,
    "rope_theta": 1000000,
    "skip_checkpoint": false,
    "sliding_window": null,
    "torch_dtype": "bfloat16",
    "use_cache": false,
    "use_deepep": false,
    "use_sliding_window": false,
    "vocab_size": 151936
  },
  "max_dynamic_patch": 12,
  "min_dynamic_patch": 1,
  "model_type": "internvl_chat",
  "pad2square": false,
  "pad_token_id": 151643,
  "ps_version": "v2",
  "select_layer": -1,
  "template": "plm_v",
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": null,
  "use_backbone_lora": 0,
  "use_llm_lora": 0,
  "use_thumbnail": true,
  "vision_config": {
    "architectures": [
      "InternVisionModel"
    ],
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "configuration_intern_vit.InternVisionConfig",
      "AutoModel": "modeling_intern_vit.InternVisionModel"
    },
    "drop_path_rate": 0.0,
    "dropout": 0.0,
    "hidden_act": "gelu",
    "hidden_size": 1024,
    "image_size": 448,
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 4096,
    "layer_norm_eps": 1e-06,
    "model_type": "intern_vit_6b",
    "norm_type": "layer_norm",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_hidden_layers": 24,
    "patch_size": 14,
    "qk_normalization": false,
    "qkv_bias": true,
    "torch_dtype": "bfloat16",
    "use_fa3": false,
    "use_flash_attn": true
  },
  "audio_config": {
    "architectures": [
      "DualWrappedEncoder"
    ],
    "model_type": "whisper_beats",
    "speech_encoder": "large-v3",
    "speech_encoder_type": "whisper",
    "speech_projector_type": "linear",
    "speech_encoder_ds_rate": 5,
    "speech_encoder_hidden_size": 1280,
    "mel_bins": 80,
    "sample_rate": 16000,
    "frame_length": 25,
    "frame_shift": 10,
    "torch_dtype": "bfloat16",
    "use_beats": true,
    "beats_model_path": "./BEATs_iter3_plus_AS2M_finetuned_on_AS2M_cpt2.pt",
    "speech_projector": null,
    "whisper_config": {
      "vocab_size": 51865,
      "num_mel_bins": 80,
      "encoder_layers": 6,
      "encoder_attention_heads": 8,
      "decoder_layers": 6,
      "decoder_attention_heads": 8,
      "decoder_ffn_dim": 1536,
      "encoder_ffn_dim": 1536,
      "encoder_layerdrop": 0.0,
      "decoder_layerdrop": 0.0,
      "decoder_start_token_id": 50257,
      "use_cache": true,
      "is_encoder_decoder": true,
      "activation_function": "gelu",
      "d_model": 512,
      "dropout": 0.0,
      "attention_dropout": 0.0,
      "activation_dropout": 0.0,
      "init_std": 0.02,
      "scale_embedding": false,
      "max_source_positions": 1500,
      "max_target_positions": 448,
      "pad_token_id": 50256,
      "bos_token_id": 50257,
      "eos_token_id": 50256,
      "suppress_tokens": [1, 2, 7, 8, 9, 10, 14, 25, 26, 27, 28, 29, 31, 58, 59, 60, 61, 62, 63, 90, 91, 92, 93, 359, 503, 522, 542, 873, 893, 902, 918, 922, 931, 1350, 1853, 1982, 2460, 2627, 3246, 3253, 3268, 3536, 3846, 3961, 4183, 4667, 6585, 6647, 7273, 9061, 9383, 10428, 10929, 11938, 12033, 12331, 12562, 13793, 14157, 14635, 15265, 15618, 16553, 16604, 18362, 18956, 20075, 21675, 22520, 26130, 26161, 26435, 28279, 29464, 31650, 32302, 32470, 36865, 42863, 47425, 49870, 50254, 50258, 50358, 50359, 50360, 50361, 50362],
      "begin_suppress_tokens": [220, 50256]
    }
  }
}