{
  "architectures": [
    "Qwen2_5_VLForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "image_token_id": 151655,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 128000,
  "max_window_layers": 70,
  "model_type": "qwen2_5_vl",
  "num_attention_heads": 16,
  "num_hidden_layers": 36,
  "num_key_value_heads": 2,
  "pad_token_id": 151643,
  "rms_norm_eps": 1e-06,
  "rope_scaling": {
    "mrope_section": [
      16,
      24,
      24
    ],
    "rope_type": "default",
    "type": "default"
  },
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.1",
  "use_cache": false,
  "use_sliding_window": false,
  "video_token_id": 151656,
  "vision_config": {
    "depth": 32,
    "fullatt_block_indexes": [
      7,
      15,
      23,
      31
    ],
    "hidden_act": "silu",
    "hidden_size": 1280,
    "in_channels": 3,
    "in_chans": 3,
    "intermediate_size": 3420,
    "model_type": "qwen2_5_vl",
    "num_heads": 16,
    "out_hidden_size": 2048,
    "patch_size": 14,
    "spatial_merge_size": 2,
    "spatial_patch_size": 14,
    "temporal_patch_size": 2,
    "tokens_per_second": 2,
    "torch_dtype": "bfloat16",
    "window_size": 112
  },
  "vision_end_token_id": 151653,
  "vision_start_token_id": 151652,
  "vision_token_id": 151654,
  "vocab_size": 151936
}