{
  "amp_dtype": "fp16",
  "architectures": [
    "FireboltLMForCausalLM"
  ],
  "bos_token_id": 1,
  "dtype": "float32",
  "eos_token_id": 2,
  "freeze_llm": true,
  "fuse_attn_dropout": 0.1,
  "fuse_heads": 8,
  "fuse_proj_dropout": 0.1,
  "fuse_strategy": "cossm",
  "fuser_freeze": false,
  "fuser_type": "multimodal_fusing",
  "image_token_id": 64400,
  "lm_name_or_path": "LiquidAI/LFM2-350M",
  "model_type": "fireboltlm",
  "pad_token_id": 0,
  "projector_dropout": 0.1,
  "projector_hidden": 1024,
  "projector_layers": 2,
  "projector_type": "residual_ffn",
  "transformers_version": "4.57.3",
  "use_cache": true,
  "vision_ckpt_path": "/home/mamba/ML_project/Testing/Huy/joint_vlm/viper-vlm/pretrained/siglip2_base_16_256/",
  "vision_freeze": false,
  "vision_hidden_size": 768,
  "vision_output_tokens": 256,
  "vocab_size": 65536
}