{
  "vision_config": {
    "patch_size": 14,
    "width": 1536,
    "layers": 50,
    "heads": 16,
    "mlp_ratio": 5.833333333333333,
    "output_dim": 1280,
    "ls_init_value": null,
    "drop_path": 0.0,
    "image_size": 448,
    "use_abs_posemb": true,
    "use_cls_token": false,
    "use_rope2d": true,
    "pool_type": "attn",
    "attn_pooler_heads": 8,
    "use_ln_pre": true,
    "use_ln_post": true
  },
  "text_config": {
    "context_length": 72,
    "width": 1280,
    "heads": 20,
    "layers": 24,
    "output_dim": 1280,
    "mlp_ratio": 4.0,
    "vocab_size": 49408
  }
}