{
"init_logit_scale": 2.659260036932778,
"text_cfg": {
"context_length": 72,
"heads": 20,
"layers": 24,
"mlp_ratio": 4.0,
"output_dim": 1280,
"vocab_size": 49408,
"width": 1280
},
"vision_cfg": {
"attn_pooler_heads": 8,
"drop_path": 0.0,
"heads": 16,
"image_size": 448,
"layers": 50,
"ls_init_value": null,
"mlp_ratio": 5.833333333333333,
"output_dim": 1280,
"patch_size": 14,
"pool_type": "attn",
"use_abs_posemb": true,
"use_cls_token": false,
"use_ln_post": true,
"use_ln_pre": true,
"use_rope2d": true,
"width": 1536
}
}