{
"vocab_size": 50300,
"embed_dim": 768,
"intermediate_size": 3072,
"num_boxformer_layers": 6,
"num_transformer_layers": 6,
"num_heads": 12,
"vision": null,
"architecture": "DUX_v0_Mini_Model",
"use_gradient_checkpointing": true,
"lora": false
}