dots.ocr-5bit / config.json
{
"architectures": [
"DotsOCRForCausalLM"
],
"attention_bias": true,
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "configuration_dots.DotsOCRConfig",
"AutoModelForCausalLM": "modeling_dots_ocr.DotsOCRForCausalLM"
},
"eos_token_id": [
151643,
151673
],
"hidden_act": "silu",
"hidden_size": 1536,
"image_token_id": 151665,
"initializer_range": 0.02,
"intermediate_size": 8960,
"max_position_embeddings": 131072,
"max_window_layers": 28,
"model_type": "dots_ocr",
"num_attention_heads": 12,
"num_hidden_layers": 28,
"num_key_value_heads": 2,
"quantization": {
"group_size": 64,
"bits": 5,
"mode": "affine"
},
"quantization_config": {
"group_size": 64,
"bits": 5,
"mode": "affine"
},
"rms_norm_eps": 1e-06,
"rope_scaling": null,
"rope_theta": 1000000,
"sliding_window": 131072,
"tie_word_embeddings": false,
"transformers_version": "4.51.0",
"use_cache": true,
"use_sliding_window": false,
"video_token_id": 151656,
"vision_config": {
"embed_dim": 1536,
"hidden_size": 1536,
"intermediate_size": 4224,
"num_hidden_layers": 42,
"num_attention_heads": 12,
"num_channels": 3,
"patch_size": 14,
"post_norm": true,
"rms_norm_eps": 1e-05,
"spatial_merge_size": 2,
"temporal_patch_size": 1,
"use_bias": false,
"attn_implementation": "flash_attention_2",
"init_merger_std": 0.02,
"initializer_range": 0.02,
"is_causal": false
},
"vocab_size": 151936
}
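
The `auto_map` entries point at custom classes shipped alongside this file (`configuration_dots.py`, `modeling_dots_ocr.py`), so anything loaded from this config needs `trust_remote_code=True`. A minimal sketch of reading the config with transformers, assuming a placeholder path (substitute the actual local folder or hub repo id):

```python
from transformers import AutoConfig

# Placeholder path; replace with the real local folder or hub repo id.
config = AutoConfig.from_pretrained(
    "path/to/dots.ocr-5bit",
    trust_remote_code=True,  # resolves configuration_dots.DotsOCRConfig via auto_map
)

print(config.model_type)         # "dots_ocr"
print(config.num_hidden_layers)  # 28
```

Note that the weights themselves are stored in MLX's quantized format (see the `quantization` block), so this sketch only covers the config; running inference would typically go through an MLX-based loader such as mlx-vlm rather than `AutoModelForCausalLM`.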
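The duplicated `quantization` / `quantization_config` blocks describe MLX-style affine quantization: each group of 64 weights shares one scale and bias, and every weight is stored as a 5-bit integer (32 levels). A minimal NumPy sketch of that scheme, for intuition only; MLX's actual bit-packing and kernels differ:

```python
import numpy as np

BITS = 5         # from "bits": 5 -> integer codes 0..31
GROUP_SIZE = 64  # from "group_size": 64 -> one (scale, bias) pair per 64 weights

def quantize_group(w: np.ndarray):
    """Affine-quantize one group: w ~= scale * q + bias, q in [0, 2**BITS - 1]."""
    lo, hi = w.min(), w.max()
    scale = (hi - lo) / (2**BITS - 1)
    bias = lo
    q = np.round((w - bias) / scale).astype(np.uint8)
    return q, scale, bias

def dequantize_group(q: np.ndarray, scale: float, bias: float) -> np.ndarray:
    return scale * q.astype(np.float32) + bias

rng = np.random.default_rng(0)
w = rng.normal(size=GROUP_SIZE).astype(np.float32)
q, scale, bias = quantize_group(w)
w_hat = dequantize_group(q, scale, bias)
print("max abs error:", np.abs(w - w_hat).max())  # bounded by ~scale / 2
```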
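For intuition on the vision settings: `patch_size: 14` combined with `spatial_merge_size: 2` means 2x2 neighboring 14-pixel patches are merged into one visual token, so each token covers a 28x28-pixel area. This reading assumes the Qwen2-VL-style patch merging these field names suggest:

```python
patch_size = 14          # from vision_config
spatial_merge_size = 2   # 2x2 patches merged into one visual token

side = patch_size * spatial_merge_size  # 28 pixels per token side
pixels_per_token = side * side          # 784 pixels per visual token

def tokens_for_image(height: int, width: int) -> int:
    """Rough token count for an image whose sides divide evenly by 28."""
    return (height // side) * (width // side)

print(pixels_per_token)             # 784
print(tokens_for_image(896, 1344))  # 32 * 48 = 1536 tokens
```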