{
  "_name_or_path": "/home/nashen/deepseek-ocr/DeepSeek-OCR-master/DeepSeek-OCR-vllm/model/",
  "architectures": [
    "DeepseekOCRForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "modeling_deepseekocr.DeepseekOCRConfig",
    "AutoModel": "modeling_deepseekocr.DeepseekOCRForCausalLM"
  },
  "aux_loss_alpha": 0.001,
  "bos_token_id": 0,
  "candidate_resolutions": [
    [
      1024,
      1024
    ]
  ],
  "eos_token_id": 1,
  "ep_size": 1,
  "first_k_dense_replace": 1,
  "global_view_pos": "head",
  "hidden_act": "silu",
  "hidden_size": 1280,
  "initializer_range": 0.02,
  "intermediate_size": 6848,
  "kv_lora_rank": null,
  "language_config": {
    "architectures": [
      "DeepseekV2ForCausalLM"
    ],
    "auto_map": {
      "AutoConfig": "configuration_deepseekv2.DeepseekV2Config",
      "AutoModel": "modeling_deepseek.DeepseekV2Model",
      "AutoModelForCausalLM": "modeling_deepseek.DeepseekV2ForCausalLM"
    },
    "bos_token_id": 0,
    "eos_token_id": 1,
    "first_k_dense_replace": 1,
    "hidden_size": 1280,
    "intermediate_size": 6848,
    "kv_lora_rank": null,
    "lm_head": true,
    "max_position_embeddings": 8192,
    "moe_intermediate_size": 896,
    "n_group": 1,
    "n_routed_experts": 64,
    "n_shared_experts": 2,
    "num_attention_heads": 10,
    "num_experts_per_tok": 6,
    "num_hidden_layers": 12,
    "num_key_value_heads": 10,
    "q_lora_rank": null,
    "qk_nope_head_dim": 0,
    "qk_rope_head_dim": 0,
    "rm_head": false,
    "topk_group": 1,
    "topk_method": "greedy",
    "torch_dtype": "bfloat16",
    "use_mla": false,
    "v_head_dim": 0,
    "vocab_size": 129280
  },
  "lm_head": true,
  "max_position_embeddings": 8192,
  "model_type": "DeepseekOCR",
  "moe_intermediate_size": 896,
  "moe_layer_freq": 1,
  "n_group": 1,
  "n_routed_experts": 64,
  "n_shared_experts": 2,
  "norm_topk_prob": false,
  "num_attention_heads": 10,
  "num_experts_per_tok": 6,
  "num_hidden_layers": 12,
  "num_key_value_heads": 10,
  "pretraining_tp": 1,
  "projector_config": {
    "input_dim": 2048,
    "model_type": "mlp_projector",
    "n_embed": 1280,
    "projector_type": "linear"
  },
  "q_lora_rank": null,
  "qk_nope_head_dim": 0,
  "qk_rope_head_dim": 0,
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": true,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": [
      "sam_model",
      "model.sam_model",
      "vision_model",
      "model.vision_model",
      "projector",
      "model.projector",
      "lm_head",
      "embed_tokens"
    ],
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "rm_head": false,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "routed_scaling_factor": 1.0,
  "scoring_func": "softmax",
  "seq_aux": true,
  "tie_word_embeddings": false,
  "tile_tag": "2D",
  "topk_group": 1,
  "topk_method": "greedy",
  "torch_dtype": "float16",
  "transformers_version": "4.46.3",
  "use_cache": true,
  "use_mla": false,
  "v_head_dim": 0,
  "vision_config": {
    "image_size": 1024,
    "mlp_ratio": 3.7362,
    "model_name": "deeplip_b_l",
    "model_type": "vision",
    "width": {
      "clip-l-14-224": {
        "heads": 16,
        "image_size": 224,
        "layers": 24,
        "patch_size": 14,
        "width": 1024
      },
      "sam_vit_b": {
        "downsample_channels": [
          512,
          1024
        ],
        "global_attn_indexes": [
          2,
          5,
          8,
          11
        ],
        "heads": 12,
        "layers": 12,
        "width": 768
      }
    }
  },
  "vocab_size": 129280
}
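
For reference, a minimal loading sketch. Because the `quantization_config` block is embedded in this `config.json`, `transformers` applies the bitsandbytes NF4 settings automatically at load time (no `BitsAndBytesConfig` needs to be passed, but `bitsandbytes` must be installed), and the `auto_map` entries mean `trust_remote_code=True` is required so the custom `DeepseekOCRForCausalLM` class ships with the repo is used. The repo id below is a placeholder assumption, not confirmed by this config.

```python
# Minimal loading sketch for a checkpoint that ships this config.json.
# ASSUMPTION: "WHY2001/DeepSeek-OCR-bnb-4bit" is a placeholder repo id;
# substitute the actual Hugging Face Hub id of this repository.
from transformers import AutoModel, AutoTokenizer

MODEL_ID = "WHY2001/DeepSeek-OCR-bnb-4bit"  # placeholder (assumption)

# trust_remote_code=True lets auto_map resolve AutoModel to the custom
# modeling_deepseekocr.DeepseekOCRForCausalLM class in the repo.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)

# The quantization_config in config.json is picked up automatically:
# the DeepSeek-V2 language layers load as 4-bit NF4 with bfloat16 compute,
# while llm_int8_skip_modules keeps the SAM/CLIP vision towers, projector,
# lm_head, and embed_tokens in higher precision.
model = AutoModel.from_pretrained(
    MODEL_ID,
    trust_remote_code=True,
    device_map="auto",  # 4-bit bitsandbytes weights must land on a CUDA device
)
```

Note that the 4-bit matmuls run in `bfloat16` (`bnb_4bit_compute_dtype`), independent of the top-level `torch_dtype` of `float16`, and double quantization (`bnb_4bit_use_double_quant`) further compresses the quantization constants.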