{
  "architectures": [
    "PaddleOCRVLForConditionalGeneration"
  ],
  "attention_probs_dropout_prob": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_paddleocr_vl.PaddleOCRVLConfig",
    "AutoModel": "modeling_paddleocr_vl.PaddleOCRVLForConditionalGeneration",
    "AutoModelForCausalLM": "modeling_paddleocr_vl.PaddleOCRVLForConditionalGeneration"
  },
  "compression_ratio": 1.0,
  "disable_pipeline_warmup": false,
  "enable_mtp_magic_send": false,
  "fp16_opt_level": "O2",
  "freq_allocation": 20,
  "fuse_ln": false,
  "fuse_rms_norm": true,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "ignored_index": -100,
  "im_patch_id": 100295,
  "image_token_id": 100295,
  "intermediate_size": 3072,
  "max_position_embeddings": 131072,
  "max_text_id": 100295,
  "model_type": "paddleocr_vl",
  "moe_dropout_prob": 0.0,
  "moe_multimodal_dispatch_use_allgather": "v2-alltoall-unpad",
  "num_attention_heads": 16,
  "num_hidden_layers": 18,
  "num_key_value_heads": 2,
  "paddleformers_version": "0.4.0",
  "pixel_hidden_size": 1152,
  "rms_norm_eps": 1e-05,
  "rope_is_neox_style": true,
  "rope_scaling": {
    "mrope_section": [
      16,
      24,
      24
    ],
    "rope_type": "default",
    "type": "default"
  },
  "rope_theta": 500000,
  "scale_qk_coeff": 1.0,
  "seqlen": 16384,
  "sliding_window": null,
  "tie_word_embeddings": false,
  "token_balance_loss": false,
  "token_balance_seqlen": 16384,
  "torch_dtype": "bfloat16",
  "use_3d_rope": true,
  "use_bias": false,
  "use_flash_attn_with_mask": true,
  "use_fp8": false,
  "use_mem_eff_attn": true,
  "use_recompute_moe": false,
  "use_rmsnorm": true,
  "video_token_id": 101307,
  "vision_config": {
    "_attn_implementation": "eager",
    "_name_or_path": "",
    "_save_to_hf": false,
    "add_cross_attention": false,
    "add_tail_layers": 0,
    "architectures": [
      "PaddleOCRVisionModel"
    ],
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "configuration_paddleocr_vl.PaddleOCRVLConfig",
      "AutoModel": "modeling_paddleocr_vl.PaddleOCRVisionModel"
    },
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "classifier_dropout": null,
    "context_parallel_degree": 1,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "dpo_config": null,
    "dtype": "bfloat16",
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "ep_communication_type": "deepep",
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "fuse_attention_ffn": false,
    "fuse_attention_qkv": false,
    "fuse_linear": false,
    "fuse_rope": false,
    "fuse_sequence_parallel_allreduce": false,
    "fuse_swiglu": false,
    "hidden_act": "gelu_new",
    "hidden_size": 1152,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "image_size": 384,
    "intermediate_size": 4304,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "kto_config": null,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-06,
    "length_penalty": 1.0,
    "loss_subbatch_sequence_length": -1,
    "max_length": 20,
    "min_length": 0,
    "model_type": "paddleocr_vl",
    "moe_subbatch_token_num": 0,
    "no_recompute_layers": null,
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 16,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_channels": 3,
    "num_choices": null,
    "num_hidden_layers": 27,
    "num_nextn_predict_layers": 0,
    "num_return_sequences": 1,
    "offload_recompute_inputs": false,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 0,
    "patch_size": 14,
    "pipeline_parallel_degree": 1,
    "pp_recompute_interval": 1,
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "quantization_config": {
      "act_quant_method": "abs_max",
      "activation_scheme": null,
      "actscale_moving_rate": 0.01,
      "apply_hadamard": false,
      "apply_online_actscale_step": 200,
      "dense_quant_type": "",
      "dtype": null,
      "fmt": null,
      "fp8_format_type": "hybrid",
      "group_size": -1,
      "hadamard_block_size": 32,
      "ignore_modules": null,
      "llm_int8_threshold": 6.0,
      "moe_quant_type": "",
      "qlora_weight_blocksize": 64,
      "qlora_weight_double_quant": false,
      "qlora_weight_double_quant_block_size": 256,
      "quant_input_grad": false,
      "quant_method": null,
      "quant_round_type": 0,
      "quant_type": null,
      "quant_weight_grad": false,
      "quantization": "",
      "scale_epsilon": 1e-08,
      "shift": false,
      "shift_smooth_all_linears": false,
      "smooth": false,
      "weight_block_size": null,
      "weight_quant_method": "abs_max_channel_wise",
      "weight_quantize_algo": null
    },
    "recompute": true,
    "recompute_granularity": "full",
    "recompute_use_reentrant": false,
    "refined_recompute": "",
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": false,
    "return_dict_in_generate": false,
    "sep_parallel_degree": 1,
    "sep_token_id": null,
    "sequence_parallel": false,
    "spatial_merge_size": 2,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "temporal_patch_size": 2,
    "tensor_parallel_degree": 1,
    "tensor_parallel_output": true,
    "tensor_parallel_rank": 0,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "tokens_per_second": 2,
    "top_k": 50,
    "top_p": 1.0,
    "typical_p": 1.0,
    "use_cache": false,
    "use_filtered_label_loss": false,
    "use_flash_attention": true,
    "use_fused_dropout_add": false,
    "use_fused_head_and_loss_fn": false,
    "use_fused_linear": false,
    "use_fused_linear_cross_entropy": false,
    "use_fused_rms_norm": false,
    "use_fused_rope": false,
    "use_sparse_flash_attn": true,
    "use_sparse_head_and_loss_fn": false,
    "use_unified_moe": false,
    "using_fake_gate": false,
    "virtual_pp_degree": 1
  },
  "vision_end_token_id": 101306,
  "vision_start_token_id": 101305,
  "vocab_size": 103424,
  "weight_share_add_bias": true
}