imdigitalashish committed on
Commit
1184ff6
·
verified ·
1 Parent(s): c2d866c

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. config.json +6 -16
  2. preprocessor_config.json +2 -6
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,18 +1,10 @@
1
  {
2
  "architectures": [
3
- "AshishOcrForConditionalGeneration"
4
  ],
5
- "auto_map": {
6
- "AutoConfig": "configuration_ashish_ocr.AshishOcrConfig",
7
- "AutoModel": "modeling_ashish_ocr.AshishOcrForConditionalGeneration",
8
- "AutoModelForCausalLM": "modeling_ashish_ocr.AshishOcrForConditionalGeneration",
9
- "AutoModelForVision2Seq": "modeling_ashish_ocr.AshishOcrForConditionalGeneration",
10
- "AutoProcessor": "processing_ashish_ocr.AshishOcrProcessor",
11
- "AutoImageProcessor": "processing_ashish_ocr.AshishOcrImageProcessor"
12
- },
13
- "model_type": "ashish_ocr",
14
  "text_config": {
15
- "model_type": "ashish_ocr_text",
16
  "pad_token_id": 59246,
17
  "vocab_size": 59392,
18
  "eos_token_id": [
@@ -33,21 +25,19 @@
33
  "num_key_value_heads": 8,
34
  "rms_norm_eps": 1e-05,
35
  "dtype": "bfloat16",
36
- "rope_parameters": {
37
  "rope_type": "default",
38
  "mrope_section": [
39
  16,
40
  24,
41
  24
42
- ],
43
- "partial_rotary_factor": 1.0,
44
- "rope_theta": 10000
45
  },
46
  "tie_word_embeddings": false,
47
  "use_cache": true
48
  },
49
  "vision_config": {
50
- "model_type": "ashish_ocr_vision",
51
  "hidden_size": 1024,
52
  "depth": 24,
53
  "num_heads": 16,
 
1
  {
2
  "architectures": [
3
+ "Qwen2_5_VLForConditionalGeneration"
4
  ],
5
+ "model_type": "qwen2_5_vl",
 
 
 
 
 
 
 
 
6
  "text_config": {
7
+ "model_type": "qwen2_5_vl",
8
  "pad_token_id": 59246,
9
  "vocab_size": 59392,
10
  "eos_token_id": [
 
25
  "num_key_value_heads": 8,
26
  "rms_norm_eps": 1e-05,
27
  "dtype": "bfloat16",
28
+ "rope_scaling": {
29
  "rope_type": "default",
30
  "mrope_section": [
31
  16,
32
  24,
33
  24
34
+ ]
 
 
35
  },
36
  "tie_word_embeddings": false,
37
  "use_cache": true
38
  },
39
  "vision_config": {
40
+ "model_type": "qwen2_5_vl",
41
  "hidden_size": 1024,
42
  "depth": 24,
43
  "num_heads": 16,
preprocessor_config.json CHANGED
@@ -1,8 +1,4 @@
1
  {
2
- "auto_map": {
3
- "AutoImageProcessor": "processing_ashish_ocr.AshishOcrImageProcessor",
4
- "AutoProcessor": "processing_ashish_ocr.AshishOcrProcessor"
5
- },
6
  "size": {"shortest_edge": 12544, "longest_edge": 9633792},
7
  "do_rescale": true,
8
  "patch_size": 14,
@@ -10,6 +6,6 @@
10
  "merge_size": 2,
11
  "image_mean": [0.48145466, 0.4578275, 0.40821073],
12
  "image_std": [0.26862954, 0.26130258, 0.27577711],
13
- "image_processor_type": "AshishOcrImageProcessor",
14
- "processor_class": "AshishOcrProcessor"
15
  }
 
1
  {
 
 
 
 
2
  "size": {"shortest_edge": 12544, "longest_edge": 9633792},
3
  "do_rescale": true,
4
  "patch_size": 14,
 
6
  "merge_size": 2,
7
  "image_mean": [0.48145466, 0.4578275, 0.40821073],
8
  "image_std": [0.26862954, 0.26130258, 0.27577711],
9
+ "image_processor_type": "Qwen2VLImageProcessor",
10
+ "processor_class": "Qwen2_5_VLProcessor"
11
  }
tokenizer_config.json CHANGED
@@ -44,6 +44,6 @@
44
  "model_max_length": 655380,
45
  "pad_token": "<|endoftext|>",
46
  "padding_side": "left",
47
- "processor_class": "AshishOcrProcessor",
48
  "tokenizer_class": "TokenizersBackend"
49
  }
 
44
  "model_max_length": 655380,
45
  "pad_token": "<|endoftext|>",
46
  "padding_side": "left",
47
+ "processor_class": "Qwen2_5_VLProcessor",
48
  "tokenizer_class": "TokenizersBackend"
49
  }