ritu-kumari07 committed
Commit 86147d2 · verified · 1 Parent(s): 3fda129

Update config.json

Files changed (1)
  1. config.json +93 -93
config.json CHANGED
@@ -1,93 +1,93 @@
-{
-  "architectures": [
-    "LightOnOCRForConditionalGeneration"
-  ],
-  "dtype": "bfloat16",
-  "image_token_id": 151655,
-  "model_type": "lightonocr",
-  "multimodal_projector_bias": false,
-  "projector_hidden_act": "gelu",
-  "spatial_merge_size": 2,
-  "text_config": {
-    "architectures": [
-      "Qwen3ForCausalLM"
-    ],
-    "attention_bias": false,
-    "attention_dropout": 0,
-    "dtype": "bfloat16",
-    "head_dim": 128,
-    "hidden_act": "silu",
-    "hidden_size": 1024,
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
-    "layer_types": [
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention",
-      "full_attention"
-    ],
-    "max_position_embeddings": 8192,
-    "max_window_layers": 28,
-    "model_type": "lightonocr_text",
-    "num_attention_heads": 16,
-    "num_hidden_layers": 28,
-    "num_key_value_heads": 8,
-    "rms_norm_eps": 1e-06,
-    "rope_parameters": {
-      "rope_theta": 1000000,
-      "rope_type": "default"
-    },
-    "rope_theta": 1000000,
-    "sliding_window": null,
-    "use_cache": true,
-    "use_sliding_window": false,
-    "vocab_size": 151936
-  },
-  "transformers_version": "5.0.0.dev0",
-  "use_cache": false,
-  "vision_config": {
-    "attention_dropout": 0,
-    "dtype": "bfloat16",
-    "head_dim": 64,
-    "hidden_act": "silu",
-    "hidden_size": 1024,
-    "image_size": 1540,
-    "initializer_range": 0.02,
-    "intermediate_size": 4096,
-    "model_type": "lightonocr_vision",
-    "num_attention_heads": 16,
-    "num_channels": 3,
-    "num_hidden_layers": 24,
-    "patch_size": 14,
-    "rope_parameters": {
-      "rope_theta": 10000,
-      "rope_type": "default"
-    },
-    "rope_theta": 10000
-  },
-  "vision_feature_layer": -1
-}
+{
+  "architectures": [
+    "LightOnOCRForConditionalGeneration"
+  ],
+  "dtype": "bfloat16",
+  "image_token_id": 151655,
+  "model_type": "mistral3",
+  "multimodal_projector_bias": false,
+  "projector_hidden_act": "gelu",
+  "spatial_merge_size": 2,
+  "text_config": {
+    "architectures": [
+      "Qwen3ForCausalLM"
+    ],
+    "attention_bias": false,
+    "attention_dropout": 0,
+    "dtype": "bfloat16",
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 1024,
+    "initializer_range": 0.02,
+    "intermediate_size": 3072,
+    "layer_types": [
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention",
+      "full_attention"
+    ],
+    "max_position_embeddings": 8192,
+    "max_window_layers": 28,
+    "model_type": "qwen3",
+    "num_attention_heads": 16,
+    "num_hidden_layers": 28,
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-06,
+    "rope_parameters": {
+      "rope_theta": 1000000,
+      "rope_type": "default"
+    },
+    "rope_theta": 1000000,
+    "sliding_window": null,
+    "use_cache": true,
+    "use_sliding_window": false,
+    "vocab_size": 151936
+  },
+  "transformers_version": "5.0.0.dev0",
+  "use_cache": false,
+  "vision_config": {
+    "attention_dropout": 0,
+    "dtype": "bfloat16",
+    "head_dim": 64,
+    "hidden_act": "silu",
+    "hidden_size": 1024,
+    "image_size": 1540,
+    "initializer_range": 0.02,
+    "intermediate_size": 4096,
+    "model_type": "pixtral",
+    "num_attention_heads": 16,
+    "num_channels": 3,
+    "num_hidden_layers": 24,
+    "patch_size": 14,
+    "rope_parameters": {
+      "rope_theta": 10000,
+      "rope_type": "default"
+    },
+    "rope_theta": 10000
+  },
+  "vision_feature_layer": -1
+}
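
The only substantive change in this diff is the three `model_type` fields: the top-level model moves from the custom `lightonocr` type to `mistral3`, the text backbone from `lightonocr_text` to `qwen3`, and the vision tower from `lightonocr_vision` to `pixtral`. A minimal sketch for checking that the updated file parses and reports the expected types; the local file path is an assumption for illustration, not part of this commit:

```python
import json

# Load the updated config.json from a local checkout of the repo
# (the path "config.json" is a placeholder / assumption).
with open("config.json") as f:
    cfg = json.load(f)

# The three fields this commit rewrites.
print(cfg["model_type"])                   # expected: "mistral3"
print(cfg["text_config"]["model_type"])    # expected: "qwen3"
print(cfg["vision_config"]["model_type"])  # expected: "pixtral"
```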