aqib-prog committed on
Commit
3590461
·
verified ·
1 Parent(s): 7c39e6c

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +207 -0
config.json ADDED
@@ -0,0 +1,207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "_name_or_path": "/Users/mohamedaqibabid/Downloads/tiny_minicpm_smallest_mb",
  "architectures": [
    "MiniCPMO"
  ],
  "attention_dropout": 0.0,
  "audio_chunk_length": 1.0,
  "audio_config": {
    "_name_or_path": "openai/whisper-medium",
    "architectures": [
      "MiniCPMWhisperEncoder"
    ],
    "begin_suppress_tokens": [
      220,
      50257
    ],
    "bos_token_id": 50257,
    "d_model": 4,
    "decoder_attention_heads": 1,
    "decoder_ffn_dim": 1,
    "decoder_layers": 1,
    "decoder_start_token_id": 50258,
    "encoder_attention_heads": 1,
    "encoder_ffn_dim": 1,
    "encoder_layers": 1,
    "eos_token_id": 50257,
    "forced_decoder_ids": [
      [
        1,
        50259
      ],
      [
        2,
        50359
      ],
      [
        3,
        50363
      ]
    ],
    "max_length": 448,
    "model_type": "whisper",
    "num_hidden_layers": 24,
    "pad_token_id": 50257,
    "suppress_tokens": [
      1,
      2,
      7,
      8,
      9,
      10,
      14,
      25,
      26,
      27,
      28,
      29,
      31,
      58,
      59,
      60,
      61,
      62,
      63,
      90,
      91,
      92,
      93,
      359,
      503,
      522,
      542,
      873,
      893,
      902,
      918,
      922,
      931,
      1350,
      1853,
      1982,
      2460,
      2627,
      3246,
      3253,
      3268,
      3536,
      3846,
      3961,
      4183,
      4667,
      6585,
      6647,
      7273,
      9061,
      9383,
      10428,
      10929,
      11938,
      12033,
      12331,
      12562,
      13793,
      14157,
      14635,
      15265,
      15618,
      16553,
      16604,
      18362,
      18956,
      20075,
      21675,
      22520,
      26130,
      26161,
      26435,
      28279,
      29464,
      31650,
      32302,
      32470,
      36865,
      42863,
      47425,
      49870,
      50254,
      50258,
      50358,
      50359,
      50360,
      50361,
      50362
    ],
    "torch_dtype": "float32"
  },
  "audio_pool_step": 2,
  "auto_map": {
    "AutoConfig": "configuration_minicpm.MiniCPMOConfig",
    "AutoModel": "modeling_minicpmo.MiniCPMO",
    "AutoModelForCausalLM": "modeling_minicpmo.MiniCPMO"
  },
  "batch_vision_input": true,
  "bos_token_id": 151643,
  "chunk_input": true,
  "drop_vision_last_layer": false,
  "eos_token_id": 151645,
  "hidden_act": "silu",
  "hidden_size": 128,
  "image_size": 448,
  "init_audio": true,
  "init_tts": true,
  "init_vision": true,
  "initializer_range": 0.02,
  "intermediate_size": 1,
  "listen_speak_type": "asr",
  "max_position_embeddings": 32768,
  "max_window_layers": 28,
  "model_type": "minicpmo",
  "num_attention_heads": 1,
  "num_heads": 1,
  "num_hidden_layers": 1,
  "num_key_value_heads": 1,
  "patch_size": 14,
  "query_num": 64,
  "rms_norm_eps": 1e-06,
  "rope_scaling": null,
  "rope_theta": 1000000.0,
  "slice_config": {
    "max_slice_nums": 9,
    "model_type": "minicpmv"
  },
  "slice_mode": true,
  "sliding_window": null,
  "stream_input": false,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.45.0",
  "tts_config": {
    "hidden_size": 1,
    "intermediate_size": 1,
    "llm_dim": 1,
    "model_type": "conditional_chattts",
    "num_attention_heads": 1,
    "num_audio_tokens": 2,
    "num_heads": 1,
    "num_hidden_layers": 1,
    "num_layers": 1,
    "num_mel_bins": 2,
    "num_text_tokens": 2
  },
  "use_cache": true,
  "use_image_id": true,
  "use_sliding_window": false,
  "version": 2.6,
  "vision_batch_size": 16,
  "vision_config": {
    "hidden_size": 1,
    "image_size": 980,
    "intermediate_size": 1,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 1,
    "num_hidden_layers": 1,
    "patch_size": 14
  },
  "vocab_size": 150
}