{
  "architectures": [
    "ASRModel"
  ],
  "attn_implementation": "flash_attention_2",
  "audio_config": {
    "_name_or_path": "zai-org/GLM-ASR-Nano-2512",
    "architectures": [
      "GlmAsrForConditionalGeneration"
    ],
    "audio_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_dropout": 0.0,
      "bos_token_id": null,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "dtype": null,
      "eos_token_id": null,
      "finetuning_task": null,
      "head_dim": 64,
      "hidden_act": "gelu",
      "hidden_size": 1280,
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "initializer_range": 0.02,
      "intermediate_size": 5120,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "max_position_embeddings": 1500,
      "model_type": "glmasr_encoder",
      "num_attention_heads": 20,
      "num_hidden_layers": 32,
      "num_key_value_heads": 20,
      "num_mel_bins": 128,
      "output_attentions": false,
      "output_hidden_states": false,
      "pad_token_id": null,
      "partial_rotary_factor": 0.5,
      "prefix": null,
      "problem_type": null,
      "return_dict": true,
      "rope_parameters": {
        "partial_rotary_factor": 0.5,
        "rope_theta": 10000.0,
        "rope_type": "default"
      },
      "sep_token_id": null,
      "task_specific_params": null,
      "tie_word_embeddings": true,
      "tokenizer_class": null
    },
    "audio_token_id": 59260,
    "dtype": "bfloat16",
    "hidden_size": 2048,
    "model_type": "glmasr",
    "num_mel_bins": 128,
    "projector_hidden_act": "gelu",
    "text_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_bias": false,
      "attention_dropout": 0.0,
      "bos_token_id": 1,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "dtype": null,
      "eos_token_id": [
        59246,
        59253,
        59255
      ],
      "finetuning_task": null,
      "head_dim": 128,
      "hidden_act": "silu",
      "hidden_size": 2048,
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "initializer_range": 0.02,
      "intermediate_size": 6144,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "max_position_embeddings": 8192,
      "mlp_bias": false,
      "model_type": "llama",
      "num_attention_heads": 16,
      "num_hidden_layers": 28,
      "num_key_value_heads": 4,
      "output_attentions": false,
      "output_hidden_states": false,
      "pad_token_id": null,
      "prefix": null,
      "pretraining_tp": 1,
      "problem_type": null,
      "return_dict": true,
      "rms_norm_eps": 1e-05,
      "rope_parameters": {
        "rope_theta": 10000.0,
        "rope_type": "default"
      },
      "sep_token_id": null,
      "task_specific_params": null,
      "tie_word_embeddings": false,
      "tokenizer_class": null,
      "use_cache": true,
      "vocab_size": 59264
    },
    "vocab_size": 59264
  },
  "audio_model_id": "zai-org/GLM-ASR-Nano-2512",
  "audio_sample_rate": 16000,
  "auto_map": {
    "AutoConfig": "asr_config.ASRConfig",
    "AutoModel": "asr_modeling.ASRModel",
    "AutoModelForSpeechSeq2Seq": "asr_modeling.ASRModel",
    "AutoProcessor": "asr_processing.ASRProcessor"
  },
  "custom_pipelines": {
    "automatic-speech-recognition": {
      "impl": "asr_pipeline.ASRPipeline",
      "pt": [
        "AutoModelForSpeechSeq2Seq"
      ],
      "tf": [],
      "type": "audio"
    }
  },
  "downsample_rate": 5,
  "dtype": "bfloat16",
  "encoder": {
    "_name_or_path": "zai-org/GLM-ASR-Nano-2512",
    "architectures": [
      "GlmAsrForConditionalGeneration"
    ],
    "audio_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_dropout": 0.0,
      "bos_token_id": null,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "dtype": null,
      "eos_token_id": null,
      "finetuning_task": null,
      "head_dim": 64,
      "hidden_act": "gelu",
      "hidden_size": 1280,
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "initializer_range": 0.02,
      "intermediate_size": 5120,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "max_position_embeddings": 1500,
      "model_type": "glmasr_encoder",
      "num_attention_heads": 20,
      "num_hidden_layers": 32,
      "num_key_value_heads": 20,
      "num_mel_bins": 128,
      "output_attentions": false,
      "output_hidden_states": false,
      "pad_token_id": null,
      "partial_rotary_factor": 0.5,
      "prefix": null,
      "problem_type": null,
      "return_dict": true,
      "rope_parameters": {
        "partial_rotary_factor": 0.5,
        "rope_theta": 10000.0,
        "rope_type": "default"
      },
      "sep_token_id": null,
      "task_specific_params": null,
      "tie_word_embeddings": true,
      "tokenizer_class": null
    },
    "audio_token_id": 59260,
    "dtype": "bfloat16",
    "hidden_size": 2048,
    "model_type": "glmasr",
    "num_mel_bins": 128,
    "projector_hidden_act": "gelu",
    "text_config": {
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_bias": false,
      "attention_dropout": 0.0,
      "bos_token_id": 1,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "dtype": null,
      "eos_token_id": [
        59246,
        59253,
        59255
      ],
      "finetuning_task": null,
      "head_dim": 128,
      "hidden_act": "silu",
      "hidden_size": 2048,
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "initializer_range": 0.02,
      "intermediate_size": 6144,
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "max_position_embeddings": 8192,
      "mlp_bias": false,
      "model_type": "llama",
      "num_attention_heads": 16,
      "num_hidden_layers": 28,
      "num_key_value_heads": 4,
      "output_attentions": false,
      "output_hidden_states": false,
      "pad_token_id": null,
      "prefix": null,
      "pretraining_tp": 1,
      "problem_type": null,
      "return_dict": true,
      "rms_norm_eps": 1e-05,
      "rope_parameters": {
        "rope_theta": 10000.0,
        "rope_type": "default"
      },
      "sep_token_id": null,
      "task_specific_params": null,
      "tie_word_embeddings": false,
      "tokenizer_class": null,
      "use_cache": true,
      "vocab_size": 59264
    },
    "vocab_size": 59264
  },
  "encoder_conv_layers": [
    [
      1,
      3,
      1
    ],
    [
      1,
      3,
      2
    ]
  ],
  "encoder_dim": 1280,
  "freeze_projector": false,
  "freq_mask_length": 27,
  "inference_warmup_tokens": 10,
  "label_smoothing": 0.0,
  "length_penalty": 1.0,
  "llm_dim": 1024,
  "lora_alpha": 32,
  "lora_dropout": 0.0,
  "lora_rank": 8,
  "lora_target_modules": [
    "q_proj",
    "k_proj",
    "v_proj",
    "o_proj",
    "gate_proj",
    "up_proj",
    "down_proj"
  ],
  "max_new_tokens": 128,
  "min_new_tokens": 0,
  "model_dtype": "bfloat16",
  "model_type": "asr_model",
  "no_repeat_ngram_size": 0,
  "num_beams": 1,
  "num_experts": 4,
  "num_experts_per_tok": 2,
  "num_freq_masks": 2,
  "num_time_masks": 10,
  "pipeline_tag": "automatic-speech-recognition",
  "pretrained_model_path": "mazesmazes/tiny-audio",
  "projector_dropout": 0.0,
  "projector_hidden_dim": null,
  "projector_init_std": 0.02,
  "projector_num_layers": 2,
  "projector_pool_stride": 4,
  "projector_type": "mlp",
  "qformer_hidden_size": null,
  "qformer_intermediate_size": null,
  "qformer_num_heads": 16,
  "qformer_num_layers": 2,
  "qformer_window_size": 15,
  "repetition_penalty": 1.0,
  "router_aux_loss_coef": 0.01,
  "system_prompt": "",
  "text_config": {
    "_name_or_path": "Qwen/Qwen3-0.6B",
    "architectures": [
      "Qwen3ForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "dtype": "bfloat16",
    "eos_token_id": 151645,
    "head_dim": 128,
    "hidden_act": "silu",
    "hidden_size": 1024,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "layer_types": [
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention"
    ],
    "max_position_embeddings": 40960,
    "max_window_layers": 28,
    "model_type": "qwen3",
    "num_attention_heads": 16,
    "num_hidden_layers": 28,
    "num_key_value_heads": 8,
    "pad_token_id": 151643,
    "rms_norm_eps": 1e-06,
    "rope_parameters": {
      "rope_theta": 1000000,
      "rope_type": "default"
    },
    "sliding_window": null,
    "tie_word_embeddings": true,
    "use_cache": true,
    "use_sliding_window": false,
    "vocab_size": 151670
  },
  "text_model_id": "Qwen/Qwen3-0.6B",
  "time_mask_length": 100,
  "transformers_version": "5.0.0.dev0",
  "use_cache": false,
  "use_lora": false,
  "use_specaugment": true,
  "vocab_size": 151670
}
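
The "auto_map" and "custom_pipelines" entries above mean this checkpoint ships its own modeling code, so loading it requires trust_remote_code=True. A minimal usage sketch, assuming the Hub repo id matches the "pretrained_model_path" field ("mazesmazes/tiny-audio") and that the asr_*.py modules referenced in "auto_map" are present in that repo:

# Hypothetical usage sketch; repo id taken from "pretrained_model_path" above.
from transformers import pipeline

asr = pipeline(
    "automatic-speech-recognition",  # task registered under "custom_pipelines"
    model="mazesmazes/tiny-audio",
    trust_remote_code=True,          # loads the custom ASRConfig/ASRModel/ASRProcessor
)
result = asr("sample.wav")           # audio is resampled to 16 kHz, per "audio_sample_rate"
print(result["text"])

The pipeline resolves "automatic-speech-recognition" to asr_pipeline.ASRPipeline via the "custom_pipelines" mapping, and the model class to asr_modeling.ASRModel via "auto_map"; without trust_remote_code=True, transformers will refuse to execute that repo-hosted code.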