ZHANGYUXUAN-zR committed
Commit 296c6e4 · verified · 1 Parent(s): 951ebf7

Delete config.json

Files changed (1):
  1. config.json +0 -69
config.json DELETED
@@ -1,69 +0,0 @@
- {
-   "activation_dropout": 0.0,
-   "activation_function": "gelu",
-   "apply_spec_augment": false,
-   "architectures": [
-     "WhisperVQForConditionalGeneration"
-   ],
-   "attention_dropout": 0.0,
-   "begin_suppress_tokens": [
-     220,
-     50257
-   ],
-   "bos_token_id": 50257,
-   "classifier_proj_size": 256,
-   "d_model": 1280,
-   "decoder_attention_heads": 20,
-   "decoder_ffn_dim": 5120,
-   "decoder_layerdrop": 0.0,
-   "decoder_layers": 32,
-   "decoder_start_token_id": 50258,
-   "dropout": 0.0,
-   "encoder_attention_heads": 20,
-   "encoder_causal_attention": false,
-   "encoder_causal_convolution": false,
-   "encoder_ffn_dim": 5120,
-   "encoder_layerdrop": 0.0,
-   "encoder_layers": 32,
-   "eos_token_id": 50257,
-   "init_std": 0.02,
-   "is_encoder_decoder": true,
-   "mask_feature_length": 10,
-   "mask_feature_min_masks": 0,
-   "mask_feature_prob": 0.0,
-   "mask_time_length": 10,
-   "mask_time_min_masks": 2,
-   "mask_time_prob": 0.05,
-   "max_length": 448,
-   "max_source_positions": 1500,
-   "max_target_positions": 448,
-   "median_filter_width": 7,
-   "model_type": "whisper",
-   "num_hidden_layers": 32,
-   "num_mel_bins": 128,
-   "pad_token_id": 50256,
-   "pitch_control": "fcpe_f0",
-   "pitch_loss_scale": 2.0,
-   "pitch_upsample_scale": 2,
-   "pooling_kernel_size": 2,
-   "pooling_position": 16,
-   "pooling_type": "avg",
-   "quantize_causal_block_size": 200,
-   "quantize_causal_encoder": false,
-   "quantize_commit_coefficient": 0.25,
-   "quantize_ema_decay": 0.99,
-   "quantize_encoder_only": false,
-   "quantize_hidden_dim": null,
-   "quantize_loss_scale": 10.0,
-   "quantize_position": 16,
-   "quantize_restart_interval": 100,
-   "quantize_vocab_size": 32768,
-   "scale_embedding": false,
-   "skip_language_detection": true,
-   "torch_dtype": "float32",
-   "transformers_version": "4.44.1",
-   "use_cache": true,
-   "use_weighted_layer_sum": false,
-   "vocab_size": 51866
- }
-
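For reference, the removed file is a Whisper-style model config whose extra `quantize_*` / `pitch_*` keys configure a vector-quantization bottleneck and pitch loss on top of the standard Whisper fields. Below is a minimal sketch of how such a file can be inspected, assuming only the stock `transformers` `WhisperConfig`; the `WhisperVQForConditionalGeneration` class named in `architectures` belongs to this repository's own code, not to `transformers`, so this snippet is illustrative and not the repo's actual loading path:

```python
# Illustrative sketch, not this repository's actual loading code.
import json

from transformers import WhisperConfig

# Parse the (now-deleted) config.json shown in the diff above.
with open("config.json") as f:
    cfg_dict = json.load(f)

# PretrainedConfig keeps keys it doesn't recognize as plain attributes,
# so the custom quantize_*/pitch_* fields survive alongside the
# standard Whisper ones.
config = WhisperConfig.from_dict(cfg_dict)

print(config.d_model)              # 1280   (standard Whisper field)
print(config.quantize_vocab_size)  # 32768  (custom VQ extension)
print(config.architectures)        # ['WhisperVQForConditionalGeneration']
```

Instantiating the model itself would additionally require the `WhisperVQForConditionalGeneration` implementation from this repository; the point above is only that the deleted file is an ordinary Hugging Face config with extension keys.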