QbethQ committed on
Commit
2dc6dbd
·
1 Parent(s): e3843f0

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. config.json +60 -0
  2. preprocessor_config.json +14 -0
config.json ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "openai/whisper-large-v3",
3
+ "activation_dropout": 0.0,
4
+ "activation_function": "gelu",
5
+ "apply_spec_augment": false,
6
+ "architectures": [
7
+ "WhisperLFQEncoder"
8
+ ],
9
+ "attention_dropout": 0.0,
10
+ "batch_maximization_weight": 0.3,
11
+ "classifier_proj_size": 256,
12
+ "d_model": 1280,
13
+ "delta_cross_logdet_reg_loss": 1.0,
14
+ "dropout": 0.0,
15
+ "enable_noise_contrastive": false,
16
+ "encoder_attention_heads": 20,
17
+ "encoder_causal_attention": false,
18
+ "encoder_causal_convolution": false,
19
+ "encoder_ffn_dim": 5120,
20
+ "encoder_layerdrop": 0.0,
21
+ "encoder_layers": 32,
22
+ "fix_noise_commit_loss_ratio": false,
23
+ "init_std": 0.06,
24
+ "is_encoder_decoder": true,
25
+ "lambda_consensus_loss_ratio": 0.25,
26
+ "layernorm_after_quantize": true,
27
+ "mask_feature_length": 10,
28
+ "mask_feature_min_masks": 0,
29
+ "mask_feature_prob": 0.0,
30
+ "mask_time_length": 10,
31
+ "mask_time_min_masks": 2,
32
+ "mask_time_prob": 0.05,
33
+ "max_length": 448,
34
+ "max_source_positions": 1500,
35
+ "max_target_positions": 448,
36
+ "median_filter_width": 7,
37
+ "model_type": "whisper",
38
+ "noise_commit_loss_ratio": 0.3333333333333333,
39
+ "num_clean_input": 3,
40
+ "num_hidden_layers": 32,
41
+ "num_mel_bins": 128,
42
+ "num_voters": 5,
43
+ "pooling_kernel_size": 2,
44
+ "pooling_position": 16,
45
+ "pooling_type": "avg",
46
+ "quantize_commit_coefficient": 0.25,
47
+ "quantize_encoder_only": true,
48
+ "quantize_position": 16,
49
+ "quantize_vocab_size": 8192,
50
+ "sample_minimization_weight": 0.1,
51
+ "torch_dtype": "float32",
52
+ "transformers_version": "4.48.0",
53
+ "use_aggregated_entropy": true,
54
+ "use_cache": true,
55
+ "use_codebook_ce_loss": true,
56
+ "use_commit_loss": true,
57
+ "use_projection_bias": false,
58
+ "use_svlfq": true,
59
+ "use_weighted_layer_sum": false
60
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "chunk_length": 30,
3
+ "feature_extractor_type": "WhisperFeatureExtractor",
4
+ "feature_size": 128,
5
+ "hop_length": 160,
6
+ "n_fft": 400,
7
+ "n_samples": 480000,
8
+ "nb_max_frames": 3000,
9
+ "padding_side": "right",
10
+ "padding_value": 0.0,
11
+ "processor_class": "WhisperProcessor",
12
+ "return_attention_mask": false,
13
+ "sampling_rate": 16000
14
+ }