TheAIchemist13 committed on
Commit
7f3b454
·
1 Parent(s): e411293

Uploaded 2 files

Browse files
Files changed (2) hide show
  1. config.json +14 -27
  2. preprocessor_config.json +0 -0
config.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
- "_name_or_path": "gramvaani_whisper-tiny-hi/",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
  "apply_spec_augment": false,
6
  "architectures": [
7
- "PoptorchPipelinedWhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
  "begin_suppress_tokens": [
@@ -13,32 +13,19 @@
13
  ],
14
  "bos_token_id": 50257,
15
  "classifier_proj_size": 256,
16
- "d_model": 384,
17
- "decoder_attention_heads": 6,
18
- "decoder_ffn_dim": 1536,
19
  "decoder_layerdrop": 0.0,
20
- "decoder_layers": 4,
21
  "decoder_start_token_id": 50258,
22
  "dropout": 0.0,
23
- "encoder_attention_heads": 6,
24
- "encoder_ffn_dim": 1536,
25
  "encoder_layerdrop": 0.0,
26
- "encoder_layers": 4,
27
  "eos_token_id": 50257,
28
- "forced_decoder_ids": [
29
- [
30
- 1,
31
- 50306
32
- ],
33
- [
34
- 2,
35
- 50359
36
- ],
37
- [
38
- 3,
39
- 50363
40
- ]
41
- ],
42
  "init_std": 0.02,
43
  "is_encoder_decoder": true,
44
  "mask_feature_length": 10,
@@ -51,14 +38,14 @@
51
  "max_source_positions": 1500,
52
  "max_target_positions": 448,
53
  "model_type": "whisper",
54
- "num_hidden_layers": 4,
55
  "num_mel_bins": 80,
56
  "pad_token_id": 50257,
57
  "scale_embedding": false,
58
  "suppress_tokens": [],
59
- "torch_dtype": "float16",
60
- "transformers_version": "4.29.2",
61
- "use_cache": true,
62
  "use_weighted_layer_sum": false,
63
  "vocab_size": 51865
64
  }
 
1
  {
2
+ "_name_or_path": "openai/whisper-small",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
  "apply_spec_augment": false,
6
  "architectures": [
7
+ "WhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
  "begin_suppress_tokens": [
 
13
  ],
14
  "bos_token_id": 50257,
15
  "classifier_proj_size": 256,
16
+ "d_model": 768,
17
+ "decoder_attention_heads": 12,
18
+ "decoder_ffn_dim": 3072,
19
  "decoder_layerdrop": 0.0,
20
+ "decoder_layers": 12,
21
  "decoder_start_token_id": 50258,
22
  "dropout": 0.0,
23
+ "encoder_attention_heads": 12,
24
+ "encoder_ffn_dim": 3072,
25
  "encoder_layerdrop": 0.0,
26
+ "encoder_layers": 12,
27
  "eos_token_id": 50257,
28
+ "forced_decoder_ids": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  "init_std": 0.02,
30
  "is_encoder_decoder": true,
31
  "mask_feature_length": 10,
 
38
  "max_source_positions": 1500,
39
  "max_target_positions": 448,
40
  "model_type": "whisper",
41
+ "num_hidden_layers": 12,
42
  "num_mel_bins": 80,
43
  "pad_token_id": 50257,
44
  "scale_embedding": false,
45
  "suppress_tokens": [],
46
+ "torch_dtype": "float32",
47
+ "transformers_version": "4.27.4",
48
+ "use_cache": false,
49
  "use_weighted_layer_sum": false,
50
  "vocab_size": 51865
51
  }
preprocessor_config.json CHANGED
The diff for this file is too large to render. See raw diff