anderloh committed on
Commit
f765033
·
verified ·
1 Parent(s): 5436804

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +22 -12
config.json CHANGED
@@ -5,26 +5,35 @@
5
  "Wav2Vec2ForPreTraining"
6
  ],
7
  "attention_dropout": 0.0,
8
- "classifier_proj_size": 64,
9
- "codevector_dim": 64,
10
  "contrastive_logits_temperature": 0.1,
11
  "conv_bias": true,
12
  "conv_dim": [
13
- 64,
14
- 64,
15
- 64,
16
- 64
 
 
 
17
  ],
18
  "conv_kernel": [
19
  10,
20
  3,
21
  3,
 
 
 
22
  2
23
  ],
24
  "conv_stride": [
25
  5,
26
  2,
27
  2,
 
 
 
28
  2
29
  ],
30
  "ctc_loss_reduction": "sum",
@@ -40,9 +49,9 @@
40
  "hidden_act": "gelu",
41
  "hidden_dropout": 0.0,
42
  "hidden_dropout_prob": 0.0,
43
- "hidden_size": 96,
44
  "initializer_range": 0.02,
45
- "intermediate_size": 384,
46
  "layer_norm_eps": 1e-05,
47
  "layerdrop": 0.0,
48
  "mask_feature_length": 10,
@@ -55,10 +64,11 @@
55
  "num_codevectors_per_group": 320,
56
  "num_conv_pos_embedding_groups": 16,
57
  "num_conv_pos_embeddings": 128,
58
- "num_feat_extract_layers": 4,
59
- "num_hidden_layers": 3,
60
  "num_negatives": 100,
61
  "pad_token_id": 0,
62
- "proj_codevector_dim": 64,
63
  "use_weighted_layer_sum": false
64
- }
 
 
5
  "Wav2Vec2ForPreTraining"
6
  ],
7
  "attention_dropout": 0.0,
8
+ "classifier_proj_size": 128,
9
+ "codevector_dim": 128,
10
  "contrastive_logits_temperature": 0.1,
11
  "conv_bias": true,
12
  "conv_dim": [
13
+ 256,
14
+ 256,
15
+ 256,
16
+ 256,
17
+ 256,
18
+ 256,
19
+ 256
20
  ],
21
  "conv_kernel": [
22
  10,
23
  3,
24
  3,
25
+ 3,
26
+ 3,
27
+ 2,
28
  2
29
  ],
30
  "conv_stride": [
31
  5,
32
  2,
33
  2,
34
+ 2,
35
+ 2,
36
+ 2,
37
  2
38
  ],
39
  "ctc_loss_reduction": "sum",
 
49
  "hidden_act": "gelu",
50
  "hidden_dropout": 0.0,
51
  "hidden_dropout_prob": 0.0,
52
+ "hidden_size": 384,
53
  "initializer_range": 0.02,
54
+ "intermediate_size": 1536,
55
  "layer_norm_eps": 1e-05,
56
  "layerdrop": 0.0,
57
  "mask_feature_length": 10,
 
64
  "num_codevectors_per_group": 320,
65
  "num_conv_pos_embedding_groups": 16,
66
  "num_conv_pos_embeddings": 128,
67
+ "num_feat_extract_layers": 7,
68
+ "num_hidden_layers": 6,
69
  "num_negatives": 100,
70
  "pad_token_id": 0,
71
+ "proj_codevector_dim": 128,
72
  "use_weighted_layer_sum": false
73
+ }
74
+