sjhuskey committed on
Commit
9e0b607
·
verified ·
1 Parent(s): 2014b2c

Updated model with emissions

Browse files
Files changed (2) hide show
  1. config.json +4 -4
  2. model.safetensors +2 -2
config.json CHANGED
@@ -45,6 +45,7 @@
45
  "ctc_zero_infinity": false,
46
  "diversity_loss_weight": 0.1,
47
  "do_stable_layer_norm": true,
 
48
  "eos_token_id": 2,
49
  "feat_extract_activation": "gelu",
50
  "feat_extract_dropout": 0.0,
@@ -85,7 +86,7 @@
85
  "num_hidden_layers": 24,
86
  "num_negatives": 100,
87
  "output_hidden_size": 1024,
88
- "pad_token_id": 41,
89
  "proj_codevector_dim": 768,
90
  "tdnn_dilation": [
91
  1,
@@ -108,9 +109,8 @@
108
  1,
109
  1
110
  ],
111
- "torch_dtype": "float32",
112
- "transformers_version": "4.55.4",
113
  "use_weighted_layer_sum": false,
114
- "vocab_size": 42,
115
  "xvector_output_dim": 512
116
  }
 
45
  "ctc_zero_infinity": false,
46
  "diversity_loss_weight": 0.1,
47
  "do_stable_layer_norm": true,
48
+ "dtype": "float32",
49
  "eos_token_id": 2,
50
  "feat_extract_activation": "gelu",
51
  "feat_extract_dropout": 0.0,
 
86
  "num_hidden_layers": 24,
87
  "num_negatives": 100,
88
  "output_hidden_size": 1024,
89
+ "pad_token_id": 42,
90
  "proj_codevector_dim": 768,
91
  "tdnn_dilation": [
92
  1,
 
109
  1,
110
  1
111
  ],
112
+ "transformers_version": "4.56.2",
 
113
  "use_weighted_layer_sum": false,
114
+ "vocab_size": 43,
115
  "xvector_output_dim": 512
116
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3cc19c6677fe83e3b55cbabd65790c2f589019c127d01ef4a497906490b0aa68
3
- size 1261979680
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c4bb21facebec73709d151a638935ed10cdeef95e459dc1677fa47790b304f39
3
+ size 1261983780