{
"_name_or_path": "team-lucid/hubert-base-korean",
"activation_dropout": 0.1,
"apply_spec_augment": true,
"architectures": [
"HubertForCTC"
],
"attention_dropout": 0.1,
"auto_map": {
"FlaxAutoModel": "team-lucid/hubert-base-korean--modeling_flax_hubert.FlaxHubertModel"
},
"bos_token_id": 1,
"classifier_proj_size": 256,
"conv_bias": false,
"conv_dim": [
512,
512,
512,
512,
512,
512,
512
],
"conv_kernel": [
10,
3,
3,
3,
3,
2,
2
],
"conv_stride": [
5,
2,
2,
2,
2,
2,
2
],
"ctc_loss_reduction": "sum",
"ctc_zero_infinity": false,
"do_stable_layer_norm": false,
"eos_token_id": 2,
"feat_extract_activation": "gelu",
"feat_extract_dropout": 0.0,
"feat_extract_norm": "group",
"feat_proj_dropout": 0.1,
"feat_proj_layer_norm": true,
"final_dropout": 0.1,
"final_proj_size": 256,
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout": 0.1,
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-05,
"layerdrop": 0.1,
"mask_feature_length": 10,
"mask_feature_min_masks": 0,
"mask_feature_prob": 0.0,
"mask_time_length": 10,
"mask_time_min_masks": 2,
"mask_time_prob": 0.05,
"model_type": "hubert",
"num_attention_heads": 12,
"num_conv_pos_embedding_groups": 16,
"num_conv_pos_embeddings": 128,
"num_feat_extract_layers": 7,
"num_hidden_layers": 12,
"pad_token_id": 0,
"tokenizer_class": "Wav2Vec2CTCTokenizer",
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"use_weighted_layer_sum": false,
"vocab_size": 53
}