PlayMaker13 committed on
Commit
df5239d
·
1 Parent(s): 1d09bc3

Decodificador con kenLM

Browse files
alphabet.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"labels": ["\u3087", "\u306f", "\u3072", "\u305a", "\u3069", "\u3070", "\u307b", "\u3067", "\u304a", "\u305b", "\u306e", "\u308c", "\u3094", "\u3044", "\u3061", "\u3068", "\u306b", "\u3055", "\u3042", "\u3046", "\u3054", "\u3056", "\u3065", "\u3093", "\u3073", "\u3096", "\u307f", "\u3058", "\u3043", "\u306c", "\u308f", "\u3053", "\u3045", "\u3071", "\u3064", "\u3076", "\u3060", "\u304b", "\u3041", "\u3063", "\u307d", "\u3081", "\u3049", "\u3083", "\u3075", "\u3092", "\u307e", "\u3047", "\u3082", "\u308a", "\u3084", "\u309e", "\u3050", "\u307c", "\u304c", "\u305e", "\u304f", "\u3086", "\u3074", "\u3077", "\u3080", "\u3079", "\u3052", "\u30fc", "\u3088", "\u3057", "\u307a", "\u308d", "\u306a", "\u305c", "\u305d", "\u3095", "\u304e", "\u3089", "\u308b", "\u309d", "\u3062", "\u3066", "\u308e", "\u305f", "\u3059", "\u3078", "\u306d", "\u3048", "\u3085", "\u3051", "\u304d", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
language_model/4gram.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3967d4def681ddee39bf356833361e907aa61ee2393088c6a586a40a43c6dea5
3
+ size 8844701
language_model/attrs.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
The diff for this file is too large to render. See raw diff
 
preprocessor_config.json CHANGED
@@ -4,7 +4,7 @@
4
  "feature_size": 1,
5
  "padding_side": "right",
6
  "padding_value": 0.0,
7
- "processor_class": "Wav2Vec2Processor",
8
  "return_attention_mask": true,
9
  "sampling_rate": 16000
10
  }
 
4
  "feature_size": 1,
5
  "padding_side": "right",
6
  "padding_value": 0.0,
7
+ "processor_class": "Wav2Vec2ProcessorWithLM",
8
  "return_attention_mask": true,
9
  "sampling_rate": 16000
10
  }
special_tokens_map.json CHANGED
@@ -1,6 +1,30 @@
1
  {
2
- "bos_token": "<s>",
3
- "eos_token": "</s>",
4
- "pad_token": "[PAD]",
5
- "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  }
 
1
  {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": true,
19
+ "normalized": false,
20
+ "rstrip": true,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "[UNK]",
25
+ "lstrip": true,
26
+ "normalized": false,
27
+ "rstrip": true,
28
+ "single_word": false
29
+ }
30
  }
tokenizer_config.json CHANGED
@@ -40,7 +40,7 @@
40
  "extra_special_tokens": {},
41
  "model_max_length": 1000000000000000019884624838656,
42
  "pad_token": "[PAD]",
43
- "processor_class": "Wav2Vec2Processor",
44
  "replace_word_delimiter_char": " ",
45
  "target_lang": null,
46
  "tokenizer_class": "Wav2Vec2CTCTokenizer",
 
40
  "extra_special_tokens": {},
41
  "model_max_length": 1000000000000000019884624838656,
42
  "pad_token": "[PAD]",
43
+ "processor_class": "Wav2Vec2ProcessorWithLM",
44
  "replace_word_delimiter_char": " ",
45
  "target_lang": null,
46
  "tokenizer_class": "Wav2Vec2CTCTokenizer",