anzorq committed on
Commit
bb1e90f
1 Parent(s): 9a714cd

Upload lm-boosted decoder

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ language_model/unigrams.txt filter=lfs diff=lfs merge=lfs -text
alphabet.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"labels": [" ", "\u0430", "\u044d", "\u0431", "\u0432", "\u0433", "\u0263", "\u0434", "j", "\u04e1", "\u0435", "\u0436", "\u0290", "\u0437", "\u0438", "\u0439", "\u043a", "\u049b", "q", "\u043b", "\u026c", "\u052f", "\u043c", "\u043d", "\u043e", "\u043f", "\u0525", "\u0440", "\u0441", "\u0442", "\u04ad", "\u0443", "\u0444", "\u10f6", "\u0445", "h", "\u04b3", "\u0446", "\u04b5", "\u0447", "\u0448", "\u0449", "\u0255", "\u044b", "\u04cf", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:be7f6914c91af0d0138cc8b5361ef3e2f8b5d7c8a1d757a30606bc2f19c1d30e
3
+ size 910297768
language_model/attrs.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:949357737f27ba3a362a68a3672dce80acc874386649e9b1aa344c5324573011
3
+ size 23697394
preprocessor_config.json CHANGED
@@ -4,7 +4,7 @@
4
  "num_mel_bins": 80,
5
  "padding_side": "right",
6
  "padding_value": 0.0,
7
- "processor_class": "Wav2Vec2BertProcessor",
8
  "return_attention_mask": true,
9
  "sampling_rate": 16000,
10
  "stride": 2
 
4
  "num_mel_bins": 80,
5
  "padding_side": "right",
6
  "padding_value": 0.0,
7
+ "processor_class": "Wav2Vec2ProcessorWithLM",
8
  "return_attention_mask": true,
9
  "sampling_rate": 16000,
10
  "stride": 2
tokenizer_config.json CHANGED
@@ -39,7 +39,7 @@
39
  "eos_token": "</s>",
40
  "model_max_length": 1000000000000000019884624838656,
41
  "pad_token": "[PAD]",
42
- "processor_class": "Wav2Vec2BertProcessor",
43
  "replace_word_delimiter_char": " ",
44
  "target_lang": null,
45
  "tokenizer_class": "Wav2Vec2CTCTokenizer",
 
39
  "eos_token": "</s>",
40
  "model_max_length": 1000000000000000019884624838656,
41
  "pad_token": "[PAD]",
42
+ "processor_class": "Wav2Vec2ProcessorWithLM",
43
  "replace_word_delimiter_char": " ",
44
  "target_lang": null,
45
  "tokenizer_class": "Wav2Vec2CTCTokenizer",