Upload lm-boosted decoder
- alphabet.json +1 -0
- language_model/attrs.json +1 -0
- language_model/pt-4gram.bin +3 -0
- language_model/unigrams.txt +0 -0
- preprocessor_config.json +1 -0
- tokenizer_config.json +1 -1
alphabet.json
ADDED
@@ -0,0 +1 @@
+{"labels": ["", "<s>", "</s>", "\u2047", " ", "a", "e", "o", "s", "r", "i", "n", "m", "d", "t", "u", "c", "l", "p", "v", "q", "g", "f", "h", "b", "\u00e3", "\u00e9", "\u00e7", "\u00e1", "z", "j", "\u00ed", "\u00ea", "x", "\u00f3", "-", "\u00f5", "\u00fa", "\u00e0", "\u00f4", "\u00e2", "k", "y", "w"], "is_bpe": false}
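This is pyctcdecode's serialized label set: index 0 is the CTC blank "" (the tokenizer's "<pad>"), "\u2047" (⁇) stands in for "<unk>", and the word delimiter "|" is mapped to a literal space, so the labels line up with the acoustic model's vocabulary order. A minimal sketch of rebuilding a decoder from this file, assuming a local download at `alphabet.json`:

```python
import json

from pyctcdecode import build_ctcdecoder

with open("alphabet.json", encoding="utf-8") as f:
    alphabet = json.load(f)

# Without a kenlm_model_path this does plain (LM-free) beam search;
# the 4-gram LM and its weights are wired in below.
decoder = build_ctcdecoder(labels=alphabet["labels"])
```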
language_model/attrs.json
ADDED
@@ -0,0 +1 @@
+{"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
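attrs.json carries the beam-search hyperparameters: `alpha` weights the LM score against the acoustic score, `beta` is the word-insertion bonus, `unk_score_offset` is an extra log-penalty for out-of-vocabulary words, and `score_boundary` tells the decoder to score sentence boundaries with the LM. A sketch of how they map onto pyctcdecode's `build_ctcdecoder` (note `score_boundary` becomes the `lm_score_boundary` keyword); the local file paths are assumptions about a checkout of this repo:

```python
import json

from pyctcdecode import build_ctcdecoder

with open("alphabet.json", encoding="utf-8") as f:
    labels = json.load(f)["labels"]
with open("language_model/attrs.json", encoding="utf-8") as f:
    attrs = json.load(f)

decoder = build_ctcdecoder(
    labels=labels,
    kenlm_model_path="language_model/pt-4gram.bin",
    alpha=attrs["alpha"],                        # LM weight vs. acoustic score
    beta=attrs["beta"],                          # word-insertion bonus
    unk_score_offset=attrs["unk_score_offset"],  # log-penalty for OOV words
    lm_score_boundary=attrs["score_boundary"],   # score <s>/</s> with the LM
)
```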
language_model/pt-4gram.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5988077751c5b3e96ae5fdfc8d913413f3bce307c85d0047c1a751eaa14b462f
+size 4330633731
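The Portuguese 4-gram KenLM binary (~4.33 GB, per the `size` line) is stored through Git LFS, so only the pointer file appears in the diff. A sketch of fetching the actual artifact with `huggingface_hub`; the repo id is a placeholder, since the diff does not show which repository this commit belongs to:

```python
from huggingface_hub import hf_hub_download

# Downloads (and caches) the LFS object behind the pointer file.
lm_path = hf_hub_download(
    repo_id="your-username/your-repo",  # placeholder repo id
    filename="language_model/pt-4gram.bin",
)
```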
language_model/unigrams.txt
ADDED
File without changes
preprocessor_config.json
CHANGED
@@ -4,6 +4,7 @@
 "feature_size": 1,
 "padding_side": "right",
 "padding_value": 0,
+"processor_class": "Wav2Vec2ProcessorWithLM",
 "return_attention_mask": true,
 "sampling_rate": 16000
 }
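Setting `processor_class` lets `AutoProcessor` resolve this checkpoint to the LM-aware processor instead of the plain `Wav2Vec2Processor`. A sketch, again with a placeholder repo id:

```python
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("your-username/your-repo")
print(type(processor).__name__)  # Wav2Vec2ProcessorWithLM
```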
tokenizer_config.json
CHANGED
@@ -1 +1 @@
-{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "<pad>", "do_lower_case": false, "word_delimiter_token": "|", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
+{"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "<pad>", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": "/root/.cache/huggingface/transformers/a803f23f1afc428695542b16e67d92127ae59529b524e83b04b46e96c24e1dd5.9d6cd81ef646692fb1c169a880161ea1cb95f49694f220aced9b704b457e51dd", "tokenizer_file": null, "name_or_path": "lgris/bp_400h_xlsr2_300M", "tokenizer_class": "Wav2Vec2CTCTokenizer", "processor_class": "Wav2Vec2ProcessorWithLM"}
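With `processor_class` set in both configs, the whole pipeline (feature extractor, tokenizer, and LM-boosted decoder) loads in one call. An end-to-end sketch; the repo id is a placeholder, and using `lgris/bp_400h_xlsr2_300M` as the acoustic model is an assumption based on the `name_or_path` field above:

```python
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

processor = Wav2Vec2ProcessorWithLM.from_pretrained("your-username/your-repo")
model = Wav2Vec2ForCTC.from_pretrained("lgris/bp_400h_xlsr2_300M")

speech = np.zeros(16_000, dtype=np.float32)  # placeholder: 1 s of silence at 16 kHz
inputs = processor(speech, sampling_rate=16_000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# batch_decode on raw logits runs pyctcdecode's beam search with the 4-gram LM.
transcription = processor.batch_decode(logits.numpy()).text[0]
print(transcription)
```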