dmatekenya committed on
Commit
1869ace
1 Parent(s): abc12fb

Upload tokenizer

Browse files
Files changed (3) hide show
  1. special_tokens_map.json +6 -0
  2. tokenizer_config.json +13 -0
  3. vocab.json +87 -0
special_tokens_map.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "eos_token": "</s>",
4
+ "pad_token": "[PAD]",
5
+ "unk_token": "[UNK]"
6
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": "<s>",
3
+ "clean_up_tokenization_spaces": true,
4
+ "do_lower_case": false,
5
+ "eos_token": "</s>",
6
+ "model_max_length": 1000000000000000019884624838656,
7
+ "pad_token": "[PAD]",
8
+ "replace_word_delimiter_char": " ",
9
+ "target_lang": null,
10
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
11
+ "unk_token": "[UNK]",
12
+ "word_delimiter_token": "|"
13
+ }
vocab.json ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "\t": 0,
3
+ "!": 2,
4
+ "\"": 3,
5
+ "$": 4,
6
+ "%": 5,
7
+ "'": 6,
8
+ "(": 7,
9
+ ")": 8,
10
+ ",": 9,
11
+ "-": 10,
12
+ ".": 11,
13
+ "/": 12,
14
+ "0": 13,
15
+ "1": 14,
16
+ "2": 15,
17
+ "3": 16,
18
+ "4": 17,
19
+ "5": 18,
20
+ "6": 19,
21
+ "7": 20,
22
+ "8": 21,
23
+ "9": 22,
24
+ ":": 23,
25
+ ";": 24,
26
+ "?": 25,
27
+ "A": 26,
28
+ "B": 27,
29
+ "C": 28,
30
+ "D": 29,
31
+ "E": 30,
32
+ "F": 31,
33
+ "G": 32,
34
+ "H": 33,
35
+ "I": 34,
36
+ "J": 35,
37
+ "K": 36,
38
+ "L": 37,
39
+ "M": 38,
40
+ "N": 39,
41
+ "O": 40,
42
+ "P": 41,
43
+ "Q": 42,
44
+ "R": 43,
45
+ "S": 44,
46
+ "T": 45,
47
+ "U": 46,
48
+ "V": 47,
49
+ "W": 48,
50
+ "Y": 49,
51
+ "Z": 50,
52
+ "[PAD]": 84,
53
+ "[UNK]": 83,
54
+ "a": 51,
55
+ "b": 52,
56
+ "c": 53,
57
+ "d": 54,
58
+ "e": 55,
59
+ "f": 56,
60
+ "g": 57,
61
+ "h": 58,
62
+ "i": 59,
63
+ "j": 60,
64
+ "k": 61,
65
+ "l": 62,
66
+ "m": 63,
67
+ "n": 64,
68
+ "o": 65,
69
+ "p": 66,
70
+ "q": 67,
71
+ "r": 68,
72
+ "s": 69,
73
+ "t": 70,
74
+ "u": 71,
75
+ "v": 72,
76
+ "w": 73,
77
+ "x": 74,
78
+ "y": 75,
79
+ "z": 76,
80
+ "|": 1,
81
+ "‘": 77,
82
+ "’": 78,
83
+ "“": 79,
84
+ "”": 80,
85
+ "…": 81,
86
+ "": 82
87
+ }