{
  "tur": {
    "[PAD]": 33,
    "[UNK]": 32,
    "a": 1,
    "b": 2,
    "c": 3,
    "d": 4,
    "e": 5,
    "f": 6,
    "g": 7,
    "h": 8,
    "i": 9,
    "j": 10,
    "k": 11,
    "l": 12,
    "m": 13,
    "n": 14,
    "o": 15,
    "p": 16,
    "r": 17,
    "s": 18,
    "t": 19,
    "u": 20,
    "v": 21,
    "w": 22,
    "y": 23,
    "z": 24,
    "|": 0,
    "ç": 25,
    "ö": 26,
    "ü": 27,
    "ğ": 28,
    "ı": 29,
    "ş": 30,
    "̇": 31
  }
}