kho-v1-lex-fi-sv / tokenizer_config.json
{
"clean_up_tokenization_spaces": true,
"eos_token": "</s>",
"model_max_length": 512,
"pad_token": "<pad>",
"return_tensors": "pt",
"separate_vocabs": false,
"source_lang": "fi",
"sp_model_kwargs": {},
"target_lang": "sv",
"tokenizer_class": "MarianTokenizer",
"unk_token": "<unk>"
}
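
A minimal usage sketch in Python: loading this MarianTokenizer with the config above and tokenizing a Finnish source sentence for the Finnish-to-Swedish model. The hub id "ossib/kho-v1-lex-fi-sv" is inferred from the page header and is an assumption, not confirmed by the file itself.

# Sketch only; the repo id below is assumed from the page header.
from transformers import MarianTokenizer

tokenizer = MarianTokenizer.from_pretrained("ossib/kho-v1-lex-fi-sv")

# Per the config: source_lang="fi", target_lang="sv", pad_token="<pad>",
# model_max_length=512, and PyTorch tensors as the default return type.
batch = tokenizer(["Hyvää huomenta!"], return_tensors="pt", padding=True)
print(batch["input_ids"].shape)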