tokenizer_config.json
{
  "model_max_length": 512,
  "name_or_path": "/global/scratch/users/aniketh/PromoGen/SentencePieceUnigramTokenizer_4096_min_exp_2_fast",
  "special_tokens": [
    "<BOS>",
    "<EOS>",
    "<PAD>",
    "<UNK>",
    "<CLS>",
    "<SEP>",
    "<MASK>"
  ],
  "special_tokens_map_file": "/global/scratch/users/aniketh/PromoGen/SentencePieceUnigramTokenizer_4096_min_exp_2_fast/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
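
Since "tokenizer_class" is "PreTrainedTokenizerFast", this config can be loaded with the Hugging Face transformers library. Below is a minimal sketch, assuming the config sits in a local directory that also contains the tokenizer.json and special_tokens_map.json files it references; the directory path and the example input string are hypothetical.

# Minimal loading sketch (assumptions: local directory "path/to/tokenizer_dir"
# holds this tokenizer_config.json plus tokenizer.json and special_tokens_map.json).
from transformers import AutoTokenizer

# AutoTokenizer resolves "tokenizer_class": "PreTrainedTokenizerFast" from the config.
tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer_dir")  # hypothetical path

# "model_max_length": 512 from the config caps sequence length at 512 tokens.
print(tokenizer.model_max_length)  # 512

# Encode a sample sequence, truncating to the configured maximum length.
enc = tokenizer("ACGTACGT", truncation=True, max_length=tokenizer.model_max_length)
print(enc.input_ids)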