gpt-wee-large / tokenizer_config.json
{
"bos_token": "<|endoftext|>",
"eos_token": "<|endoftext|>",
"model_max_length": 1000000000000000019884624838656,
"name_or_path": "tokenizer_16k",
"special_tokens_map_file": "tokenizer_16k/special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast"
}
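
A minimal sketch of loading this tokenizer with the Hugging Face transformers library. The repo id "bbunzeck/gpt-wee-large" is an assumption inferred from the page path, not confirmed by the file itself:

# Hypothetical usage sketch; the repo id below is an assumption
# inferred from the page path.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bbunzeck/gpt-wee-large")

# Per this config, bos_token and eos_token both map to "<|endoftext|>",
# so a single special token marks both sequence boundaries.
print(tokenizer.bos_token, tokenizer.eos_token)  # <|endoftext|> <|endoftext|>

# model_max_length holds transformers' VERY_LARGE_INTEGER sentinel
# (int(1e30) == 1000000000000000019884624838656), meaning no real length
# limit was saved; callers should pass max_length/truncation explicitly.
print(tokenizer.model_max_length)

enc = tokenizer("hello world", truncation=True, max_length=128)
print(enc["input_ids"])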