gpt2-wikitext2 / tokenizer_config.json
{
"add_prefix_space": false,
"bos_token": "<|endoftext|>",
"eos_token": "<|endoftext|>",
"model_max_length": 1024,
"name_or_path": "sgugger/gpt2-like-tokenizer",
"special_tokens_map_file": null,
"tokenizer_class": "GPT2Tokenizer",
"unk_token": "<|endoftext|>"
}
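
For context, a minimal sketch of how this config is consumed, assuming the transformers library is installed and this file sits in a local gpt2-wikitext2 directory (or a Hub repo of the same name) alongside the tokenizer's vocab and merges files:

from transformers import GPT2Tokenizer

# from_pretrained reads tokenizer_config.json and applies the settings
# above (special tokens, model_max_length, tokenizer_class, etc.).
tokenizer = GPT2Tokenizer.from_pretrained("gpt2-wikitext2")

# GPT-2 reuses <|endoftext|> as the bos, eos, and unk token, as configured.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)
print(tokenizer.model_max_length)  # 1024, from "model_max_length"

# Tokenize a sample string; "add_prefix_space": false means no leading
# space is inserted before the first word.
print(tokenizer("Hello world")["input_ids"])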