{ "add_bos_token": false, "add_prefix_space": false, "added_tokens_decoder": { "0": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "1": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "2": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "3": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "4": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32768": { "content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true }, "32769": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32770": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32771": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32772": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32773": { "content": "", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32774": { "content": "<|im_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }, "32775": { "content": "<|im_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true } }, "additional_special_tokens": [ "<|im_start|>", "<|im_end|>" ], "bos_token": "<|im_start|>", "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}", "clean_up_tokenization_spaces": true, "eos_token": "<|im_end|>", "errors": "replace", "model_max_length": 1000000000000000019884624838656, "pad_token": "<|im_end|>", "tokenizer_class": "GPT2Tokenizer", "unk_token": "" }