honest_llama2_chat_7B / tokenizer_config.json
{
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_llama.LLaMATokenizer",
      null
    ]
  },
  "bos_token": {
    "__type": "AddedToken",
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "clean_up_tokenization_spaces": false,
  "eos_token": {
    "__type": "AddedToken",
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "legacy": false,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": null,
  "special_tokens_map_file": "/ssd4tb/huggingface_cache/hub/models--meta-llama--Llama-2-7b-chat-hf/snapshots/0d52e200fc7ba73089b86c1b5727267dccf65311/special_tokens_map.json",
  "tokenizer_class": "LLaMATokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "use_fast": false
}
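
For reference, a minimal sketch of loading a tokenizer driven by this config with the transformers library. The repo id likenneth/honest_llama2_chat_7B is inferred from the page title and is an assumption; since "use_fast" is false, the slow SentencePiece-based LlamaTokenizer is the matching class. Note that the "LLaMATokenizer" casing in the config predates the class's rename to LlamaTokenizer in newer transformers releases, so loading the concrete class directly is more robust than going through AutoTokenizer.

from transformers import LlamaTokenizer

# Load the slow (SentencePiece) tokenizer; "use_fast": false in the
# config above means the non-Rust implementation is the intended one.
# The repo id is an assumption based on this page's title.
tok = LlamaTokenizer.from_pretrained("likenneth/honest_llama2_chat_7B")

print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>
print(tok.model_max_length)  # ~1e30, i.e. effectively no length limit

Because "pad_token" is null in the config, batched encoding with padding requires assigning one first (a common choice is tok.pad_token = tok.eos_token).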