MoH-LLaMA3-8B / tokenizer_config.json
Commit: b06f2ae (verified) — "Upload tokenizer_config.json"
{"tokenizer_class": "LlamaTokenizer", "bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "add_bos_token": false, "add_eos_token": false, "clean_up_tokenization_spaces": false, "legacy": false, "model_max_length": 1000000000000000019884624838656, "sp_model_kwargs": {}}