MoH-LLaMA3-8B / tokenizer_config.json
Chat-UniVi's picture
Upload 7 files
94af976 verified
raw
history blame
640 Bytes
{
  "tokenizer_class": "LlamaTokenizer",
  "bos_token": {
    "__type": "AddedToken",
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "__type": "AddedToken",
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "__type": "AddedToken",
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": null,
  "add_bos_token": true,
  "add_eos_token": false,
  "clean_up_tokenization_spaces": false,
  "legacy": false,
  "model_max_length": 1000000000000000019884624838656,
  "sp_model_kwargs": {}
}