Chat-UniVi committed
Commit b06f2ae
1 Parent(s): be55fea

Upload tokenizer_config.json

Files changed (1)
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -1 +1 @@
- {"tokenizer_class": "LlamaTokenizer", "bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "add_bos_token": true, "add_eos_token": false, "clean_up_tokenization_spaces": false, "legacy": false, "model_max_length": 1000000000000000019884624838656, "sp_model_kwargs": {}}
+ {"tokenizer_class": "LlamaTokenizer", "bos_token": {"__type": "AddedToken", "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "eos_token": {"__type": "AddedToken", "content": "</s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "unk_token": {"__type": "AddedToken", "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false}, "pad_token": null, "add_bos_token": false, "add_eos_token": false, "clean_up_tokenization_spaces": false, "legacy": false, "model_max_length": 1000000000000000019884624838656, "sp_model_kwargs": {}}