{ "added_tokens_decoder": {}, "additional_special_tokens": [], "auto_map": { "AutoTokenizer": [ "tokenization_zhinao.ZhinaoTokenizer", null ] }, "clean_up_tokenization_spaces": false, "do_lower_case": false, "model_max_length": 1024, "padding_side": "right", "remove_space": false, "tokenizer_class": "ZhinaoTokenizer", "tokenizer_file": null }