{ "tokenizer_class": "BertTokenizer", "do_lower_case": false, "strip_accents": false, "max_len": 512, "init_inputs": [], "model_max_length": 512, "special_tokens_map_file": null, "tokenizer_file": "tokenizer.json", "name_or_path": "ubaada/original-transformer", "tokenizer_type": "BPE" }