fix max length
tokenizer_config.json (+1 -1)
@@ -2,7 +2,7 @@
   "add_prefix_space": false,
   "bos_token": "<|endoftext|>",
   "eos_token": "<|endoftext|>",
-  "model_max_length":
+  "model_max_length": 128,
   "name_or_path": "gpt2",
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",
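For context, a minimal sketch (not part of this commit) of how the new value takes effect once the config is loaded with the Transformers library; the local path "." is an assumption standing in for a checkout of this repo:

# Sketch only: assumes `transformers` is installed and this repo
# is checked out in the current directory.
from transformers import AutoTokenizer

# from_pretrained reads tokenizer_config.json, including model_max_length.
tokenizer = AutoTokenizer.from_pretrained(".")
print(tokenizer.model_max_length)  # 128 after this commit

# With truncation enabled, encodings are capped at model_max_length tokens.
enc = tokenizer("lorem ipsum " * 200, truncation=True)
assert len(enc["input_ids"]) <= tokenizer.model_max_length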