Pull request #18: "Update tokenization_phi3_small.py" — opened by damajercakms.
File changed: tokenization_phi3_small.py
@@ -180,12 +180,10 @@ class Phi3SmallTokenizer(PreTrainedTokenizer):

Before (lines removed; the bodies of lines 184–186 were lost in extraction — only
the `cls_kwargs` identifier and two closing parentheses survive):

  180          # First try to load from the tokenization config if it exists
  181          tokenization_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  182          if tokenization_config:
  183 -            cls_kwargs
  184 -            [line content lost in extraction]
  185 -            [line content lost in extraction]
  186 -            [line content lost in extraction]
  187 -            )
  188 -            )
  189          else:
  190              config = AutoConfig.from_pretrained(pretrained_model_name_or_path, trust_remote_code=True)
  191              cls_kwargs["model_max_length"] = config.max_position_embeddings
|
|
After (lines added — the removed construction is replaced by a dict merge that
lets explicitly passed kwargs override values from the tokenization config):

  180          # First try to load from the tokenization config if it exists
  181          tokenization_config = get_tokenizer_config(pretrained_model_name_or_path, **kwargs)
  182          if tokenization_config:
  183 +            cls_kwargs = {
  184 +                **tokenization_config,
  185 +                **cls_kwargs
  186 +            }
  187          else:
  188              config = AutoConfig.from_pretrained(pretrained_model_name_or_path, trust_remote_code=True)
  189              cls_kwargs["model_max_length"] = config.max_position_embeddings