Tokenizer loading fix
#1 opened by Den4ikAI

README.md CHANGED
@@ -91,7 +91,7 @@ from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer
 path_to_model = "ai-forever/RuM2M100-1.2B"
 
 model = M2M100ForConditionalGeneration.from_pretrained(path_to_model)
-tokenizer = M2M100Tokenizer.from_pretrained(path_to_model)
+tokenizer = M2M100Tokenizer.from_pretrained(path_to_model, src_lang="ru", tgt_lang="ru")
 
 sentence = "прийдя в МГТУ я был удивлен никого необноружив там…"
 
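For context, here is a minimal end-to-end sketch of how the corrected tokenizer loading fits into the README's usage example. Only the tokenizer line is visible in this diff, so the generation call with `forced_bos_token_id` is an assumption following the standard M2M100 pattern; without explicit `src_lang`/`tgt_lang`, `M2M100Tokenizer` defaults to "en", which is why passing "ru" matters for this Russian spelling-correction model.

```python
# Sketch of the corrected usage; everything after the tokenizer line is an
# assumption based on the standard M2M100 generate() pattern, not this diff.
from transformers import M2M100ForConditionalGeneration, M2M100Tokenizer

path_to_model = "ai-forever/RuM2M100-1.2B"

model = M2M100ForConditionalGeneration.from_pretrained(path_to_model)
# Without src_lang/tgt_lang, M2M100Tokenizer falls back to "en", so the
# Russian input would be prefixed with the wrong language token.
tokenizer = M2M100Tokenizer.from_pretrained(path_to_model, src_lang="ru", tgt_lang="ru")

sentence = "прийдя в МГТУ я был удивлен никого необноружив там…"

encodings = tokenizer(sentence, return_tensors="pt")
generated = model.generate(
    **encodings,
    forced_bos_token_id=tokenizer.get_lang_id("ru"),  # keep the output in Russian
)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```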