from tokenizers import Tokenizer, models, trainers, pre_tokenizers, decoders, processors

# Build a byte-level BPE tokenizer: the ByteLevel pre-tokenizer, decoder,
# and post-processor work together so any input text encodes and decodes losslessly.
tokenizer = Tokenizer(models.BPE())
tokenizer.pre_tokenizer = pre_tokenizers.ByteLevel()
tokenizer.decoder = decoders.ByteLevel()
tokenizer.post_processor = processors.ByteLevel()

# Train the BPE vocabulary on the corpus file, reserving the special tokens.
trainer = trainers.BpeTrainer(special_tokens=["[PAD]", "[CLS]", "[SEP]", "[MASK]", "[MASK2]"])
tokenizer.train(["archivo_texto.txt"], trainer)
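
# Minimal usage sketch, assuming the training above succeeded; the
# "tokenizer.json" path and the sample sentence are illustrative, not part of
# the original script.
tokenizer.save("tokenizer.json")
loaded = Tokenizer.from_file("tokenizer.json")
encoding = loaded.encode("Hola mundo")
print(encoding.tokens)               # byte-level BPE tokens
print(loaded.decode(encoding.ids))   # should round-trip back to the input text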