#!/usr/bin/env python3
"""
It turns out that T5 does some odd token padding of its text input.
This program shows the padding, in human readable form
"""
from transformers import T5Tokenizer, T5EncoderModel
import torch
T="mcmonkey/google_t5-v1_1-xxl_encoderonly"
tokenizer = T5Tokenizer.from_pretrained(T)
print("loded tokenzier")
def get_tokens(word):
    """Tokenize a string and print both the raw ids and their decoded text form."""
    tokens = tokenizer(word, return_tensors="pt")
    input_ids = tokens.input_ids
    print("Bare input_ids:", input_ids)
    decoded_tokens = tokenizer.convert_ids_to_tokens(input_ids[0])
    print("Tokenized input:", decoded_tokens)
# token_id should be an integer token id (or a list of ids)
def get_token_from_id(token_id):
    decoded_tokens = tokenizer.convert_ids_to_tokens(token_id)
    print("Tokenized id:", decoded_tokens)
get_tokens("cat")
get_tokens("dogs and cats living together")
get_token_from_id(1712)
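
# A minimal sketch of where the padded ids go next: feed them through the
# encoder-only model imported above. This is a hedged extra, not part of the
# demonstration proper: it assumes the checkpoint loads as a T5EncoderModel
# and that enough memory is available for the xxl-sized weights.
model = T5EncoderModel.from_pretrained(T)
with torch.no_grad():
    hidden = model(**tokenizer("cat", return_tensors="pt")).last_hidden_state
# One embedding per token, including the appended "</s>":
print("Encoder output shape:", hidden.shape)  # (1, seq_len, hidden_size)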