# Load the Chinese dialogue-choice T5 model and its tokenizer from the Hub
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("svjack/T5-dialogue-choose")
model = AutoModelForSeq2SeqLM.from_pretrained("svjack/T5-dialogue-choose")
# Prompt (kept in Chinese, as the model expects). In English it reads:
#   "Given the following context, choose the best follow-up sentence.
#    Context: How do I become a programmer?
#    Option 1: Practice cooking a lot.
#    Option 2: Learn several programming languages.
#    Option 3: Read a few history books.
#    Answer:"
text = '''
根据如下上下文,选择最优的后续句子
上下文:如何成为程序员?
选项1:多练习做菜。
选项2:了解多门编程语言。
选项3:看几本历史书。
答案:
'''
# Encode the prompt, generate the model's choice, and decode it back to text
input_ids = tokenizer.encode(text, return_tensors="pt", add_special_tokens=True)
output_ids = model.generate(input_ids)[0]
print(tokenizer.decode(output_ids, skip_special_tokens=True))
# Expected output: 了解多门编程语言。 ("Learn several programming languages.")
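For repeated queries, the prompt construction and generation step can be wrapped in a small helper. The sketch below is a minimal, illustrative example that reuses the tokenizer and model loaded above and assumes the same prompt template; the function name choose_option and its parameters are hypothetical, not part of the model card.

# Minimal helper sketch (hypothetical, not from the model card): builds the
# Chinese prompt from a context string and candidate options, then returns
# the option text the model generates.
def choose_option(context, options):
    lines = ["根据如下上下文,选择最优的后续句子", f"上下文:{context}"]
    lines += [f"选项{i}:{opt}" for i, opt in enumerate(options, start=1)]
    lines.append("答案:")
    prompt = "\n".join(lines)
    input_ids = tokenizer.encode(prompt, return_tensors="pt", add_special_tokens=True)
    output_ids = model.generate(input_ids)[0]
    return tokenizer.decode(output_ids, skip_special_tokens=True)

# Example call with the same context and options as the prompt above
print(choose_option("如何成为程序员?", ["多练习做菜。", "了解多门编程语言。", "看几本历史书。"]))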