|
import gradio as gr |
|
from transformers import AutoModelForCausalLM,AutoModel |
|
from transformers import AutoTokenizer |
|
import torch |
|
|
|
# Hugging Face repo hosting both the model weights and the custom tokenizer.
_REPO_ID = "aframson/RDPDLM"

# trust_remote_code=True executes Python shipped inside the repo — required
# here because RDPDLM defines its own model/tokenizer classes.
model = AutoModel.from_pretrained(_REPO_ID, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(_REPO_ID, trust_remote_code=True)
|
|
|
|
|
def Generate(context, max_new_tokens=10):
    """Generate a text continuation of *context* with the RDPDLM model.

    Args:
        context: Prompt string to continue.
        max_new_tokens: Number of tokens to sample beyond the prompt.
            Defaults to 10, matching the previously hard-coded value,
            so existing callers (the Gradio interface) are unaffected.

    Returns:
        The decoded output string produced by the model's custom tokenizer.
    """
    # NOTE(review): this remote-code tokenizer is called via the private
    # _encode/_decode methods — presumably the custom class only exposes
    # those; confirm against the repo before switching to the public
    # encode()/decode() API.
    context_tokens = torch.tensor(
        tokenizer._encode(context), dtype=torch.long
    ).unsqueeze(0)  # prepend a batch dimension: shape (1, seq_len)

    # Pure inference — disable autograd tracking to avoid building a
    # gradient graph (saves memory and time on every request).
    with torch.no_grad():
        generated_ids = model.generate(
            context_tokens, max_new_tokens=max_new_tokens
        )[0].tolist()  # take the single batch item as a plain list of ids

    return tokenizer._decode(generated_ids)
|
|
|
# Minimal text-in / text-out web UI wired to the Generate function.
iface = gr.Interface(
    fn=Generate,
    inputs="text",
    outputs="text",
)

# Start the local Gradio server (blocks until interrupted).
iface.launch()