import gradio as gr
from transformers import AutoTokenizer
import spaces  # Hugging Face Spaces helper package; not used directly in this demo
# Function to load and tokenize the message using the selected tokenizer
def tokenize(message, tokenizer_name):
    try:
        tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
        return str(tokenizer.encode(message))
    except Exception as e:
        return f"Error loading tokenizer: {str(e)}"
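# Example (a sketch, assuming the public "gpt2" tokenizer can be fetched from the Hub):
#   tokenize("Hello world", "gpt2") returns "[15496, 995]"
# A bad tokenizer name is reported as an error string rather than raising, so the
# message can be displayed directly in the chat window.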
# Handle one chat turn: tokenize the message and append the exchange to the history
def chat(message, history, tokenizer_name):
    tokenized_message = tokenize(message, tokenizer_name)
    history.append((message, tokenized_message))
    return "", history
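# Example turn (same "gpt2" assumption as above):
#   chat("Hello world", [], "gpt2") returns ("", [("Hello world", "[15496, 995]")])
# The empty string clears the input textbox and the (message, tokens) pair becomes
# the next chatbot entry.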
# Gradio interface
with gr.Blocks() as demo:
    tokenizer_input = gr.Textbox(
        placeholder="Enter the tokenizer name (e.g., gpt2, bert-base-uncased)",
        label="Tokenizer Name",
    )
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    # Reset the conversation shown in the chatbot
    def clear_history():
        return []

    msg.submit(chat, inputs=[msg, chatbot, tokenizer_input], outputs=[msg, chatbot])
    clear.click(clear_history, outputs=[chatbot])

demo.launch()
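# To try this outside the Space (an assumption; on Hugging Face the app starts automatically):
#   pip install gradio transformers spaces
#   python app.py   (assuming the file is saved under the usual name app.py)
# then open the local URL that demo.launch() prints.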