import gradio as gr
from transformers import MarianMTModel, MarianTokenizer

# Default model from the Hugging Face Hub: the University of Helsinki English-to-French model
model_name = "Helsinki-NLP/opus-mt-en-fr"

# Load the default tokenizer and model
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
# Function to handle translation
def translate_text(text, target_language):
    # Map the target language to the corresponding Helsinki-NLP model on the Hub
    # Note: you would need to add the exact model name for each language pair you want to support
    model_name_map = {
        "French": "Helsinki-NLP/opus-mt-en-fr",
        "German": "Helsinki-NLP/opus-mt-en-de",
        "Spanish": "Helsinki-NLP/opus-mt-en-es",
    }
    selected_model_name = model_name_map.get(target_language, "Helsinki-NLP/opus-mt-en-fr")

    # Load the selected tokenizer and model
    tokenizer = MarianTokenizer.from_pretrained(selected_model_name)
    model = MarianMTModel.from_pretrained(selected_model_name)

    # Tokenize the input text (prepare_seq2seq_batch was removed in recent transformers
    # releases; calling the tokenizer directly is the current API)
    encoded_text = tokenizer([text], return_tensors="pt", padding=True)

    # Perform the translation
    translated = model.generate(**encoded_text)

    # Decode the translated text
    translated_text = tokenizer.decode(translated[0], skip_special_tokens=True)
    return translated_text
# Define the interface (gr.inputs / gr.outputs were removed in Gradio 3+; use the components directly)
iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter text to translate..."),
        gr.Dropdown(["French", "German", "Spanish"], label="Select Language"),
    ],
    outputs=gr.Textbox(label="Translation"),
    title="Text Translator with Helsinki NLP Models",
    description="Select a language to translate English text into using University of Helsinki models.",
)

# Launch the app
iface.launch()
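
As written, the app loads a model and tokenizer from the Hub on every translation request. A minimal sketch of one optional way to avoid that, not part of the original Space, is to cache each loaded pair in a module-level dictionary (the names _model_cache and get_model_and_tokenizer below are hypothetical helpers added for illustration):

# Optional: cache loaded tokenizers/models so each language pair is loaded only once
_model_cache = {}

def get_model_and_tokenizer(selected_model_name):
    # Load the pair on first use, then reuse it on later calls
    if selected_model_name not in _model_cache:
        _model_cache[selected_model_name] = (
            MarianTokenizer.from_pretrained(selected_model_name),
            MarianMTModel.from_pretrained(selected_model_name),
        )
    return _model_cache[selected_model_name]

# Inside translate_text, the two from_pretrained calls would then become:
# tokenizer, model = get_model_and_tokenizer(selected_model_name)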