import gradio as gr
from transformers import pipeline

# Translate English text into a chosen target language using the Hugging Face
# "Helsinki-NLP/opus-mt-en-*" translation models.

# Cache one translation pipeline per target language so each model is loaded
# from the Hub only once, not on every call.
_translators = {}

def get_translator(target_language):
    if target_language not in _translators:
        model_name = f"Helsinki-NLP/opus-mt-en-{target_language}"
        _translators[target_language] = pipeline("translation", model=model_name)
    return _translators[target_language]

# Define the translation function
def translate_text(text, target_language):
    if not text:
        return ""
    translator = get_translator(target_language)
    return translator(text)[0]["translation_text"]

# Set up the Gradio interface.
# Each code below must correspond to an existing "Helsinki-NLP/opus-mt-en-<code>"
# model on the Hugging Face Hub.
languages = {
    "es": "Spanish",
    "fr": "French",
    "de": "German",
    "zh": "Chinese (Mandarin)",
    "ja": "Japanese",
    "ko": "Korean",
    "it": "Italian",
    "pt": "Portuguese",
    "ru": "Russian",
    "hi": "Hindi",
    "ar": "Arabic",
    "nl": "Dutch",
    "tr": "Turkish",
    "el": "Greek",
    "ur": "Urdu"
}

iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Textbox(label="Enter text to translate"),
        gr.Dropdown(choices=list(languages.keys()), label="Target Language")
    ],
    outputs="text",
    live=True,
    title="Text Translator",
    description="Translate English text into multiple languages using Hugging Face models."
)

iface.launch()
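
# Example usage (sketch): translate_text can also be exercised directly, e.g.
#   translate_text("Hello, how are you?", "fr")
# which downloads "Helsinki-NLP/opus-mt-en-fr" on first use and returns the
# translated string. Run such a check before iface.launch(), since launch()
# blocks while the web UI is being served.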