# Translator-API / app.py
from huggingface_hub import InferenceClient
import gradio as gr
# Initialize the inference client with the Mixtral model.
# NOTE(review): InferenceClient talks to the Hugging Face Inference API over
# the network; an HF token may be required for gated/rate-limited access.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
def translate_text(text: str, target_language: str) -> str:
    """Translate ``text`` into ``target_language`` using the Mixtral model.

    Parameters
    ----------
    text : str
        The source text to translate.
    target_language : str
        Name of the language to translate into (e.g. "French").

    Returns
    -------
    str
        The model's translation, with any echoed prompt stripped.
    """
    # Format the prompt to include the translation instruction.
    prompt = f"Translate the following text to {target_language}:\n{text}"
    # BUG FIX: InferenceClient instances are not callable — the original
    # `client(text_generation=..., parameters=..., options=...)` raised
    # TypeError. The text-generation task is exposed via the
    # .text_generation() method, which returns the generated string directly
    # (so there is no `response[0]['generated_text']` to index either).
    translated_text = client.text_generation(prompt, max_new_tokens=100)
    # Some models echo the prompt before the completion; remove it so only
    # the translated part remains.
    return translated_text.replace(prompt, '').strip()
# Target languages offered in the dropdown; extend this list to add more.
languages = ["French", "Spanish", "German", "Italian", "Portuguese"]
# Build the Gradio UI: two inputs (free text + language choice), one text output.
text_input = gr.Textbox(label="Text to Translate", placeholder="Enter text here...")
language_input = gr.Dropdown(label="Target Language", choices=languages)
translation_output = gr.Textbox(label="Translated Text")

iface = gr.Interface(
    fn=translate_text,
    inputs=[text_input, language_input],
    outputs=translation_output,
    title="Simple Translator with Mixtral",
    description="Translate text to various languages using the Mixtral model from Hugging Face.",
)

# Start the web app (blocking call).
iface.launch()