Lenylvt committed on
Commit
d60a3d5
β€’
1 Parent(s): 48b48f0

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +46 -0
app.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from huggingface_hub import InferenceClient
import gradio as gr

# Inference client for the Mixtral instruct model hosted on the
# Hugging Face serverless Inference API (no local weights needed).
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
7
def translate_text(text: str, target_language: str) -> str:
    """Translate *text* into *target_language* using the Mixtral model.

    Args:
        text: The source text to translate.
        target_language: Human-readable language name (e.g. "French").

    Returns:
        The translated text, with the echoed prompt stripped if the
        model repeats it in its output.
    """
    # Instruction-style prompt: the model is asked to translate inline.
    prompt = f"Translate the following text to {target_language}:\n{text}"

    # BUG FIX: InferenceClient instances are not callable with
    # text_generation=/parameters=/options= kwargs. The correct API is the
    # text_generation() method, which returns the generated string directly
    # (so indexing response[0]['generated_text'] would also have failed).
    translated_text = client.text_generation(prompt, max_new_tokens=100)

    # Some models echo the prompt before the completion; strip it if present.
    # This cleanup may need adjustment depending on the model's output format.
    return translated_text.replace(prompt, "").strip()
22
+
23
# Languages offered in the dropdown; extend this list to support more.
languages = [
    "French",
    "Spanish",
    "German",
    "Italian",
    "Portuguese",
]

# Gradio UI: free-text input plus a target-language selector, wired to
# translate_text. The output box shows the model's translation.
iface = gr.Interface(
    fn=translate_text,
    inputs=[
        gr.Textbox(label="Text to Translate", placeholder="Enter text here..."),
        gr.Dropdown(label="Target Language", choices=languages),
    ],
    outputs=gr.Textbox(label="Translated Text"),
    title="Simple Translator with Mixtral",
    description="Translate text to various languages using the Mixtral model from Hugging Face.",
)

# Guarded entry point: launching only when run as a script keeps the module
# importable (e.g. for tests or embedding) without side effects.
if __name__ == "__main__":
    iface.launch()