Spaces:
Sleeping
Sleeping
razanalsulami
committed on
Commit
•
a3c0b99
1
Parent(s):
c97016b
Update app.py
Browse files
app.py
CHANGED
@@ -6,6 +6,7 @@ from transformers import pipeline
|
|
6 |
import gradio as gr
|
7 |
import torch
|
8 |
from gtts import gTTS
|
|
|
9 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
10 |
translator_ar_to_en = pipeline("translation_ar_to_en", model="Helsinki-NLP/opus-mt-ar-en", device=0 if device == "cuda" else -1)
|
11 |
translator_en_to_ar = pipeline("translation_en_to_arabic", model="Helsinki-NLP/opus-mt-en-ar", device=0 if device == "cuda" else -1)
|
@@ -213,3 +214,6 @@ with gr.Blocks() as demo:
|
|
213 |
|
214 |
# Launch the Gradio app with Tabs
|
215 |
demo.launch()
|
|
|
|
|
|
|
|
6 |
import gradio as gr
|
7 |
import torch
|
8 |
from gtts import gTTS
|
9 |
+
import IPython.display as ipd
|
10 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
11 |
translator_ar_to_en = pipeline("translation_ar_to_en", model="Helsinki-NLP/opus-mt-ar-en", device=0 if device == "cuda" else -1)
|
12 |
translator_en_to_ar = pipeline("translation_en_to_arabic", model="Helsinki-NLP/opus-mt-en-ar", device=0 if device == "cuda" else -1)
|
|
|
214 |
|
215 |
# Launch the Gradio app with Tabs
|
216 |
demo.launch()
|
217 |
+
|
218 |
+
if __name__ == "__main__":
|
219 |
+
app.run(host="0.0.0.0", port=7860)
|