import gradio as gr
from simpletransformers.seq2seq import Seq2SeqModel, Seq2SeqArgs
# Define the models' paths
BM_MODEL_PATH = "Enutrof/marian-mt-en-pcm"
BBGM_EN_PCM_MODEL_PATH = "NITHUB-AI/marian-mt-bbc-en-pcm"
BBGM_PCM_EN_MODEL_PATH = "NITHUB-AI/marian-mt-bbc-pcm-en"
def load_translator(model_name='Enutrof/marian-mt-en-pcm'):
    '''
    This method loads the sequence-to-sequence model for translation.
    :param model_name: Hugging Face Hub path of the Marian model to load.
    :return: model
    '''
    pmodel_args = Seq2SeqArgs()
    pmodel_args.max_length = 1024
    pmodel_args.length_penalty = 1
    pmodel_args.num_beams = 50
    pmodel_args.num_return_sequences = 3
    pmodel = Seq2SeqModel(
        encoder_decoder_type="marian",
        encoder_decoder_name=model_name,
        args=pmodel_args,
        use_cuda=False  # run on CPU
    )
    return pmodel
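# Note (assumption, not part of the original code): with num_beams=50 and
# num_return_sequences=3, predict() is expected to return the three
# highest-scoring beam hypotheses per input sentence; a smaller num_beams
# would speed up CPU inference at some cost to translation quality.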
# Load models
bm_model = load_translator(BM_MODEL_PATH)
bbgm_en_pcm_model = load_translator(BBGM_EN_PCM_MODEL_PATH)
bbgm_pcm_en_model = load_translator(BBGM_PCM_EN_MODEL_PATH)
# Dictionary to easily select model
models = {
"BM Model": bm_model,
"BBGM Model (EN to PCM)": bbgm_en_pcm_model,
"BBGM Model (PCM to EN)": bbgm_pcm_en_model
}
def translate(model_name, source_sentence):
    '''
    Translate a sentence with the selected model and return the top candidates.
    '''
    if isinstance(source_sentence, str):
        source_sentence = [source_sentence]
    model = models[model_name]
    predictions = model.predict(source_sentence)
    # Strip SentencePiece's word-boundary marker (▁) from the decoded text
    return [i.replace('▁', ' ') for i in predictions[0]]
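# Usage sketch (illustrative only, assuming the models above loaded successfully):
#   translate("BBGM Model (EN to PCM)", "Who are you?")
#   -> three candidate Pidgin translations, one per output textbox below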
# Gradio interface
interface = gr.Interface(
    fn=translate,
    inputs=[
        gr.Dropdown(choices=["BM Model", "BBGM Model (EN to PCM)", "BBGM Model (PCM to EN)"], label="Model Selection"),
        gr.Textbox(placeholder="Enter source sentence here...", label="Source Sentence"),
    ],
    outputs=[
        gr.Textbox(label="Prediction 1"),
        gr.Textbox(label="Prediction 2"),
        gr.Textbox(label="Prediction 3"),
    ],
    title='“EHN?”: A Bi-directional English to 🇳🇬 Pidgin Machine Translator'
          '\n'
          'A product of the NITHUB AI Team',  # ![NITHUB Logo](https://imgur.com/rNfN7cf)
    description='Type your English/🇳🇬 Pidgin text in the left text box to get 🇳🇬 Pidgin/English translations on the right. '
                '\n'
                '- BM Model: Bible-based Marian Model\n'
                '- BBGM Model: Bible-BBC-GPT3.5Turbo-based Marian Model',
    examples=[
        ['BBGM Model (EN to PCM)', 'Who are you?'],
        ['BBGM Model (EN to PCM)', 'I know every song by that artiste.'],
        ['BBGM Model (EN to PCM)', 'I am lost, please help me find my way to the market.'],
        ['BBGM Model (EN to PCM)', 'Is a personal philosophy of moral relativism, the only way to survive in this ethically complex world, or is it just an excuse to justify doing bad things?'],
        ['BBGM Model (PCM to EN)', 'Wetin Ifihan dey talk about sef?'],
        ['BBGM Model (PCM to EN)', 'Dey don place reward for anyone wey go bring information about di matter.'],
        ['BBGM Model (PCM to EN)', 'Who dey breath?'],
        ['BBGM Model (PCM to EN)', 'Di marriage happun six months after di couple introduction wen dem make dia relationship public in early November, 2021.'],
        ['BM Model', 'Is a personal philosophy of moral relativism, the only way to survive in this ethically complex world, or is it just an excuse to justify doing bad things?'],
        ['BM Model', 'I know every song by that artiste.'],
        ['BM Model', 'They should not be permitted here.'],
        ['BM Model', 'I am lost, please help me find my way to the market.']
    ]
)
interface.launch(enable_queue=True)