import gradio as gr
from simpletransformers.seq2seq import Seq2SeqModel, Seq2SeqArgs
# Define the models' paths
BM_MODEL_PATH = "Enutrof/marian-mt-en-pcm"
BBGM_EN_PCM_MODEL_PATH = "NITHUB-AI/marian-mt-bbc-en-pcm"
BBGM_PCM_EN_MODEL_PATH = "NITHUB-AI/marian-mt-bbc-pcm-en"
def load_translator(model_name='Enutrof/marian-mt-en-pcm'):
    '''
    Loads the sequence-to-sequence model used for translation.
    :param model_name: name or path of the Marian model to load
    :return: the loaded Seq2SeqModel
    '''
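    # Decoding settings: allow long outputs, search 50 beams, and return the top 3 hypotheses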
    pmodel_args = Seq2SeqArgs()
    pmodel_args.max_length = 1024
    pmodel_args.length_penalty = 1
    pmodel_args.num_beams = 50
    pmodel_args.num_return_sequences = 3
    pmodel = Seq2SeqModel(
        encoder_decoder_type="marian",
        encoder_decoder_name=model_name,
        args=pmodel_args,
        use_cuda=False
    )
    return pmodel
# Load models
bm_model = load_translator(BM_MODEL_PATH)
bbgm_en_pcm_model = load_translator(BBGM_EN_PCM_MODEL_PATH)
bbgm_pcm_en_model = load_translator(BBGM_PCM_EN_MODEL_PATH)
# Dictionary to easily select model
models = {
"BM Model": bm_model,
"BBGM Model (EN to PCM)": bbgm_en_pcm_model,
"BBGM Model (PCM to EN)": bbgm_pcm_en_model
}
def translate(model_name, source_sentence):
    if isinstance(source_sentence, str):
        source_sentence = [source_sentence]
    model = models[model_name]
    predictions = model.predict(source_sentence)
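    # Marian/SentencePiece decoding marks word boundaries with '▁'; map them back to plain spaces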
    return [i.replace('▁', ' ') for i in predictions[0]]
# Gradio interface
interface = gr.Interface(
    fn=translate,
    inputs=[
        gr.Dropdown(choices=["BM Model", "BBGM Model (EN to PCM)", "BBGM Model (PCM to EN)"], label="Model Selection"),
        gr.Textbox(placeholder="Enter source sentence here...", label="Source Sentence"),
    ],
    outputs=[
        gr.Textbox(label="Prediction 1"),
        gr.Textbox(label="Prediction 2"),
        gr.Textbox(label="Prediction 3"),
    ],
    title='“EHN?”: A Bi-directional English to 🇳🇬 Pidgin Machine Translator'
          '\n'
          'A product of the NITHUB AI Team',  # ![NITHUB Logo](https://imgur.com/rNfN7cf)
    description='Type your English/🇳🇬 Pidgin text in the left text box to get 🇳🇬 Pidgin/English translations on the right. '
                '\n'
                '- BM Model: Bible-based Marian Model\n'
                '- BBGM Model: Bible-BBC-GPT3.5Turbo-based Marian Model',
    examples=[
        ['BBGM Model (EN to PCM)', 'Who are you?'],
        ['BBGM Model (EN to PCM)', 'I know every song by that artiste.'],
        ['BBGM Model (EN to PCM)', 'I am lost, please help me find my way to the market.'],
        ['BBGM Model (EN to PCM)', 'Is a personal philosophy of moral relativism, the only way to survive in this ethically complex world, or is it just an excuse to justify doing bad things?'],
        ['BBGM Model (PCM to EN)', 'Wetin Ifihan dey talk about sef?'],
        ['BBGM Model (PCM to EN)', 'Dey don place reward for anyone wey go bring information about di matter.'],
        ['BBGM Model (PCM to EN)', 'Who dey breath?'],
        ['BBGM Model (PCM to EN)', 'Di marriage happun six months after di couple introduction wen dem make dia relationship public in early November, 2021.'],
        ['BM Model', 'Is a personal philosophy of moral relativism, the only way to survive in this ethically complex world, or is it just an excuse to justify doing bad things?'],
        ['BM Model', 'I know every song by that artiste.'],
        ['BM Model', 'They should not be permitted here.'],
        ['BM Model', 'I am lost, please help me find my way to the market.']
    ]
)
interface.launch(enable_queue=True)
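
# A minimal smoke-test sketch (an assumption, not part of the app): run these calls instead of
# interface.launch() to check the models outside the UI; the first argument must match a key of
# the `models` dict above, the sentences are just illustrative inputs.
#   print(translate("BBGM Model (EN to PCM)", "Good morning, how are you?"))
#   print(translate("BBGM Model (PCM to EN)", "Who dey breath?"))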