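# Gradio app for English <-> Kinyarwanda translation using a fine-tuned NLLB model (mbazaNLP checkpoints).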
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
import torch
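# FLORES-200 language codes used by NLLB: "kin_Latn" = Kinyarwanda, "eng_Latn" = English.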
LANGS = ["kin_Latn","eng_Latn"]
TASK = "translation"
# CKPT = "DigitalUmuganda/Finetuned-NLLB"
# MODELS = ["facebook/nllb-200-distilled-600M","DigitalUmuganda/Finetuned-NLLB"]
# model = AutoModelForSeq2SeqLM.from_pretrained(CKPT)
# tokenizer = AutoTokenizer.from_pretrained(CKPT)
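# Run on the first GPU if one is available, otherwise fall back to CPU.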
device = 0 if torch.cuda.is_available() else -1
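# Only the education-domain checkpoint is loaded for now; the tokenizer is taken from the general-domain checkpoint.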
#general_model = AutoModelForSeq2SeqLM.from_pretrained("mbazaNLP/Nllb_finetuned_general_en_kin")
education_model = AutoModelForSeq2SeqLM.from_pretrained("mbazaNLP/Nllb_finetuned_education_en_kin")
#tourism_model = AutoModelForSeq2SeqLM.from_pretrained("mbazaNLP/Nllb_finetuned_tourism_en_kin")
#MODELS = {"General model":general_model_model,"Education model":education_model,"Tourism model":tourism_model}
#MODELS = {"Education model":education_model,"Tourism model":tourism_model}
tokenizer = AutoTokenizer.from_pretrained("mbazaNLP/Nllb_finetuned_general_en_kin")
# The model-choice dropdown is disabled below, so the interface only passes (text, src_lang, tgt_lang).
def translate(text, src_lang, tgt_lang, max_length=400):
"""
Translate the text from source lang to target lang
"""
translation_pipeline = pipeline(TASK,
model=education_model,
tokenizer=tokenizer,
src_lang=src_lang,
tgt_lang=tgt_lang,
max_length=max_length,
device=device)
result = translation_pipeline(text)
return result[0]['translation_text']
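# Gradio interface: a text box plus source/target language dropdowns as inputs, translated text as output.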
gr.Interface(
    translate,
    [
        #gr.components.Dropdown(label="choose a model", choices=MODELS),
        gr.components.Textbox(label="Text"),
        gr.components.Dropdown(label="Source Language", choices=LANGS),
        gr.components.Dropdown(label="Target Language", choices=LANGS),
        #gr.components.Slider(8, 512, value=400, step=8, label="Max Length")
    ],
    ["text"],
    #examples=examples,
    #article=article,
    cache_examples=False,
    title="Finetuned-NLLB-EN-KIN",
    #description=description
).launch()