# Import the necessary libraries
import gradio as gr
from transformers import pipeline

# Instantiate the model
model = pipeline(task="fill-mask", model="MUmairAB/bert-based-MaskedLM")
# Typically, the model would be loaded inside a function. Here it is loaded at
# module level instead, so it is downloaded once when the app starts. The user
# will overlook this initial loading time, whereas a long delay after entering
# the input would be far more annoying.

def fill_the_mask(text):
    if "[MASK]" not in text:
        return "You did not enter \"[MASK]\" in the text. Please write your text again!"
    else:
        # Apply the model
        model_out = model(text)
        # Sort the list of dictionaries by score, highest first
        model_out = sorted(model_out, key=lambda x: x["score"], reverse=True)
        # Create a list to store the model output
        out_list = []
        # Iterate over the list of dictionaries and collect the predicted sentences
        for sub_dict in model_out:
            out_list.append(sub_dict["sequence"])
        return out_list

# Create the Gradio user interface
my_interface = gr.Interface(
    title="Masked Language Model APP\n(by Umair Akram)",
    description=("This App uses a fine-tuned DistilBERT-Base-Uncased Masked Language Model "
                 "to predict the missing word in a sentence.\nEnter your text and put \"[MASK]\" "
                 "in place of the word you want to predict, as shown in the following example: "
                 "Can we [MASK] to Paris?"),
    fn=fill_the_mask,
    inputs="text",
    outputs="text")

# Run the app when the script is executed directly
if __name__ == "__main__":
    # Launch the Gradio interface
    my_interface.launch()
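
# A minimal sketch of a quick sanity check (optional): the prediction function can
# also be called directly, outside the Gradio interface, once the model above has
# finished downloading, e.g.
#
#     print(fill_the_mask("Can we [MASK] to Paris?"))
#
# This prints the candidate completions ordered from the highest to the lowest score.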