# app.py
import gradio as gr
from transformers import pipeline
# Initialize the emotion classification pipeline once at module load,
# so the model is not reloaded on every request
cls = pipeline('text-classification', model='RJuro/dk_emotion_bert_in_class')

# Classify the input text and return the predicted emotion label
def classify(text):
    return cls(text)[0]['label']
# Create the Gradio interface with a textbox input and text output, then launch it
gr.Interface(fn=classify, inputs="textbox", outputs="text").launch()
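
# Optional: a minimal local sanity check before launching the interface.
# The sentence below is a hypothetical Danish example; assuming the model
# weights download successfully, classify() should return one of the
# model's emotion labels as a string:
#
#     print(classify("Jeg glæder mig til weekenden"))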