from transformers import pipeline
import gradio as gr
# Load the model using the pipeline
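# (the model weights are downloaded from the Hugging Face Hub on first run and cached locally)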
pipe = pipeline("text-classification", model="AliArshad/Severity_Predictor")
# Function to predict severity and return confidence score
def predict_severity(text):
    # Get prediction from the pipeline
    prediction = pipe(text)

    # Interpret the label and get the confidence score
    label = prediction[0]['label']
    confidence = prediction[0]['score']
    severity = "Severe" if label == "LABEL_1" else "Non-Severe"

    # Return severity and confidence as separate outputs
    return severity, confidence
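
# For reference, the raw pipeline output this function parses looks like the
# following (a sketch; the exact label and score depend on the model and input):
#   pipe("Crash when opening a new tab") -> [{'label': 'LABEL_1', 'score': 0.97}]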
# Define the Gradio interface with a title, a placeholder message, and a numeric confidence output
iface = gr.Interface(
    fn=predict_severity,
    inputs=gr.Textbox(lines=2, label="Summary", placeholder="Please Enter Bug Report Summary"),
    outputs=[
        gr.Textbox(label="Prediction"),
        gr.Number(label="Confidence", precision=2)
    ],
    title="GPT-2 Based Severity Prediction",
    description="Enter a bug report summary and predict its severity (Severe or Non-Severe).",
    examples=[
        ["Can't open multiple bookmarks at once from the bookmarks sidebar using the context menu"],
        ["Minor enhancements to make-source-package.sh"]
    ]
)
# Launch the interface
iface.launch()
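
# Running this file directly (e.g. `python app.py`) serves the interface on a local
# Gradio server; in a Hugging Face Space the app is launched automatically.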