from transformers import pipeline
import gradio as gr
# Load the model using the pipeline
pipe = pipeline("text-classification", model="AliArshad/Severity_Predictor")
# Function to predict severity and return confidence score
def predict_severity(text):
    # Get prediction from the pipeline
    prediction = pipe(text)
    # Interpret the label and get the confidence score
    label = prediction[0]['label']
    confidence = prediction[0]['score']
    severity = "Severe" if label == "LABEL_1" else "Non-Severe"
    # Return severity and confidence as separate outputs
    return severity, confidence
# Define the Gradio interface with a title, a placeholder message, and a confidence score output
iface = gr.Interface(
    fn=predict_severity,
    inputs=gr.Textbox(lines=2, placeholder="Please Enter Bug Report Summary"),
    outputs=[
        gr.Textbox(label="Prediction"),
        gr.Number(label="Confidence", precision=2)
    ],
    title="SevPredict: GPT-2 Based Severity Prediction",
    description="Enter text and predict its severity (Severe or Non-severe).",
    examples=[
        ["Can't open multiple bookmarks at once from the bookmarks sidebar using the context menu"],
        ["Minor enhancements to make-source-package.sh"]
    ]
)
# Launch the interface
iface.launch()