import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Hugging Face Hub repo that hosts the fine-tuned emotion classifier
repo_id = "islasher/clasificador-dair-emotion"

# Load the fine-tuned model and its tokenizer from the Hub
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

def predict(frase):
    # Tokenize the input sentence and run it through the classifier
    inputs = tokenizer(frase, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    # Return the index of the highest-scoring emotion class
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    return predicted_class

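# Optional helper, not part of the original app: a minimal sketch of turning the
# raw class index into a readable emotion name. It assumes the checkpoint's config
# carries a meaningful id2label mapping (standard for transformers
# sequence-classification models); if the config only has generic "LABEL_0"-style
# names, the dair-ai/emotion label names would have to be supplied manually.
def predict_label(frase):
    inputs = tokenizer(frase, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    idx = torch.argmax(outputs.logits, dim=1).item()
    # Fall back to the bare index if no label name is available
    return model.config.id2label.get(idx, str(idx))
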
# Build and launch the Gradio demo: a text box in, the predicted class out
gr.Interface(fn=predict, inputs="text", outputs="text").launch(share=False)