import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Run on GPU when available, otherwise fall back to CPU.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Tokenizer matching the DistilBERT checkpoint the classifier was fine-tuned from.
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-cased")
def predict_probs(model, text,
                  labels=['World', 'Sports', 'Business', 'Sci/Tech']):
    """Return a label -> probability mapping for a single piece of text."""
    with torch.no_grad():
        tokens = tokenizer(text, padding="max_length", truncation=True, return_tensors='pt').to(device)
        logits = model(**tokens).logits
        # Softmax over the class dimension; take the first (and only) item in the batch.
        probs = torch.nn.functional.softmax(logits, dim=-1)[0]
        return {labels[i]: float(probs[i]) for i in range(min(len(probs), len(labels)))}
def load_model(labels_count=4):
    """Load the fine-tuned sequence-classification model from the local checkpoint directory."""
    model = AutoModelForSequenceClassification.from_pretrained("pretrained_acc935/", num_labels=labels_count).to(device)
    return model
__all__ = ['predict_probs', 'load_model']
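
# Minimal usage sketch (an assumption, not part of the module above): it presumes the
# "pretrained_acc935/" checkpoint directory is present locally and that the default
# AG News-style labels used by predict_probs match what the model was trained on.
if __name__ == "__main__":
    model = load_model()
    model.eval()
    example = "Stocks rallied after the central bank held interest rates steady."
    print(predict_probs(model, example))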