# SentimentCNN class based on Sentiment Analysis tutorial by Ben Trevett
# https://github.com/bentrevett/pytorch-sentiment-analysis
import torch
import torch.nn as nn
import torchtext
class SentimentCNN(nn.Module):
    def __init__(self, state_dict=None, vocab=None, tokenizer='basic_english'):
        super().__init__()
        # tokenizer setup
        self.tokenizer = torchtext.data.utils.get_tokenizer(tokenizer)
        self.state_dict_name = state_dict
        # pick the device before building the model so _setup_model can use it
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        if vocab:
            self.load_vocab(vocab)

    def _setup_model(self):
        # cnn parameters
        n_filters = 100
        filter_sizes = [3, 5, 7]
        dropout_rate = 0.25
        self.min_length = max(filter_sizes)
        # language space parameters
        embedding_dim = 300
        output_dim = 2
        # model setup
        self.embedding = nn.Embedding(
            len(self.vocab),
            embedding_dim,
            padding_idx=self.pad_index)
        self.convs = nn.ModuleList([nn.Conv1d(embedding_dim,
                                              n_filters,
                                              filter_size)
                                    for filter_size in filter_sizes])
        self.fc = nn.Linear(len(filter_sizes) * n_filters, output_dim)
        self.dropout = nn.Dropout(dropout_rate)
        if self.state_dict_name:
            # map_location lets GPU-trained weights load on a CPU-only machine
            self.load_state_dict(torch.load(self.state_dict_name,
                                            map_location=self.device))
        self.to(self.device)

    def load_vocab(self, vocab):
        # vocabulary parameters
        self.vocab = torch.load(vocab)
        self.pad_index = self.vocab['<pad>']
        self._setup_model()

    def forward(self, ids):
        # ids: [batch size, sequence length]
        embedded = self.dropout(self.embedding(ids))
        # Conv1d expects [batch size, channels, sequence length]
        embedded = embedded.permute(0, 2, 1)
        conved = [torch.relu(conv(embedded)) for conv in self.convs]
        # max-pool over the sequence dimension for each filter size
        pooled = [conv.max(dim=-1).values for conv in conved]
        cat = self.dropout(torch.cat(pooled, dim=-1))
        prediction = self.fc(cat)
        return prediction

    def predict_sentiment(self, text):
        tokens = self.tokenizer(text)
        ids = [self.vocab[t] for t in tokens]
        # pad short inputs so the widest convolution still fits
        if len(ids) < self.min_length:
            ids += [self.pad_index] * (self.min_length - len(ids))
        tensor = torch.LongTensor(ids).unsqueeze(dim=0).to(self.device)
        # disable dropout and gradient tracking for inference
        self.eval()
        with torch.no_grad():
            prediction = self(tensor).squeeze(dim=0)
        probability = torch.softmax(prediction, dim=-1)
        predicted_class = prediction.argmax(dim=-1).item()
        predicted_probability = probability[predicted_class].item()
        return predicted_class, predicted_probability
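

# Usage sketch (not from the original repo): 'vocab.pt' and 'cnn_state_dict.pt'
# are hypothetical paths to a saved torchtext vocab and trained weights that
# match the architecture above.
if __name__ == '__main__':
    model = SentimentCNN(state_dict='cnn_state_dict.pt', vocab='vocab.pt')
    label, probability = model.predict_sentiment('This film was a delight!')
    print(f'predicted class {label} with probability {probability:.3f}')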