Rzhishchev committed on
Commit
58d5d8a
1 Parent(s): 37669fc

Update toxic.py

Browse files
Files changed (1) hide show
  1. toxic.py +5 -0
toxic.py CHANGED
# toxic.py (pre-commit version, reconstructed from the rendered diff).
#
# Flat Streamlit script: loads a small Russian-language toxicity classifier
# at import time. The diff rendering had interleaved display line numbers and
# stripped indentation; this is the cleaned, runnable form.
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Checkpoint id on the Hugging Face Hub; downloaded/cached on first run.
model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)

# Move the model to GPU when one is available; inputs must be moved to the
# same device at inference time (not shown in this chunk).
if torch.cuda.is_available():
    model.cuda()
# toxic.py (post-commit version, reconstructed from the rendered diff).
#
# The commit adds an `app()` page entry point (Streamlit multi-page pattern:
# a dispatcher elsewhere presumably calls `toxic.app()` — verify against the
# caller). The model-loading lines are unchanged in the diff, so they remain
# at module level and run once at import time, not on every page render.
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification


def app():
    """Render the toxic-comment classifier page header.

    Only the title and description are drawn here; the model itself is
    loaded at module import time below.
    """
    st.title('Toxic Comment Classifier')
    st.write('This is the toxic comment classifier page.')


# Checkpoint id on the Hugging Face Hub; downloaded/cached on first run.
model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)

# Move the model to GPU when one is available; inputs must be moved to the
# same device at inference time (not shown in this chunk).
if torch.cuda.is_available():
    model.cuda()