Rzhishchev committed on
Commit 0e9f7cd
1 Parent(s): 0aa0213

Update toxic.py

Files changed (1)
  1. toxic.py +1 -3
toxic.py CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoTokenizer, AutoModelForSequenceClassification
 
 
 def app():
-    st.title('Toxic Comment Classifier')
+    st.title('Toxic Comment Detector')
     st.write('This is the toxic comment classifier page.')
 
     model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
@@ -25,8 +25,6 @@ def app():
             return 1 - proba.T[0] * (1 - proba.T[-1])
         return proba
 
-    st.title("Toxicity Detector")
-
     user_input = st.text_area("Enter text to check for toxicity:", "Капец ты гнида")
     if st.button("Analyze"):
        toxicity_score = text2toxicity(user_input, True)
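
For context, a minimal, runnable sketch of what toxic.py plausibly looks like after this commit. The lines visible in the diff (titles, model checkpoint, the aggregation returns, and the Streamlit input/button calls) are kept verbatim; the tokenization/inference body of text2toxicity and the final st.write are assumptions based on the usual usage pattern for cointegrated/rubert-tiny-toxicity, not code confirmed by this commit.

# Hypothetical reconstruction of toxic.py after commit 0e9f7cd.
# Only the lines shown in the diff are verbatim; the rest is an assumption.
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification


def app():
    st.title('Toxic Comment Detector')
    st.write('This is the toxic comment classifier page.')

    model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
    tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
    model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)

    def text2toxicity(text, aggregate=True):
        # Returns a single toxicity score if aggregate=True, otherwise the
        # per-label sigmoid probabilities for the model's five heads.
        with torch.no_grad():
            inputs = tokenizer(text, return_tensors='pt',
                               truncation=True, padding=True)
            proba = torch.sigmoid(model(**inputs).logits).cpu().numpy()
        if isinstance(text, str):
            proba = proba[0]
        if aggregate:
            # From the diff: combine the first ("non-toxic") and last label
            # probabilities into one overall toxicity score.
            return 1 - proba.T[0] * (1 - proba.T[-1])
        return proba

    # The default example text is Russian (roughly: "Geez, you scumbag").
    user_input = st.text_area("Enter text to check for toxicity:", "Капец ты гнида")
    if st.button("Analyze"):
        toxicity_score = text2toxicity(user_input, True)
        # Assumed display step; the diff cuts off after the line above.
        st.write(f"Toxicity score: {toxicity_score:.3f}")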