Rzhishchev committed on
Commit 8560266
1 Parent(s): c2cba17

Update toxic.py

Files changed (1)
  1. toxic.py +3 -1
toxic.py CHANGED
@@ -7,6 +7,8 @@ def app():
     st.title('Toxic Comment Detector')
     st.write('This is the toxic comment classifier page.')
 
+    st.image('https://media4.giphy.com/media/CdhxVrdRN4YFi/giphy.gif')
+
     model_checkpoint = 'cointegrated/rubert-tiny-toxicity'
     tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
     model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)
@@ -25,7 +27,7 @@ def app():
             return 1 - proba.T[0] * (1 - proba.T[-1])
         return proba
 
-    user_input = st.text_area("Enter text to check for toxicity:", "Собака сутулая")
+    user_input = st.text_area("Enter your text:", "Собака сутулая")
     if st.button("Analyze"):
         toxicity_score = text2toxicity(user_input, True)
         st.write(f"Toxicity Score: {toxicity_score:.4f}")