nppmatt committed on
Commit 7ccc2c1 · 1 Parent(s): df570c4

Update app.py

Files changed (1): app.py +23 -1
app.py CHANGED
@@ -3,8 +3,30 @@ import torch
 import torch.nn.functional as TF
 import streamlit as st
 
+model_name = "RoBERTa"
+
 classifier = pipeline("sentiment-analysis")
 defaultTxt = "I hate you cancerous insects so much"
 result = classifier(defaultTxt)
-
 st.write(result)
+
+if (option == "RoBERTa"):
+    tokenizerPath = "s-nlp/roberta_toxicity_classifier"
+    modelPath = "s-nlp/roberta_toxicity_classifier"
+    neutralIndex = 0
+    toxicIndex = 1
+elif (option == "DistilBERT"):
+    tokenizerPath = "citizenlab/distilbert-base-multilingual-cased-toxicity"
+    modelPath = "citizenlab/distilbert-base-multilingual-cased-toxicity"
+    neutralIndex = 1
+    toxicIndex = 0
+elif (option == "XLM-RoBERTa"):
+    tokenizerPath = "unitary/multilingual-toxic-xlm-roberta"
+    modelPath = "unitary/multilingual-toxic-xlm-roberta"
+    neutralIndex = 1
+    toxicIndex = 0
+else:
+    tokenizerPath = "s-nlp/roberta_toxicity_classifier"
+    modelPath = "s-nlp/roberta_toxicity_classifier"
+    neutralIndex = 0
+    toxicIndex = 1
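
The new if/elif chain picks a toxicity checkpoint plus the positions of its neutral and toxic logits, but `option` is not defined anywhere in this hunk. Below is a minimal sketch (not the committed app.py) of how that selection could be wired into the Streamlit app: `option` is assumed to come from st.selectbox, the if/elif chain is condensed into a lookup table (tokenizerPath and modelPath are identical in every branch of the diff), and the text box, AutoTokenizer/AutoModelForSequenceClassification loading, and softmax readout are assumptions rather than code from this commit.

# Hedged sketch, not the committed app.py. Assumes each checkpoint is a
# sequence-classification model whose output logits are ordered according to
# the neutralIndex/toxicIndex values taken from the diff above.
import torch
import torch.nn.functional as TF
import streamlit as st
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# checkpoint, neutralIndex, toxicIndex per model, matching the diff's branches
MODELS = {
    "RoBERTa": ("s-nlp/roberta_toxicity_classifier", 0, 1),
    "DistilBERT": ("citizenlab/distilbert-base-multilingual-cased-toxicity", 1, 0),
    "XLM-RoBERTa": ("unitary/multilingual-toxic-xlm-roberta", 1, 0),
}

option = st.selectbox("Model", list(MODELS))
txt = st.text_area("Text to classify", "I hate you cancerous insects so much")

modelPath, neutralIndex, toxicIndex = MODELS.get(option, MODELS["RoBERTa"])
tokenizer = AutoTokenizer.from_pretrained(modelPath)
model = AutoModelForSequenceClassification.from_pretrained(modelPath)

# Run the classifier and turn the logits into neutral/toxic probabilities.
encoding = tokenizer(txt, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**encoding).logits
probs = TF.softmax(logits, dim=1).squeeze()

st.write({"neutral": probs[neutralIndex].item(), "toxic": probs[toxicIndex].item()})

Keeping the neutral/toxic indices next to each checkpoint matters because the three models order their output classes differently, which is exactly what the per-branch neutralIndex/toxicIndex assignments in the diff encode.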