Update app.py
app.py
CHANGED
@@ -1,30 +1,30 @@
 from transformers import BertTokenizer, BertForSequenceClassification
 import torch
 import streamlit as st
 
 tokenizer = BertTokenizer.from_pretrained(
     "ashish-001/Bert-Amazon-review-sentiment-classifier")
 model = BertForSequenceClassification.from_pretrained(
     "ashish-001/Bert-Amazon-review-sentiment-classifier")
 
 
 def classify_text(text):
     inputs = tokenizer(
         text,
         max_length=256,
         truncation=True,
         padding="max_length",
         return_tensors="pt"
     )
     output = model(**inputs)
     logits = output.logits
     probs = torch.nn.functional.sigmoid(logits)
     return probs
 
 
 st.title("Amazon Review Sentiment classifier")
 data = st.text_area("Enter or paste a review")
 if st.button('Predict'):
     prediction = classify_text(data)
     st.header(
-        f"
+        f"{prediction}")
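The only line touched by this commit is the final one: the new version interpolates the model output into the page header via f"{prediction}", so the Streamlit app displays the raw probability tensor returned by classify_text. Below is a minimal sketch of how that tensor could instead be rendered as a human-readable label. It assumes the model's two sigmoid-activated outputs map to index 0 = negative and index 1 = positive; that label order is an assumption, not something confirmed by this diff, and format_prediction is a hypothetical helper rather than part of the app.

    import torch

    def format_prediction(probs: torch.Tensor) -> str:
        # probs has shape (1, 2): one row for the single input review,
        # one sigmoid-activated score per class.
        # Assumption: index 0 = Negative, index 1 = Positive.
        negative, positive = probs[0].tolist()
        label = "Positive" if positive >= negative else "Negative"
        return f"{label} (score: {max(positive, negative):.2f})"

    # Hypothetical usage inside the existing button handler:
    # if st.button('Predict'):
    #     prediction = classify_text(data)
    #     st.header(format_prediction(prediction))

The app itself can be started locally with the standard command streamlit run app.py.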