Update app.py
app.py CHANGED
@@ -2,16 +2,13 @@
 #python -m streamlit run d:/NSFW/Project/test1.py
 import torch
 from transformers import AutoModelForSequenceClassification, AutoTokenizer
-#from transformers import BertTokenizer, BertForSequenceClassification
 import math, keras_ocr
 # Initialize pipeline
 pipeline = None
 model_path="NSFW_text_classifier"
-#tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
-#model_2 = BertForSequenceClassification.from_pretrained("CustomModel")
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 model = AutoModelForSequenceClassification.from_pretrained(model_path)
-
+
 import streamlit as st
 
 def get_distance(predictions):
@@ -107,10 +104,6 @@ if uploaded_file is not None:
 
 input_text =sentance
 print(input_text)
-#inputs = tokenizer(text,padding = True, truncation = True, return_tensors='pt').to('cpu')
-#outputs = model_2(**inputs)
-#predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)
-#predictions = predictions.cpu().detach().numpy()
 inputs = tokenizer(input_text, return_tensors="pt")
 outputs = model(**inputs)
 predictions = outputs.logits.softmax(dim=-1)
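The commit removes the commented-out BertTokenizer/BertForSequenceClassification path (model_2) and keeps only the AutoTokenizer/AutoModelForSequenceClassification path loaded from model_path = "NSFW_text_classifier". A minimal sketch of that retained classification path, assuming the checkpoint directory is available locally and carries id2label metadata; the classify helper and the example sentence are illustrative and not part of app.py:

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_path = "NSFW_text_classifier"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)

def classify(text: str):
    # Same three steps as the diff: tokenize, forward pass, softmax over logits.
    inputs = tokenizer(text, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    predictions = outputs.logits.softmax(dim=-1)
    label_id = int(predictions.argmax(dim=-1))
    return model.config.id2label[label_id], float(predictions[0, label_id])

print(classify("example sentence to score"))  # hypothetical input

In app.py the same tokenize/forward/softmax sequence appears to run on each sentence (input_text = sentance) inside the "if uploaded_file is not None:" branch, i.e. on text recovered from the uploaded image via keras_ocr.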