Upload app.py
app.py
ADDED
@@ -0,0 +1,75 @@
import streamlit as st
import streamlit.components.v1 as com
#import libraries
from transformers import AutoModelForSequenceClassification, AutoTokenizer, AutoConfig
import numpy as np
#convert logits to probabilities
from scipy.special import softmax


#import the model
tokenizer = AutoTokenizer.from_pretrained('distilbert-base-uncased')

model_path = "penscola/news-d-bert"
config = AutoConfig.from_pretrained(model_path)
model = AutoModelForSequenceClassification.from_pretrained(model_path)
#Set the page configs
st.set_page_config(page_title='Fake News Detection', page_icon='😎', layout='wide')

#Welcome heading
st.markdown('<h1> Fake News Detection </h1>', unsafe_allow_html=True)

#Create a form to take user inputs
with st.form(key='tweet', clear_on_submit=True):
    text = st.text_area('Copy and paste the news or type one')
    submit = st.form_submit_button('submit')

#Create columns to show outputs
col2, col3 = st.columns(2)
col2.title('Fake or Legit')
col3.title('Confidence of this prediction')

if submit:
    print('submitted')
    #pass text to the preprocessor
    def preprocess(text):
        #initiate an empty list
        new_text = []
        #split text by space
        for t in text.split(" "):
            #replace usernames with @user
            t = '@user' if t.startswith('@') and len(t) > 1 else t
            #replace links with http
            t = 'http' if t.startswith('http') else t
            #store the token in the list
            new_text.append(t)
        #join the list back into a string
        return " ".join(new_text)


    #pass text to the model

    #map label ids to readable names
    config.id2label = {0: 'Fake', 1: 'Legit'}

    text = preprocess(text)

    # PyTorch-based model
    encoded_input = tokenizer(text, return_tensors='pt')
    output = model(**encoded_input)
    scores = output[0][0].detach().numpy()
    scores = softmax(scores)

    #Process scores
    ranking = np.argsort(scores)
    ranking = ranking[::-1]
    l = config.id2label[ranking[0]]
    s = scores[ranking[0]]

    #output
    if l == 'Legit':
        col2.write('Legit')
        col3.write(f'{s * 100:.2f}%')
    else:
        col2.write('Fake')
        col3.write(f'{s * 100:.2f}%')
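For quick checks outside the Streamlit UI, the same inference path can be exercised from a plain Python script. The sketch below is a minimal, hedged example: it assumes torch, transformers, scipy and numpy are installed and that the penscola/news-d-bert checkpoint is reachable on the Hugging Face Hub; the predict helper and the sample headline are illustrative, not part of the uploaded app.

import numpy as np
from scipy.special import softmax
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Assumed model location and label mapping, mirroring app.py
MODEL_PATH = "penscola/news-d-bert"
ID2LABEL = {0: "Fake", 1: "Legit"}

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
model = AutoModelForSequenceClassification.from_pretrained(MODEL_PATH)

def predict(text):
    """Return the predicted label and its softmax confidence."""
    encoded = tokenizer(text, return_tensors="pt", truncation=True)
    logits = model(**encoded).logits[0].detach().numpy()
    probs = softmax(logits)
    best = int(np.argmax(probs))
    return ID2LABEL[best], float(probs[best])

if __name__ == "__main__":
    # Placeholder input for illustration only
    label, confidence = predict("Example headline to classify.")
    print(f"{label} ({confidence * 100:.2f}%)")

The Space itself is started with streamlit run app.py, assuming streamlit, transformers, torch, scipy and numpy are listed in the Space's requirements.txt.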