Update app.py
app.py CHANGED
@@ -11,8 +11,6 @@ from datasets import load_dataset
 from sentence_transformers import SentenceTransformer
 from Scraper import Scrap
 
-st.set_page_config(layout="wide")
-
 model_checkpoint = "Rifky/indobert-hoax-classification"
 base_model_checkpoint = "indobenchmark/indobert-base-p1"
 data_checkpoint = "Rifky/indonesian-hoax-news"
@@ -70,20 +68,19 @@ if submit:
     ).flatten()
     sorted = np.argsort(similarity_score)[::-1].tolist()
 
-
-
-    input_column.markdown(f"<small>Compute Finished in {int(time.time() - last_time)} seconds</small>", unsafe_allow_html=True)
+    st.markdown(f"<small>Compute Finished in {int(time.time() - last_time)} seconds</small>", unsafe_allow_html=True)
     prediction = np.argmax(result, axis=-1)
     if prediction == 0:
-
+        st.success(f"This news is {label[prediction]}.")
    else:
-
+        st.error(f"This news is {label[prediction]}.")
 
-
-
+    st.text(f"{int(result[prediction]*100)}% confidence")
+    st.progress(result[prediction])
 
-
-
-
-
-
+    with st.expander("Related Articles"):
+        for i in sorted[:5]:
+            st.write(f"""
+                <small>{data["url"][i].split("/")[2]}</small>
+                <a href={data["url"][i]}>{data["title"][i]}</a>
+            """, unsafe_allow_html=True)
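For context, a minimal standalone sketch of the display pattern this change switches to: writing results with top-level st.* calls (success/error banner, confidence text, progress bar, and a "Related Articles" expander) instead of the removed input_column handle. The label mapping, result vector, data records, and ranking below are hard-coded placeholders for illustration only; in app.py they come from the classifier output, the Rifky/indonesian-hoax-news dataset, and the similarity ranking.

# sketch.py -- run with: streamlit run sketch.py
# Placeholder inputs (assumptions, not values from app.py).
import numpy as np
import streamlit as st

label = {0: "valid", 1: "a hoax"}                 # assumed label mapping
result = np.array([0.87, 0.13])                   # stand-in for the model's softmax output
data = {
    "url": ["https://example.com/news/article-1"],
    "title": ["Example related article"],
}
ranking = [0]                                     # stand-in for the similarity ranking

prediction = int(np.argmax(result, axis=-1))
if prediction == 0:
    st.success(f"This news is {label[prediction]}.")
else:
    st.error(f"This news is {label[prediction]}.")

st.text(f"{int(result[prediction] * 100)}% confidence")
st.progress(float(result[prediction]))            # st.progress accepts a float in [0.0, 1.0]

with st.expander("Related Articles"):
    for i in ranking[:5]:
        st.write(
            f"""
            <small>{data["url"][i].split("/")[2]}</small>
            <a href={data["url"][i]}>{data["title"][i]}</a>
            """,
            unsafe_allow_html=True,               # needed to render the raw HTML links
        )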