kxx-kkk committed on
Commit
6f2256e
·
verified ·
1 Parent(s): 623192d

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -11
app.py CHANGED
@@ -1,34 +1,41 @@
1
  import streamlit as st
2
  from transformers import pipeline
3
  from transformers import AutoModelForQuestionAnswering, AutoTokenizer
4
- from transformers import DebertaV2Tokenizer
5
 
 
6
  st.set_page_config(page_title="Automated Question Answering System")
7
- st.title("Automated Question Answering System")
8
- st.subheader("Try")
 
 
 
9
 
 
10
  @st.cache_resource(show_spinner=True)
11
  def question_model():
 
12
  model_name = "kxx-kkk/FYP_deberta-v3-base-squad2_mrqa"
13
  tokenizer = AutoTokenizer.from_pretrained(model_name)
14
  model = AutoModelForQuestionAnswering.from_pretrained(model_name)
15
  question_answerer = pipeline("question-answering", model=model, tokenizer=tokenizer)
16
  return question_answerer
17
 
18
- st.markdown("<h2 style='text-align: center; color:grey;'>Question Answering on Academic Essays</h2>", unsafe_allow_html=True)
19
- st.markdown("<h3 style='text-align: left; color:#F63366; font-size:18px;'><b>What is extractive question answering about?<b></h3>", unsafe_allow_html=True)
20
- st.write("Extractive question answering is a Natural Language Processing task where text is provided for a model so that the model can refer to it and make predictions about where the answer to a question is.")
21
- # st.markdown('___')
22
-
23
  tab1, tab2 = st.tabs(["Input text", "Upload File"])
24
 
 
25
  with tab1:
26
  sample_question = "What is NLP?"
27
  with open("sample.txt", "r") as text_file:
28
  sample_text = text_file.read()
29
 
30
- context = st.text_area("Use the example below / input your essay in English", value=sample_text, height=330)
31
- question = st.text_input(label="Use the example question below / enter your own question", value=sample_question)
 
 
 
 
 
32
  button = st.button("Get answer")
33
  if button:
34
  with st.spinner(text="Loading question model..."):
@@ -38,7 +45,8 @@ with tab1:
38
  answer = answer["answer"]
39
  container = st.container(border=True)
40
  container.write("<h5><b>Answer:</b></h5>" + answer, unsafe_allow_html=True)
41
-
 
42
  with tab2:
43
  uploaded_file = st.file_uploader("Choose a .txt file to upload", type=["txt"])
44
  if uploaded_file is not None:
 
1
import streamlit as st
from transformers import pipeline, AutoModelForQuestionAnswering, AutoTokenizer

# Browser-tab title for the app.
st.set_page_config(page_title="Automated Question Answering System")

# Page heading plus a short primer on what extractive QA means.
st.markdown("<h2 style='text-align: center; color:grey;'>Question Answering on Academic Essays</h2>", unsafe_allow_html=True)
st.markdown("<h3 style='text-align: left; color:#F63366; font-size:18px;'><b>What is extractive question answering about?<b></h3>", unsafe_allow_html=True)
st.write("Extractive question answering is a Natural Language Processing task where text is provided for a model so that the model can refer to it and make predictions about where the answer to a question is.")
# st.markdown('___')
12
 
13
# Cached so the (large) checkpoint is downloaded/loaded once per session,
# not on every Streamlit rerun.
@st.cache_resource(show_spinner=True)
def question_model():
    """Build and return the extractive question-answering pipeline.

    Loads the fine-tuned DeBERTa-v3 SQuAD2/MRQA checkpoint and wraps it
    in a transformers "question-answering" pipeline.
    """
    checkpoint = "kxx-kkk/FYP_deberta-v3-base-squad2_mrqa"
    # Load tokenizer first, then the model weights (same order as before).
    tok = AutoTokenizer.from_pretrained(checkpoint)
    qa_model = AutoModelForQuestionAnswering.from_pretrained(checkpoint)
    return pipeline("question-answering", model=qa_model, tokenizer=tok)
22
 
23
+ # choose the source with different tabs
 
 
 
 
24
  tab1, tab2 = st.tabs(["Input text", "Upload File"])
25
 
26
+ # if type the text as input
27
  with tab1:
28
  sample_question = "What is NLP?"
29
  with open("sample.txt", "r") as text_file:
30
  sample_text = text_file.read()
31
 
32
+ example = st.button("Try example")
33
+ context = st.text_area("Enter the essay below:", height=330)
34
+ question = st.text_input(label="Enter the question: ")
35
+ if example:
36
+ context.value = sample_text
37
+ question.value = sample_question
38
+
39
  button = st.button("Get answer")
40
  if button:
41
  with st.spinner(text="Loading question model..."):
 
45
  answer = answer["answer"]
46
  container = st.container(border=True)
47
  container.write("<h5><b>Answer:</b></h5>" + answer, unsafe_allow_html=True)
48
+
49
+ # if upload file as input
50
  with tab2:
51
  uploaded_file = st.file_uploader("Choose a .txt file to upload", type=["txt"])
52
  if uploaded_file is not None: