Upload app.py
app.py
CHANGED
@@ -30,6 +30,8 @@ def question_model():
     question_answerer = pipeline("question-answering", model=model, tokenizer=tokenizer, handle_impossible_answer=True)
     return question_answerer
 
+qamodel = question_model()
+
 @st.cache_data(show_spinner=False)
 def extract_text(file_path):
     text = ""
@@ -72,7 +74,7 @@ def extract_text(file_path):
 
 def question_answering(context, question):
     with st.spinner(text="Loading question model..."):
-        question_answerer =
+        question_answerer = qamodel
     with st.spinner(text="Getting answer..."):
         segment_size = 45000
         overlap_size = 50
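For context, a minimal sketch of the pattern this commit introduces: the question-answering pipeline is built once at module level (qamodel = question_model()) and reused inside question_answering(), instead of being constructed on every call. Only the lines shown in the diff come from app.py; the caching decorator, model checkpoint, and the body of question_model() below are illustrative assumptions.

import streamlit as st
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

@st.cache_resource(show_spinner=False)  # assumed decorator; the diff does not show how question_model() is cached
def question_model():
    # Placeholder checkpoint; the real app.py loads its own model and tokenizer.
    model_name = "distilbert-base-cased-distilled-squad"
    model = AutoModelForQuestionAnswering.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    question_answerer = pipeline(
        "question-answering",
        model=model,
        tokenizer=tokenizer,
        handle_impossible_answer=True,
    )
    return question_answerer

# Created once when the script runs, as in the first hunk of the diff.
qamodel = question_model()

def question_answering(context, question):
    with st.spinner(text="Loading question model..."):
        question_answerer = qamodel  # reuse the module-level pipeline
    with st.spinner(text="Getting answer..."):
        # The real function chunks the context (segment_size/overlap_size); this
        # sketch just runs the pipeline on the full context.
        return question_answerer(question=question, context=context)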