Spaces:
Paused
Paused
modify code
Browse files
app.py
CHANGED
@@ -10,9 +10,7 @@ from langchain_community.vectorstores import FAISS
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
11 |
|
12 |
|
13 |
-
# Global variables
|
14 |
-
knowledge_base = None
|
15 |
-
qa_chain = None
|
16 |
|
17 |
# PDF 파일 로드 및 텍스트 추출
|
18 |
def load_pdf(pdf_file):
|
@@ -82,24 +80,22 @@ def main():
|
|
82 |
st.write(f"Total characters extracted: {len(text)}")
|
83 |
|
84 |
if st.button("Create Knowledge Base"):
|
85 |
-
global knowledge_base
|
86 |
chunks = split_text(text)
|
87 |
-
knowledge_base = create_knowledge_base(chunks)
|
88 |
-
print("knowledge_base:", knowledge_base)
|
89 |
|
90 |
-
if knowledge_base is None:
|
91 |
st.error("Failed to create knowledge base.")
|
92 |
return
|
93 |
|
94 |
# QA 체인 설정
|
95 |
-
global qa_chain
|
96 |
try:
|
97 |
pipe = load_model()
|
98 |
except Exception as e:
|
99 |
st.error(f"Error loading model: {e}")
|
100 |
return
|
101 |
llm = HuggingFacePipeline(pipeline=pipe)
|
102 |
-
qa_chain = load_qa_chain(llm, chain_type="map_rerank")
|
103 |
|
104 |
st.success("Knowledge base created! You can now ask questions.")
|
105 |
|
@@ -107,7 +103,7 @@ def main():
|
|
107 |
st.error(f"Failed to process the PDF: {str(e)}")
|
108 |
|
109 |
# 질문-응답 섹션
|
110 |
-
if knowledge_base and qa_chain:
|
111 |
with st.expander("Ask Questions", expanded=True):
|
112 |
prompt = st.text_input("Chat here!")
|
113 |
|
@@ -121,14 +117,13 @@ def main():
|
|
121 |
# 모델 응답 처리
|
122 |
def get_response_from_model(prompt):
|
123 |
try:
|
124 |
-
|
125 |
-
if not knowledge_base:
|
126 |
return "No PDF has been uploaded yet."
|
127 |
-
if not qa_chain:
|
128 |
return "QA chain is not initialized."
|
129 |
|
130 |
-
docs = knowledge_base.similarity_search(prompt)
|
131 |
-
response = qa_chain.run(input_documents=docs, question=prompt)
|
132 |
|
133 |
if "Helpful Answer:" in response:
|
134 |
response = response.split("Helpful Answer:")[1].strip()
|
|
|
10 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
11 |
|
12 |
|
13 |
+
# Global variables are no longer needed, we will use session state
|
|
|
|
|
14 |
|
15 |
# PDF 파일 로드 및 텍스트 추출
|
16 |
def load_pdf(pdf_file):
|
|
|
80 |
st.write(f"Total characters extracted: {len(text)}")
|
81 |
|
82 |
if st.button("Create Knowledge Base"):
|
|
|
83 |
chunks = split_text(text)
|
84 |
+
st.session_state.knowledge_base = create_knowledge_base(chunks)
|
85 |
+
print("knowledge_base:", st.session_state.knowledge_base)
|
86 |
|
87 |
+
if st.session_state.knowledge_base is None:
|
88 |
st.error("Failed to create knowledge base.")
|
89 |
return
|
90 |
|
91 |
# QA 체인 설정
|
|
|
92 |
try:
|
93 |
pipe = load_model()
|
94 |
except Exception as e:
|
95 |
st.error(f"Error loading model: {e}")
|
96 |
return
|
97 |
llm = HuggingFacePipeline(pipeline=pipe)
|
98 |
+
st.session_state.qa_chain = load_qa_chain(llm, chain_type="map_rerank")
|
99 |
|
100 |
st.success("Knowledge base created! You can now ask questions.")
|
101 |
|
|
|
103 |
st.error(f"Failed to process the PDF: {str(e)}")
|
104 |
|
105 |
# 질문-응답 섹션
|
106 |
+
if "knowledge_base" in st.session_state and "qa_chain" in st.session_state:
|
107 |
with st.expander("Ask Questions", expanded=True):
|
108 |
prompt = st.text_input("Chat here!")
|
109 |
|
|
|
117 |
# 모델 응답 처리
|
118 |
def get_response_from_model(prompt):
|
119 |
try:
|
120 |
+
if "knowledge_base" not in st.session_state:
|
|
|
121 |
return "No PDF has been uploaded yet."
|
122 |
+
if "qa_chain" not in st.session_state:
|
123 |
return "QA chain is not initialized."
|
124 |
|
125 |
+
docs = st.session_state.knowledge_base.similarity_search(prompt)
|
126 |
+
response = st.session_state.qa_chain.run(input_documents=docs, question=prompt)
|
127 |
|
128 |
if "Helpful Answer:" in response:
|
129 |
response = response.split("Helpful Answer:")[1].strip()
|