Update app.py
app.py CHANGED
@@ -22,6 +22,7 @@ from langchain.chains import ConversationalRetrievalChain
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.runnables import RunnablePassthrough
 from langchain import hub
+from state_manager import StateManager
 
 
 
@@ -120,10 +121,10 @@ def main():
     st.header("Chat with multiple PDFs :books:")
 
 
-
+    state_manager = StateManager()
 
     if user_question := st.text_input("Ask a question about your documents:"):
-        handle_userinput(user_question)
+        handle_userinput(user_question,state_manager)
 
 
     with st.sidebar:
@@ -139,11 +140,10 @@ def main():
                 text_chunks = get_text_chunks(raw_text)
 
                 # create vector store
-
+                state_manager.create_vectorstore(text_chunks)
 
                 # create conversation chain
-
-                    vectorstore)
+                state_manager.create_conversation_chain()
 
 
 
@@ -156,35 +156,32 @@ def main():
 
 
 
-def handle_userinput(user_question ):
-
+def handle_userinput(user_question, state_manager):
     if "chat_history" not in st.session_state:
         st.session_state["chat_history"] = [
-
-
-
-
+            {"role": "assistant", "content": "Hi, I'm a Q&A chatbot who is based on your imported pdf documents. How can I help you?"}
+        ]
+
     st.session_state.chat_history.append({"role": "user", "content": user_question})
-
-
 
-
-
-
-
+    if state_manager.vectorstore is not None and state_manager.conversation_chain is not None:
+        # Invoke conversation chain
+        response = state_manager.conversation_chain({"question": user_question})
+        st.session_state.chat_history.append({"role": "assistant", "content": response})
 
     for i, message in enumerate(st.session_state.chat_history):
         if i % 2 == 0:
             st.write(user_template.replace(
-                "{{MSG}}", message['content']), unsafe_allow_html=True)
+                "{{MSG}}", message['content']), unsafe_allow_html=True)
         else:
             st.write(bot_template.replace(
                 "{{MSG}}", message['content']), unsafe_allow_html=True)
 
     st.subheader("Your documents")
-
-
-
+    if state_manager.vectorstore is not None:
+        docs = state_manager.vectorstore.as_retriever().get_relevant_documents(user_question)
+        for doc in docs:
+            st.write(f"Document: {doc}")
 
 
 
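The commit imports StateManager from state_manager, which is not part of this diff. Based only on the names the new code uses (the vectorstore and conversation_chain attributes, create_vectorstore, create_conversation_chain) and on the ConversationalRetrievalChain import visible in the first hunk's context, a minimal sketch of what that module might look like follows; the embedding model, vector store backend, and LLM are assumptions, not taken from this repository.

# state_manager.py -- hypothetical sketch. Only the names used by app.py
# (vectorstore, conversation_chain, create_vectorstore, create_conversation_chain)
# come from the diff; the embeddings, vector store backend, and LLM are assumptions.
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings, ChatOpenAI
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain


class StateManager:
    """Holds the vector store and conversation chain that app.py wires together."""

    def __init__(self):
        # Both start as None; handle_userinput checks `is not None` before using them.
        self.vectorstore = None
        self.conversation_chain = None

    def create_vectorstore(self, text_chunks):
        # Embed the PDF text chunks and index them for retrieval.
        embeddings = OpenAIEmbeddings()
        self.vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)

    def create_conversation_chain(self):
        # Build a retrieval chain over the vector store; app.py later calls it
        # as state_manager.conversation_chain({"question": user_question}).
        memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
        self.conversation_chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(),
            retriever=self.vectorstore.as_retriever(),
            memory=memory,
        )

With a shape like this, conversational memory lives inside the chain, so app.py only needs st.session_state.chat_history to render the user and bot HTML templates.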