Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -11,11 +11,9 @@ from langchain.document_loaders import TextLoader
|
|
11 |
from langchain.document_loaders import Docx2txtLoader
|
12 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
13 |
import os
|
14 |
-
from dotenv import load_dotenv
|
15 |
import tempfile
|
16 |
|
17 |
-
|
18 |
-
|
19 |
def initialize_session_state():
|
20 |
if 'history' not in st.session_state:
|
21 |
st.session_state['history'] = []
|
@@ -26,11 +24,13 @@ def initialize_session_state():
|
|
26 |
if 'past' not in st.session_state:
|
27 |
st.session_state['past'] = ["Hey! 👋"]
|
28 |
|
|
|
29 |
def conversation_chat(query, chain, history):
    """Run one conversational turn.

    Invokes `chain` with the question and the accumulated chat history,
    appends the (query, answer) pair to `history` in place, and returns
    the answer string.
    """
    output = chain({"question": query, "chat_history": history})
    history.append((query, output["answer"]))
    return output["answer"]
|
33 |
|
|
|
34 |
def display_chat_history(chain):
|
35 |
reply_container = st.container()
|
36 |
container = st.container()
|
@@ -53,9 +53,8 @@ def display_chat_history(chain):
|
|
53 |
message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="thumbs")
|
54 |
message(st.session_state["generated"][i], key=str(i), avatar_style="fun-emoji")
|
55 |
|
|
|
56 |
def create_conversational_chain(vector_store):
|
57 |
-
load_dotenv()
|
58 |
-
|
59 |
replicate_api_token = "r8_MgTUrfPJIluDoXUhG7JXuPAYr6PonOW4BJCj0"
|
60 |
os.environ["REPLICATE_API_TOKEN"] = replicate_api_token
|
61 |
|
@@ -69,12 +68,12 @@ def create_conversational_chain(vector_store):
|
|
69 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
70 |
|
71 |
chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type='stuff',
|
72 |
-
|
73 |
-
|
74 |
return chain
|
75 |
|
|
|
76 |
def main():
|
77 |
-
load_dotenv()
|
78 |
initialize_session_state()
|
79 |
st.title("Chat With Your Doc")
|
80 |
st.sidebar.title("Document Processing")
|
@@ -110,4 +109,4 @@ def main():
|
|
110 |
display_chat_history(chain)
|
111 |
|
112 |
if __name__ == "__main__":
|
113 |
-
main()
|
|
|
11 |
from langchain.document_loaders import Docx2txtLoader
|
12 |
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
|
13 |
import os
|
|
|
14 |
import tempfile
|
15 |
|
16 |
+
# Initialize session state
|
|
|
17 |
def initialize_session_state():
|
18 |
if 'history' not in st.session_state:
|
19 |
st.session_state['history'] = []
|
|
|
24 |
if 'past' not in st.session_state:
|
25 |
st.session_state['past'] = ["Hey! 👋"]
|
26 |
|
27 |
+
# Conversation chat function
def conversation_chat(query, chain, history):
    """Ask `chain` the given query with prior `history`; record and return the answer."""
    response = chain({"question": query, "chat_history": history})
    answer = response["answer"]
    history.append((query, answer))
    return answer
|
32 |
|
33 |
+
# Display chat history
|
34 |
def display_chat_history(chain):
|
35 |
reply_container = st.container()
|
36 |
container = st.container()
|
|
|
53 |
message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="thumbs")
|
54 |
message(st.session_state["generated"][i], key=str(i), avatar_style="fun-emoji")
|
55 |
|
56 |
+
# Create conversational chain
|
57 |
def create_conversational_chain(vector_store):
|
|
|
|
|
58 |
replicate_api_token = "r8_MgTUrfPJIluDoXUhG7JXuPAYr6PonOW4BJCj0"
|
59 |
os.environ["REPLICATE_API_TOKEN"] = replicate_api_token
|
60 |
|
|
|
68 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
69 |
|
70 |
chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type='stuff',
|
71 |
+
retriever=vector_store.as_retriever(search_kwargs={"k": 2}),
|
72 |
+
memory=memory)
|
73 |
return chain
|
74 |
|
75 |
+
# Main function
|
76 |
def main():
|
|
|
77 |
initialize_session_state()
|
78 |
st.title("Chat With Your Doc")
|
79 |
st.sidebar.title("Document Processing")
|
|
|
109 |
display_chat_history(chain)
|
110 |
|
111 |
if __name__ == "__main__":
|
112 |
+
main()
|