Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1 |
import streamlit as st
|
2 |
from streamlit_chat import message
|
3 |
from langchain.chains import ConversationalRetrievalChain
|
4 |
-
from
|
5 |
from langchain.llms import Replicate
|
6 |
from langchain.text_splitter import CharacterTextSplitter
|
7 |
from langchain.vectorstores import FAISS
|
@@ -56,9 +56,8 @@ def display_chat_history(chain):
|
|
56 |
def create_conversational_chain(vector_store):
|
57 |
load_dotenv()
|
58 |
|
59 |
-
replicate_api_token =
|
60 |
-
|
61 |
-
raise ValueError("Replicate API token is not set. Please set the REPLICATE_API_TOKEN environment variable.")
|
62 |
|
63 |
llm = Replicate(
|
64 |
streaming=True,
|
@@ -70,8 +69,8 @@ def create_conversational_chain(vector_store):
|
|
70 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
71 |
|
72 |
chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type='stuff',
|
73 |
-
|
74 |
-
|
75 |
return chain
|
76 |
|
77 |
def main():
|
@@ -111,4 +110,4 @@ def main():
|
|
111 |
display_chat_history(chain)
|
112 |
|
113 |
if __name__ == "__main__":
|
114 |
-
main()
|
|
|
1 |
import streamlit as st
|
2 |
from streamlit_chat import message
|
3 |
from langchain.chains import ConversationalRetrievalChain
|
4 |
+
from langchain.embeddings import HuggingFaceEmbeddings
|
5 |
from langchain.llms import Replicate
|
6 |
from langchain.text_splitter import CharacterTextSplitter
|
7 |
from langchain.vectorstores import FAISS
|
|
|
56 |
def create_conversational_chain(vector_store):
|
57 |
load_dotenv()
|
58 |
|
59 |
+
replicate_api_token = "[REDACTED — leaked Replicate API token removed; NOTE(review): never commit credentials. The code already calls load_dotenv() on line 57, so read the token with os.environ.get(\"REPLICATE_API_TOKEN\") and raise if missing, as the removed lines 59–61 of the old version did. The exposed token must be revoked and rotated on replicate.com.]"
|
60 |
+
os.environ["REPLICATE_API_TOKEN"] = replicate_api_token
|
|
|
|
61 |
|
62 |
llm = Replicate(
|
63 |
streaming=True,
|
|
|
69 |
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
|
70 |
|
71 |
chain = ConversationalRetrievalChain.from_llm(llm=llm, chain_type='stuff',
|
72 |
+
retriever=vector_store.as_retriever(search_kwargs={"k": 2}),
|
73 |
+
memory=memory)
|
74 |
return chain
|
75 |
|
76 |
def main():
|
|
|
110 |
display_chat_history(chain)
|
111 |
|
112 |
if __name__ == "__main__":
|
113 |
+
main()
|