vishwask committed on
Commit 1f5a7d5 · verified · 1 Parent(s): 29bf3db

Update app.py

Files changed (1)
  1. app.py +5 -6
app.py CHANGED
@@ -10,7 +10,7 @@ from langchain.llms import HuggingFacePipeline
 from langchain.chains import ConversationChain
 from langchain.memory import ConversationBufferMemory
 from langchain.llms import HuggingFaceHub
-from langchain.memory import ConversationTokenBufferMemory
+from langchain.memory import ConversationBufferWindowMemory
 
 from pathlib import Path
 import chromadb
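
For context on the import swap: ConversationTokenBufferMemory trims history by token count, while ConversationBufferWindowMemory keeps only the last k user/assistant exchanges. A minimal sketch of the windowed behaviour, assuming the classic langchain.memory API this app already uses (the sample inputs are made up):

from langchain.memory import ConversationBufferWindowMemory

# Keep only the 3 most recent exchanges, matching the commit below.
memory = ConversationBufferWindowMemory(memory_key="history", k=3)

for i in range(5):
    memory.save_context({"input": f"question {i}"}, {"output": f"answer {i}"})

# Only the last 3 turns remain; earlier turns have been dropped from the buffer.
print(memory.load_memory_variables({})["history"])
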
@@ -128,9 +128,8 @@ def initialize_llmchain(temperature, max_tokens, top_k, vector_db, progress=gr.P
                       "load_in_8bit": True})
 
     progress(0.75, desc="Defining buffer memory...")
-    memory = ConversationBufferMemory(memory_key="chat_history",output_key='answer',return_messages=True)
-    #memory = ConversationTokenBufferMemory(llm = llm, max_token_limit=100)
-    # retriever=vector_db.as_retriever(search_type="similarity", search_kwargs={'k': 3})
+    #memory = ConversationBufferMemory(memory_key="chat_history",output_key='answer',return_messages=True)
+    memory = ConversationBufferWindowMemory(memory_key = 'history', k=3)
     retriever=vector_db.as_retriever()
     progress(0.8, desc="Defining retrieval chain...")
     qa_chain = ConversationalRetrievalChain.from_llm(llm,retriever=retriever,chain_type="stuff",
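
The comment removed in this hunk points at an alternative retriever setup. A short sketch of both forms against a Chroma store like the one app.py builds (collection name and embedding model below are illustrative, not taken from the app):

from langchain.vectorstores import Chroma
from langchain.embeddings import HuggingFaceEmbeddings

# Hypothetical store; app.py builds its own Chroma collection elsewhere.
vector_db = Chroma(collection_name="docs", embedding_function=HuggingFaceEmbeddings())

# Default retriever: similarity search with the store's default k (typically 4 for Chroma).
retriever = vector_db.as_retriever()

# The commented-out variant pins the search type and caps results at 3 chunks.
retriever_k3 = vector_db.as_retriever(search_type="similarity", search_kwargs={"k": 3})
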
@@ -177,9 +176,9 @@ def initialize_LLM(llm_temperature, max_tokens, top_k, vector_db, progress=gr.Pr
     return qa_chain, "Complete!"
 
 
-def format_chat_history(message, chat_history):
+def format_chat_history(message, history):
     formatted_chat_history = []
-    for user_message, bot_message in chat_history:
+    for user_message, bot_message in history:
         formatted_chat_history.append(f"User: {user_message}")
         formatted_chat_history.append(f"Assistant: {bot_message}")
     return formatted_chat_history
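
A quick usage sketch of the renamed helper, assuming format_chat_history from the hunk above is in scope: Gradio's Chatbot history arrives as a list of (user, assistant) tuples, and the first argument is not used in the body shown here (the sample strings are invented):

# Hypothetical chat history as produced by a Gradio Chatbot component.
history = [("What is in the report?", "It summarises the Q3 results."),
           ("Any risks flagged?", "Yes, two supply-chain risks.")]

print(format_chat_history("next question", history))
# ['User: What is in the report?', 'Assistant: It summarises the Q3 results.',
#  'User: Any risks flagged?', 'Assistant: Yes, two supply-chain risks.']
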
 