Pijush2023 committed on
Commit
b1e2ae2
·
verified ·
1 Parent(s): 5be0d89

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -3
app.py CHANGED
@@ -20,6 +20,7 @@ import tempfile
20
  from langchain.memory import ConversationBufferWindowMemory
21
  import time
22
  import logging
 
23
 
24
 
25
 
@@ -124,7 +125,6 @@ _search_query = RunnableBranch(
124
  )
125
  | CONDENSE_QUESTION_PROMPT
126
  | ChatOpenAI(temperature=0, api_key=os.environ['OPENAI_API_KEY'])
127
- | conversational_memory
128
  | StrOutputParser(),
129
  ),
130
  RunnableLambda(lambda x: x["question"]),
@@ -202,13 +202,20 @@ def add_message(history, message):
202
  history.append((message, None)) # Add the user's message to the chat history only if it's not empty
203
  return history, "" # Clear the input box
204
 
205
-
 
 
 
 
 
206
  # Define function to generate a streaming response
207
  def chat_with_bot(messages):
208
  user_message = messages[-1][0] # Get the last user message (input)
209
  messages[-1] = (user_message, "") # Prepare the placeholder for the bot's response
210
 
211
- response = get_response(user_message)
 
 
212
 
213
  # Simulate streaming response by iterating over each character in the response
214
  for character in response:
 
20
  from langchain.memory import ConversationBufferWindowMemory
21
  import time
22
  import logging
23
+ from langchain.chains import ConversationChain
24
 
25
 
26
 
 
125
  )
126
  | CONDENSE_QUESTION_PROMPT
127
  | ChatOpenAI(temperature=0, api_key=os.environ['OPENAI_API_KEY'])
 
128
  | StrOutputParser(),
129
  ),
130
  RunnableLambda(lambda x: x["question"]),
 
202
  history.append((message, None)) # Add the user's message to the chat history only if it's not empty
203
  return history, "" # Clear the input box
204
 
205
+ # Define the conversation chain using the LLM and memory
206
+ conversation_chain = ConversationChain(
207
+ llm=chat_model,
208
+ memory=conversational_memory,
209
+ verbose=True
210
+ )
211
  # Define function to generate a streaming response
212
  def chat_with_bot(messages):
213
  user_message = messages[-1][0] # Get the last user message (input)
214
  messages[-1] = (user_message, "") # Prepare the placeholder for the bot's response
215
 
216
+ #response = get_response(user_message)
217
+ response = conversation_chain.predict(input=user_message)
218
+
219
 
220
  # Simulate streaming response by iterating over each character in the response
221
  for character in response: