Kelechi Osuji committed on
Commit c7e445e · 1 Parent(s): decc45d

Fixed more deprecation error

Files changed (2)
  1. config.py +1 -1
  2. workflow.py +14 -15
config.py CHANGED
@@ -1,5 +1,5 @@
 import os
-from langchain_community.chat_models import ChatOpenAI
+from langchain_openai import ChatOpenAI
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 
 
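The body of get_chat_model is not part of this diff. For context, a minimal sketch of what such a factory might look like with the relocated import; the model name and streaming setup below are assumptions for illustration, not the committed code:

import os
from langchain_openai import ChatOpenAI
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler

def get_chat_model():
    # Hypothetical body for illustration; the committed implementation is not shown in this diff.
    return ChatOpenAI(
        model="gpt-4o-mini",  # assumed model name
        streaming=True,       # stream tokens to stdout via the callback below
        callbacks=[StreamingStdOutCallbackHandler()],
    )  # the OpenAI API key is read from the OPENAI_API_KEY environment variable by default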
workflow.py CHANGED
@@ -1,30 +1,29 @@
-from langchain.chains import ConversationChain
+#from langchain_openai import ChatOpenAI
 from langchain.prompts import PromptTemplate
 from langchain.memory import ConversationBufferMemory
+from langchain_core.runnables.history import RunnableWithMessageHistory
 from config import get_chat_model
 
-
 def get_workflow():
-    """Set up the chatbot workflow with memory and prompt template."""
-
+    """Set up the chatbot workflow using the updated RunnableWithMessageHistory."""
+
     # Define the prompt template for regular conversation
     prompt_template = PromptTemplate(
-        input_variables=["question"],
-        template="You are a helpful assistant. Answer the question: {question}"
+        input_variables=["input"],  # Match the memory variable name
+        template="You are a helpful assistant. Answer the question: {input}"
     )
-
+
     # Create a memory object to remember previous conversations
-    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+    memory = ConversationBufferMemory(memory_key="input", return_messages=True)
 
-    chat_model = get_chat_model()  # Fetch the configured model from config.py
+    # Fetch the pre-configured model from config.py
+    chat_model = get_chat_model()
 
-
-    # Create the conversation chain
-    conversation_chain = ConversationChain(
-        llm=chat_model,
+    # Create the updated RunnableWithMessageHistory
+    conversation_chain = RunnableWithMessageHistory(
         memory=memory,
-        verbose=True,
+        runnable=chat_model,
         prompt=prompt_template
     )
-
+
     return conversation_chain
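For reference, RunnableWithMessageHistory is typically composed by wrapping a runnable (commonly prompt | model) together with a session-history factory. A minimal sketch of that pattern, assuming a recent langchain_core; the in-process session store, function names, and prompt wording below are illustrative, not the committed code:

from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory

from config import get_chat_model

_sessions = {}  # hypothetical in-process store, keyed by session_id

def _get_session_history(session_id: str) -> InMemoryChatMessageHistory:
    # Return (or lazily create) the chat history for this session.
    if session_id not in _sessions:
        _sessions[session_id] = InMemoryChatMessageHistory()
    return _sessions[session_id]

def build_workflow():
    # Pipe a chat prompt into the configured model, then wrap the chain with per-session history.
    prompt = ChatPromptTemplate.from_messages([
        ("system", "You are a helpful assistant."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
    ])
    chain = prompt | get_chat_model()
    return RunnableWithMessageHistory(
        chain,
        _get_session_history,
        input_messages_key="input",
        history_messages_key="chat_history",
    )

# Usage: the session id is supplied at invoke time.
# build_workflow().invoke({"input": "Hi"}, config={"configurable": {"session_id": "demo"}})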