Kelechi Osuji committed on
Commit
bed90e7
·
1 Parent(s): 782a720

Updated get_workflow function with get_session_history function

Browse files
Files changed (1) hide show
  1. workflow.py +15 -9
workflow.py CHANGED
@@ -1,28 +1,34 @@
1
- #from langchain_openai import ChatOpenAI
2
  from langchain.prompts import PromptTemplate
3
  from langchain.memory import ConversationBufferMemory
4
  from langchain_core.runnables.history import RunnableWithMessageHistory
5
  from config import get_chat_model
6
 
 
 
 
 
7
  def get_workflow():
8
- """Set up the chatbot workflow using the updated RunnableWithMessageHistory."""
9
 
10
- # Define the prompt template for regular conversation
11
  prompt_template = PromptTemplate(
12
- input_variables=["input"], # Match the memory variable name
13
- template="You are a helpful assistant. Answer the question: {input}"
14
  )
15
 
16
- # Create a memory object to remember previous conversations
17
  memory = ConversationBufferMemory(memory_key="input", return_messages=True)
18
 
19
- # Fetch the pre-configured model from config.py
20
  chat_model = get_chat_model()
21
 
22
- # Create the updated RunnableWithMessageHistory
23
  conversation_chain = RunnableWithMessageHistory(
 
 
24
  memory=memory,
25
- runnable=chat_model,
26
  prompt=prompt_template
27
  )
28
 
 
1
+ from langchain.schema import BaseMemory
2
  from langchain.prompts import PromptTemplate
3
  from langchain.memory import ConversationBufferMemory
4
  from langchain_core.runnables.history import RunnableWithMessageHistory
5
  from config import get_chat_model
6
 
7
def get_session_history(memory: "BaseMemory"):
    """Return the chat-history store backing *memory*.

    ``RunnableWithMessageHistory`` expects its session-history factory to
    yield a chat-history object it can both read and append new turns to,
    so hand back the underlying ``chat_memory`` store rather than a plain
    snapshot list of messages.

    Args:
        memory: a LangChain memory object (e.g. ``ConversationBufferMemory``)
            expected to expose a ``chat_memory`` attribute.

    Returns:
        The ``chat_memory`` history object, or an empty list when *memory*
        has no ``chat_memory`` attribute (preserves the original fallback).
    """
    # NOTE(review): the previous version returned chat_memory.messages — a
    # detached list that RunnableWithMessageHistory cannot append to.
    return memory.chat_memory if hasattr(memory, "chat_memory") else []
11
def get_workflow():
    """Set up the chatbot workflow with memory and a prompt template.

    Composes the prompt and chat model into a runnable chain, then wraps
    it in ``RunnableWithMessageHistory`` so each turn is recorded in (and
    replayed from) a ``ConversationBufferMemory``-backed store.

    Returns:
        RunnableWithMessageHistory: the conversational chain.  Invoke it
        with ``{"input": ...}`` and a ``session_id`` in the run config.
    """
    # Prompt for a single-turn question; {input} is filled on each call.
    prompt_template = PromptTemplate(
        input_variables=["input"],
        template="You are a helpful assistant. Answer the input: {input}",
    )

    # Backing store for the conversation transcript.
    memory = ConversationBufferMemory(memory_key="input", return_messages=True)

    # Fetch the pre-configured model from config.py.
    chat_model = get_chat_model()

    # RunnableWithMessageHistory takes a runnable plus a session-history
    # factory — it has no llm=/memory=/prompt=/verbose= keyword arguments,
    # so the prompt and model must be composed into a chain first.
    chain = prompt_template | chat_model
    conversation_chain = RunnableWithMessageHistory(
        chain,
        # The factory receives a session_id; here every session maps to the
        # single shared memory store.
        lambda session_id: get_session_history(memory),
        input_messages_key="input",
    )

    # The previous version never returned the chain, so callers got None.
    return conversation_chain