# my_memory_logic.py
"""Session-scoped conversation memory wrapped around the pipeline RAG chain."""

import os

# Import the "run_with_chain_context" function from pipeline.py.
# This function must accept a dict with {"input": ..., "chat_history": ...}
# and return a dict with {"answer": ...}.
from pipeline import run_with_chain_context

# For session-based chat history
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory

###############################################################################
# 1) In-Memory Store: session_id -> ChatMessageHistory
###############################################################################
# NOTE(review): this store is process-local and unbounded; histories live for
# the lifetime of the process. Confirm that is acceptable for deployment.
store: dict = {}  # e.g., {"abc123": ChatMessageHistory(...)}


def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """
    Retrieve (or create) a ChatMessageHistory for the given session_id.

    This ensures each session_id has its own conversation transcript.

    Args:
        session_id: Opaque key identifying one conversation.

    Returns:
        The ChatMessageHistory associated with session_id, freshly created
        on first use.
    """
    if session_id not in store:
        store[session_id] = ChatMessageHistory()
    return store[session_id]


###############################################################################
# 2) Build a RunnableWithMessageHistory that wraps "run_with_chain_context"
###############################################################################
# "run_with_chain_context" must be a function returning a dict,
# e.g. {"answer": "... final string ..."}
# Key mapping handed to RunnableWithMessageHistory:
#   input_messages_key   -> "input"
#   history_messages_key -> "chat_history"
#   output_messages_key  -> "answer"
conversational_rag_chain = RunnableWithMessageHistory(
    run_with_chain_context,  # from pipeline.py
    get_session_history,
    input_messages_key="input",
    history_messages_key="chat_history",
    output_messages_key="answer",
)


###############################################################################
# 3) A convenience function that calls our chain with session-based memory
###############################################################################
def run_with_session_memory(user_query: str, session_id: str) -> str:
    """
    Call 'conversational_rag_chain' for one turn of a given session.

    The session_id selects (or creates) the per-session chat history via
    get_session_history, so repeated calls with the same session_id share
    conversational context.

    Args:
        user_query: The user's question for this turn.
        session_id: Key identifying which conversation history to use.

    Returns:
        The final 'answer' string produced by run_with_chain_context.
    """
    response = conversational_rag_chain.invoke(
        {"input": user_query},
        config={"configurable": {"session_id": session_id}},
    )
    return response["answer"]