# my_memory_logic.py
import os

from langchain.memory import ConversationBufferMemory
from langchain.chains import LLMChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    MessagesPlaceholder,
    HumanMessagePromptTemplate,
)

# Import ChatGroq from the langchain_groq package
from langchain_groq import ChatGroq

# 1) Memory object for storing conversation messages
memory = ConversationBufferMemory(return_messages=True)

# 2) Restatement system prompt for question rewriting
restatement_system_prompt = (
    "Given a chat history and the latest user question "
    "which might reference context in the chat history, "
    "formulate a standalone question that can be understood "
    "without the chat history. Do NOT answer the question, "
    "just reformulate it if needed; otherwise return it as is."
)

# 3) Build the ChatPromptTemplate
restatement_prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(restatement_system_prompt),
    MessagesPlaceholder(variable_name="chat_history"),
    HumanMessagePromptTemplate.from_template("{input}"),
])

# 4) Initialize the ChatGroq LLM
#    Ensure you have your GROQ_API_KEY set in the environment
restatement_llm = ChatGroq(
    model="llama3-70b-8192",  # or whichever model
    groq_api_key=os.environ["GROQ_API_KEY"],
)

# 5) Create the LLMChain for restatement
restatement_chain = LLMChain(
    llm=restatement_llm,
    prompt=restatement_prompt,
)
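

# --- Usage sketch (not part of the original file; an assumption about how the
# --- pieces above fit together). Note that `memory` is not attached to the
# --- chain via a `memory=` argument, so chat_history is passed in explicitly.
# --- The seeded messages and the follow-up question are hypothetical examples.
if __name__ == "__main__":
    # Seed the memory with a prior exchange (hypothetical content)
    memory.chat_memory.add_user_message("Tell me about the Groq LPU.")
    memory.chat_memory.add_ai_message("The Groq LPU is a hardware accelerator for LLM inference.")

    # Rewrite a context-dependent follow-up into a standalone question.
    # With return_messages=True, load_memory_variables returns the message list
    # under the default "history" key, which feeds the chat_history placeholder.
    result = restatement_chain.invoke({
        "chat_history": memory.load_memory_variables({})["history"],
        "input": "How fast is it?",
    })
    print(result["text"])  # LLMChain puts its output under the "text" key by default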