import os

from langchain.memory import ConversationBufferMemory
from langchain.chains import LLMChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    MessagesPlaceholder,
    HumanMessagePromptTemplate,
)

from langchain_groq import ChatGroq

# Conversation buffer that returns the history as a list of messages;
# memory_key must match the MessagesPlaceholder variable name below
memory = ConversationBufferMemory(
    memory_key="chat_history",
    return_messages=True,
)

# System prompt: rewrite a follow-up question so it stands alone,
# without answering it
restatement_system_prompt = (
    "Given a chat history and the latest user question "
    "which might reference context in the chat history, "
    "formulate a standalone question that can be understood "
    "without the chat history. Do NOT answer the question, "
    "just reformulate it if needed; otherwise return it as is."
)

# Assemble the prompt: system instructions, then the prior conversation,
# then the latest user question
restatement_prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(restatement_system_prompt),
    MessagesPlaceholder(variable_name="chat_history"),
    HumanMessagePromptTemplate.from_template("{input}"),
])

# Groq-hosted Llama 3 70B model used for the restatement step
restatement_llm = ChatGroq(
    model="llama3-70b-8192",
    groq_api_key=os.environ["GROQ_API_KEY"],
)

# Tie prompt, model, and memory together; the memory injects chat_history
# on each call and records the exchange afterwards
restatement_chain = LLMChain(
    llm=restatement_llm,
    prompt=restatement_prompt,
    memory=memory,
)
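
# A minimal usage sketch (these questions are illustrative, not from the
# original): the first call seeds the memory, the second relies on the
# stored history to resolve the pronoun "it".
first = restatement_chain.invoke({"input": "What is LangChain?"})
follow_up = restatement_chain.invoke({"input": "Who maintains it?"})
print(follow_up["text"])  # LLMChain returns its output under the "text" key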