from spinoza_project.source.backend.prompt_utils import SpecialTokens, make_chat_prompt
from langchain.prompts.chat import ChatPromptTemplate
def get_qa_prompts(config, prompts):
    """Build the chat prompt templates used by the question-answering pipeline.

    Args:
        config: Project configuration object, used to derive the model's
            special tokens via ``SpecialTokens``.
        prompts: Mapping of raw prompt strings. Must contain the keys
            ``"role_instruction"``, ``"source_prompt"``,
            ``"question_answering_prompt"`` and ``"reformulation_prompt"``.

    Returns:
        A 2-tuple ``(chat_qa_prompt, chat_reformulation_prompt)`` of
        ``ChatPromptTemplate`` instances: the first combines role
        instruction, source context and the QA prompt; the second wraps
        only the question-reformulation prompt.

    Raises:
        KeyError: If any of the required prompt keys is missing.
    """
    special_tokens = SpecialTokens(config)

    # Wrap each raw prompt string with the model-specific special tokens.
    role_instruction = make_chat_prompt(prompts["role_instruction"], special_tokens)
    source_prompt = make_chat_prompt(prompts["source_prompt"], special_tokens)
    question_answering_prompt = make_chat_prompt(
        prompts["question_answering_prompt"], special_tokens
    )
    reformulation_prompt = make_chat_prompt(
        prompts["reformulation_prompt"], special_tokens
    )

    # Message order matters: the role instruction frames the conversation,
    # the sources supply context, and the QA prompt carries the question.
    chat_qa_prompt = ChatPromptTemplate.from_messages(
        [
            role_instruction,
            source_prompt,
            question_answering_prompt,
        ]
    )
    chat_reformulation_prompt = ChatPromptTemplate.from_messages([reformulation_prompt])

    return chat_qa_prompt, chat_reformulation_prompt