from langchain.prompts import PromptTemplate

from .base import PromptTemplateFactory


class QueryExpansionTemplate(PromptTemplateFactory):
    prompt: str = """You are an AI language model assistant. Your task is to generate {expand_to_n}
different versions of the given user question to retrieve relevant documents from a vector
database. By generating multiple perspectives on the user question, your goal is to help
the user overcome some of the limitations of the distance-based similarity search.
Provide these alternative questions separated by '{separator}'.
Original question: {question}"""

    @property
    def separator(self) -> str:
        # Marker used to split the LLM output into the individual expanded queries.
        return "#next-question#"

    def create_template(self, expand_to_n: int) -> PromptTemplate:
        # Only the question is supplied at formatting time; the separator and the
        # number of expansions are baked in as partial variables.
        return PromptTemplate(
            template=self.prompt,
            input_variables=["question"],
            partial_variables={
                "separator": self.separator,
                "expand_to_n": expand_to_n,
            },
        )


class AnswerGenerationTemplate(PromptTemplateFactory):
    prompt: str = """You are an AI language model assistant. Your task is to generate an answer to the given user question based on the provided context.
Context: {context}
Question: {question}"""

    # Give only your answer, do not include any other text like 'Certainly! Here is the answer:' or 'The answer is:' or anything similar.
    # Give your answer in markdown format if needed, for example if a table is the best way to answer the question, or if titles and subheadings are needed.

    def create_template(self, context: str, question: str) -> str:
        return self.prompt.format(context=context, question=question)
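
# Illustrative usage sketch (not part of the original file). It formats both prompts with
# made-up inputs and prints them; in a RAG pipeline the expansion prompt would be sent to
# an LLM and its output split on `separator` to obtain the individual query variants.
# Running this block assumes the module is executed inside its package so the relative
# import above resolves.
if __name__ == "__main__":
    expansion_prompt = QueryExpansionTemplate().create_template(expand_to_n=3).format(
        question="What is a vector database?"
    )
    print(expansion_prompt)

    answer_prompt = AnswerGenerationTemplate().create_template(
        context="A vector database stores embeddings and retrieves them by similarity.",
        question="What is a vector database?",
    )
    print(answer_prompt)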