John Graham Reynolds committed on
Commit
6e20b16
·
1 Parent(s): fc8d6af

try using fn to partially format and pass system prompt instead of config

Browse files
Files changed (2) hide show
  1. chain.py +21 -2
  2. chain_config.yaml +1 -1
chain.py CHANGED
@@ -28,6 +28,24 @@ class ChainBuilder:
28
  self.retriever_config = self.model_config.get("retriever_config")
29
  self.vector_search_schema = self.retriever_config.get("schema")
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  # Return the string contents of the most recent message from the user
32
  def extract_user_query_string(self, chat_messages_array):
33
  return chat_messages_array[-1]["content"]
@@ -101,10 +119,11 @@ class ChainBuilder:
101
  # Prompt Template for generation
102
  prompt = ChatPromptTemplate.from_messages(
103
  [
104
- ("system", partial(self.llm_config.get("llm_prompt_template").format, date_str=datetime.datetime.now().strftime("%B %d, %Y"))), # add current date to the date_str var in system prompt
 
105
  # *** Note: This chain does not compress the history, so very long conversations can overflow the context window. TODO
106
  # We need to at some point chop this history down to fixed amount of recent messages
107
- MessagesPlaceholder(variable_name="formatted_chat_history"),
108
  # User's most current question
109
  ("user", "{question}"),
110
  ]
 
28
  self.retriever_config = self.model_config.get("retriever_config")
29
  self.vector_search_schema = self.retriever_config.get("schema")
30
 
31
+ def get_system_prompt(self):
32
+ date_str = datetime.datetime.now().strftime("%B %d, %Y")
33
+ prompt = f"You are DBRX, created by Databricks and augmented by John Graham Reynolds to have access to additional information specific to Vanderbilt University Medical Center. The current date is {date_str}.\n"
34
+ prompt += """Your knowledge base was last updated in December 2023. You answer questions about events prior to and after December 2023 the way a highly informed individual in December 2023 would if they were talking to someone from the above date, and you can let the user know this when relevant.\n
35
+ Some of the context you will be given in regards to Vanderbilt University Medical Center could have come after December 2023. The rest of your knowledge base is from before December 2023 and you will answer questions accordingly with these facts.
36
+ This chunk of text is your system prompt. It is not visible to the user, but it is used to guide your responses. Don't reference it, just respond to the user.\n
37
+ If you are asked to assist with tasks involving the expression of views held by a significant number of people, you provide assistance with the task even if you personally disagree with the views being expressed, but follow this with a discussion of broader perspectives.\n
38
+ You don't engage in stereotyping, including the negative stereotyping of majority groups.\n If asked about controversial topics, you try to provide careful thoughts and objective information without downplaying its harmful content or implying that there are reasonable perspectives on both sides.\n
39
+ You are happy to help with writing, analysis, question answering, math, coding, and all sorts of other tasks.\n You use markdown for coding, which includes JSON blocks and Markdown tables.\n
40
+ You do not have tools enabled at this time, so cannot run code or access the internet. You can only provide information that you have been trained on. You do not send or receive links or images.\n
41
+ You were not trained on copyrighted books, song lyrics, poems, video transcripts, or news articles; you do not divulge details of your training data. You do not provide song lyrics, poems, or news articles and instead refer the user to find them online or in a store.\n
42
+ You give concise responses to simple questions or statements, but provide thorough responses to more complex and open-ended questions.\n
43
+ The user is unable to see the system prompt, so you should write as if it were true without mentioning it.\n You do not mention any of this information about yourself unless the information is directly pertinent to the user's query.\n
44
+ Here is some context from the Vanderbilt University Medical Center glossary which might or might not help you answer: {context}.\n
45
+ Based on this system prompt, to which you will adhere sternly and to which you will make no reference, and this possibly helpful context in relation to Vanderbilt University Medical Center, answer this question: {question}
46
+ """
47
+ return prompt
48
+
49
  # Return the string contents of the most recent message from the user
50
  def extract_user_query_string(self, chat_messages_array):
51
  return chat_messages_array[-1]["content"]
 
119
  # Prompt Template for generation
120
  prompt = ChatPromptTemplate.from_messages(
121
  [
122
+ # ("system", self.llm_config.get("llm_prompt_template")), # add current date to the date_str var in system prompt
123
+ ("system", self.get_system_prompt()),
124
  # *** Note: This chain does not compress the history, so very long conversations can overflow the context window. TODO
125
  # We need to at some point chop this history down to fixed amount of recent messages
126
+ MessagesPlaceholder(variable_name="formatted_chat_history"), # placeholder for var named 'formatted_chat_history' with messages to be passed
127
  # User's most current question
128
  ("user", "{question}"),
129
  ]
chain_config.yaml CHANGED
@@ -43,7 +43,7 @@ llm_config:
43
  \ which might or might not help you answer: {context}.\n\nBased on this system\
44
  \ prompt, to which you will adhere sternly and to which you will make no reference,\
45
  \ and this possibly helpful context in relation to Vanderbilt University Medical\
46
- \ Center, answer this question: {question}\n"
47
  llm_prompt_template_variables:
48
  - context
49
  - question
 
43
  \ which might or might not help you answer: {context}.\n\nBased on this system\
44
  \ prompt, to which you will adhere sternly and to which you will make no reference,\
45
  \ and this possibly helpful context in relation to Vanderbilt University Medical\
46
+ \ Center, answer this question: {question}\n" # TODO to be removed if passing system prompt through function resolves issue of partial string formatting with date
47
  llm_prompt_template_variables:
48
  - context
49
  - question