John Graham Reynolds committed
Commit fc8d6af · 1 Parent(s): 5029f5a

try using partial to partially format the system prompt

Files changed (1): chain.py +2 -1
chain.py CHANGED

@@ -2,6 +2,7 @@ import os
 import mlflow
 import datetime
 import streamlit as st
+from functools import partial
 from operator import itemgetter
 from langchain_huggingface import HuggingFaceEmbeddings
 from langchain_databricks.vectorstores import DatabricksVectorSearch
@@ -100,7 +101,7 @@ class ChainBuilder:
         # Prompt Template for generation
         prompt = ChatPromptTemplate.from_messages(
             [
-                ("system", self.llm_config.get("llm_prompt_template").format(date_str=datetime.datetime.now().strftime("%B %d, %Y"))), # add current date to the date_str var in system prompt
+                ("system", partial(self.llm_config.get("llm_prompt_template").format, date_str=datetime.datetime.now().strftime("%B %d, %Y"))), # add current date to the date_str var in system prompt
                 # *** Note: This chain does not compress the history, so very long conversations can overflow the context window. TODO
                 # We need to at some point chop this history down to a fixed amount of recent messages
                 MessagesPlaceholder(variable_name="formatted_chat_history"),
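
For reference, a minimal standalone sketch of what the partial(...) call above is doing, assuming the configured llm_prompt_template is a plain Python format string with a {date_str} placeholder alongside other variables; the template text and the {context} placeholder below are hypothetical, not taken from chain.py.

import datetime
from functools import partial

# Hypothetical stand-in for llm_config.get("llm_prompt_template")
template = "Today's date is {date_str}. Answer the question using this context: {context}"

# partial() binds date_str to the template's .format method now, while leaving
# any remaining placeholders to be supplied later at call time.
fill_remaining = partial(template.format, date_str=datetime.datetime.now().strftime("%B %d, %Y"))

print(fill_remaining(context="retrieved documents"))  # date already baked in, context filled in here

This sketch only illustrates the plain functools behavior on Python strings; LangChain prompt templates also expose their own partial()/partial_variables mechanism for pre-filling template variables.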