Update app.py
Browse files
app.py
CHANGED
@@ -44,8 +44,8 @@ langchain.verbose = True
|
|
44 |
|
45 |
template = """If you don't know the answer, just say that you don't know, don't try to make up an answer. Keep the answer as concise as possible. Always say "Thanks for using the 🧠 app - Bernd" at the end of the answer. """
|
46 |
|
47 |
-
llm_template = "Answer the question at the end. " + template + "Question: {question} Helpful Answer: "
|
48 |
-
rag_template = "Use the following pieces of context to answer the question at the end. " + template + "{context} Question: {question} Helpful Answer: "
|
49 |
|
50 |
LLM_CHAIN_PROMPT = PromptTemplate(input_variables = ["question"], template = llm_template)
|
51 |
RAG_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"], template = rag_template)
|
@@ -130,13 +130,12 @@ def wandb_trace(rag_option, prompt, completion, status_msg, start_time_ms, end_t
|
|
130 |
"model": config["model"],
|
131 |
"temperature": config["temperature"],
|
132 |
},
|
133 |
-
inputs = {"rag_option": rag_option,
|
134 |
"prompt": str(prompt if (str(status_msg) == "") else ""),
|
135 |
"prompt_template": str((llm_template if (rag_option == "Off") else rag_template) if (str(status_msg) == "") else "")},
|
136 |
outputs = {"completion": str(completion)},
|
137 |
start_time_ms = start_time_ms,
|
138 |
-
end_time_ms = end_time_ms
|
139 |
-
#model_dict = {"llm": str(llm)}
|
140 |
)
|
141 |
trace.log("test")
|
142 |
wandb.finish()
|
|
|
44 |
|
45 |
template = """If you don't know the answer, just say that you don't know, don't try to make up an answer. Keep the answer as concise as possible. Always say "Thanks for using the 🧠 app - Bernd" at the end of the answer. """
|
46 |
|
47 |
+
llm_template = "Answer the question at the end. " + template + "Question: {question}. Helpful Answer: "
|
48 |
+
rag_template = "Use the following pieces of context to answer the question at the end. " + template + "{context} Question: {question}. Helpful Answer: "
|
49 |
|
50 |
LLM_CHAIN_PROMPT = PromptTemplate(input_variables = ["question"], template = llm_template)
|
51 |
RAG_CHAIN_PROMPT = PromptTemplate(input_variables = ["context", "question"], template = rag_template)
|
|
|
130 |
"model": config["model"],
|
131 |
"temperature": config["temperature"],
|
132 |
},
|
133 |
+
inputs = {"rag_option": rag_option if (str(status_msg) == "") else "",
|
134 |
"prompt": str(prompt if (str(status_msg) == "") else ""),
|
135 |
"prompt_template": str((llm_template if (rag_option == "Off") else rag_template) if (str(status_msg) == "") else "")},
|
136 |
outputs = {"completion": str(completion)},
|
137 |
start_time_ms = start_time_ms,
|
138 |
+
end_time_ms = end_time_ms
|
|
|
139 |
)
|
140 |
trace.log("test")
|
141 |
wandb.finish()
|