Update app.py
app.py CHANGED
@@ -32,6 +32,8 @@ MONGODB_INDEX_NAME = "default"
 
 description = os.environ["DESCRIPTION"]
 
+#langchain.verbose = True
+
 config = {
     "chunk_overlap": 150,
     "chunk_size": 1500,
@@ -40,8 +42,6 @@ config = {
     "temperature": 0,
 }
 
-langchain.verbose = True
-
 template = """If you don't know the answer, just say that you don't know, don't try to make up an answer. Keep the answer as concise as possible. Always say "Thanks for using the 🧠 app - Bernd" at the end of the answer. """
 
 llm_template = "Answer the question at the end. " + template + "Question: {question} Helpful Answer: "
@@ -118,7 +118,7 @@ def rag_chain(llm, prompt, db):
 
 def wandb_trace(rag_option, prompt, completion, status_msg, start_time_ms, end_time_ms):
     wandb.init(project = "openai-llm-rag")
-    if (rag_option == "Off"):
+    if (rag_option == "Off" or str(status_msg) != ""):
         result = completion
     else:
         result = completion["result"]
@@ -141,9 +141,9 @@ def wandb_trace(rag_option, prompt, completion, status_msg, start_time_ms, end_time_ms):
                "prompt": str(prompt if (str(status_msg) == "") else ""),
                "prompt_template": str((llm_template if (rag_option == "Off") else rag_template) if (str(status_msg) == "") else "")},
     outputs = {"result": result,
-               "document_0": "" if (rag_option == "Off") else str(document_0),
-               "document_1": "" if (rag_option == "Off") else str(document_1),
-               "document_2": "" if (rag_option == "Off") else str(document_2)},
+               "document_0": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_0),
+               "document_1": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_1),
+               "document_2": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_2)},
     start_time_ms = start_time_ms,
     end_time_ms = end_time_ms
     )
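Taken together, the change routes error runs through the same branch as RAG-off runs. The snippet below is a small illustrative sketch, not part of the commit: the helper name trace_fields and the documents argument are invented for the example, but the condition mirrors the updated guard in wandb_trace.

# Illustrative sketch (not part of the commit): the selection logic that
# wandb_trace now applies before logging. The helper name and the
# `documents` argument are assumptions made for this example.
def trace_fields(rag_option, completion, status_msg, documents):
    if rag_option == "Off" or str(status_msg) != "":
        # No RAG chain ran, or the call failed: log the raw completion
        # and leave the source-document slots empty.
        return completion, ["", "", ""]
    # RAG path: completion is a RetrievalQA-style dict with a "result" key.
    return completion["result"], [str(doc) for doc in documents[:3]]

The same condition now guards both the result selection and the document_0/document_1/document_2 outputs, so a failed request can still be traced without referencing retrieval results that were never produced.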