Spaces:
Build error
Update app.py
app.py CHANGED
@@ -116,20 +116,20 @@ def rag_chain(llm, prompt, db):
     return completion, rag_chain
 
 def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms, end_time_ms):
-    wandb.init(project = "openai-llm-rag")
     if (rag_option == "Off" or str(status_msg) != ""):
         result = completion
     else:
         result = completion["result"]
-
-
-
+        docs_meta_0 = completion["source_documents"][0].metadata
+        docs_meta_1 = completion["source_documents"][1].metadata
+        docs_meta_2 = completion["source_documents"][2].metadata
+    wandb.init(project = "openai-llm-rag")
     trace = Trace(
         kind = "chain",
         name = type(chain).__name__ if (chain != None) else "",
         status_code = "SUCCESS" if (str(status_msg) == "") else "ERROR",
         status_message = str(status_msg),
-        metadata={
+        metadata = {
             "chunk_overlap": "" if (rag_option == "Off") else config["chunk_overlap"],
             "chunk_size": "" if (rag_option == "Off") else config["chunk_size"],
             "k": "" if (rag_option == "Off") else config["k"],
@@ -139,9 +139,9 @@ def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms
         inputs = {"rag_option": rag_option if (str(status_msg) == "") else "",
                   "prompt": str(prompt if (str(status_msg) == "") else ""),
                   "prompt_template": str((llm_template if (rag_option == "Off") else rag_template) if (str(status_msg) == "") else ""),
-                  "
-                  "
-                  "
+                  "docs_meta_0": "" if (rag_option == "Off" or str(status_msg) != "") else str(docs_meta_0),
+                  "docs_meta_1": "" if (rag_option == "Off" or str(status_msg) != "") else str(docs_meta_1),
+                  "docs_meta_2": "" if (rag_option == "Off" or str(status_msg) != "") else str(docs_meta_2)},
         outputs = {"result": result},
         start_time_ms = start_time_ms,
         end_time_ms = end_time_ms
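In short, the commit moves wandb.init() to run after the completion has been unpacked, pulls the metadata of the first three retrieved source documents out of the RetrievalQA-style result, and records them in the trace inputs as docs_meta_0/1/2. Below is a minimal, self-contained sketch of that flow, not the full app.py: the helper name log_rag_trace, the mocked _Doc/completion objects, and the run and trace names are stand-ins, and it assumes the Trace referenced in the diff is W&B's trace-tree Trace (wandb.sdk.data_types.trace_tree.Trace).

# Sketch only: stand-alone illustration of the post-change tracing flow.
import time

import wandb
from wandb.sdk.data_types.trace_tree import Trace  # assumed import path for the Trace used in app.py


def log_rag_trace(completion, prompt, start_time_ms, end_time_ms):
    # The commit reads the metadata of the first three retrieved documents;
    # this assumes the retriever returned at least three source documents.
    docs_meta = [doc.metadata for doc in completion["source_documents"][:3]]

    wandb.init(project = "openai-llm-rag")
    trace = Trace(
        kind = "chain",
        name = "RetrievalQA",            # stand-in; app.py derives this from type(chain).__name__
        status_code = "SUCCESS",
        status_message = "",
        inputs = {"prompt": str(prompt),
                  "docs_meta_0": str(docs_meta[0]),
                  "docs_meta_1": str(docs_meta[1]),
                  "docs_meta_2": str(docs_meta[2])},
        outputs = {"result": completion["result"]},
        start_time_ms = start_time_ms,
        end_time_ms = end_time_ms,
    )
    trace.log("trace")   # key under which the trace appears in the W&B run
    wandb.finish()


if __name__ == "__main__":
    # Mocked completion standing in for a LangChain RetrievalQA result with
    # source documents returned; a real run would pass the chain's output.
    class _Doc:
        def __init__(self, metadata):
            self.metadata = metadata

    completion = {
        "result": "answer text",
        "source_documents": [_Doc({"source": f"doc_{i}.pdf", "page": i}) for i in range(3)],
    }
    now_ms = round(time.time() * 1000)
    log_rag_trace(completion, "What does the document say about RAG?", now_ms, now_ms + 1)

A likely motivation for reordering wandb.init() below the unpacking is that no run is opened until the completion has been read successfully, though the commit itself does not state this.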