Hugging Face Space — build status: Build error
Commit: "Update app.py" (Browse files)
File changed: app.py
@@ -128,27 +128,27 @@ def wandb_trace(rag_option, prompt, completion, result, generation_info, llm_output, ...)

Before (app.py lines 128–154):

    status_code = "success" if (str(err_msg) == "") else "error",
    status_message = str(err_msg),
    metadata = {
-       "chunk_overlap": config["chunk_overlap"] if (
-       "chunk_size": config["chunk_size"] if (
-   },
    inputs = {"rag_option": rag_option,
              "prompt": prompt,
-   } if (str(err_msg) == "") else
-   outputs = {"result": result
-              "generation_info": str(generation_info)
-              "llm_output": str(llm_output)
-              "completion": str(completion)
-   },
-   model_dict = {"
-   "
-   "
-   "
-   "
-   },
    [NOTE: the old lines 142–151 above (the model_dict entries) were truncated to
    bare quote fragments by the page extraction; their original content is not
    recoverable from this view.]
    start_time_ms = start_time_ms,
    end_time_ms = end_time_ms
    )

After (app.py lines 128–154):

    status_code = "success" if (str(err_msg) == "") else "error",
    status_message = str(err_msg),
    metadata = {
+       "chunk_overlap": config["chunk_overlap"] if (rag_option != RAG_OFF) else "",
+       "chunk_size": config["chunk_size"] if (rag_option != RAG_OFF) else "",
+   } if (str(err_msg) == "") else {},
    inputs = {"rag_option": rag_option,
              "prompt": prompt,
+   } if (str(err_msg) == "") else {},
+   outputs = {"result": result,
+              "generation_info": str(generation_info),
+              "llm_output": str(llm_output),
+              "completion": str(completion),
+   } if (str(err_msg) == "") else {},
+   model_dict = {"client": (str(chain.llm.client) if (rag_option == RAG_OFF) else
+                            str(chain.combine_documents_chain.llm_chain.llm.client)),
+                 "model_name": (str(chain.llm.model_name) if (rag_option == RAG_OFF) else
+                                str(chain.combine_documents_chain.llm_chain.llm.model_name)),
+                 "temperature": (str(chain.llm.temperature) if (rag_option == RAG_OFF) else
+                                 str(chain.combine_documents_chain.llm_chain.llm.temperature)),
+                 "prompt": (str(chain.prompt) if (rag_option == RAG_OFF) else
+                            str(chain.combine_documents_chain.llm_chain.prompt)),
+                 "retriever": ("" if (rag_option == RAG_OFF) else str(chain.retriever)),
+   } if (str(err_msg) == "") else {},
    start_time_ms = start_time_ms,
    end_time_ms = end_time_ms
    )

Summary of the change: the commit fixes the earlier build error by completing the
truncated conditional expressions (each dict literal now falls back to `{}` when
`err_msg` is non-empty), adding the missing commas inside the `outputs` dict, and
filling in the `model_dict` entries, which read the client/model_name/temperature/
prompt from `chain.llm` when RAG is off and from
`chain.combine_documents_chain.llm_chain.llm` (plus `chain.retriever`) otherwise.