Update app.py
app.py CHANGED
@@ -124,10 +124,10 @@ def wandb_trace(rag_option, prompt, completion, chain, err_msg, start_time_ms, e
     if (str(err_msg) != ""):
         result = ""
     elif (rag_option == RAG_OFF):
-        result = completion.generations[0][0].text if (completion.generations[0] != None and
+        result = completion.generations[0][0].text if (completion.generations[0] != None and
+                                                       completion.generations[0][0] != None) else ""
     else:
         result = completion["result"]
-        docs_meta = str([doc.metadata for doc in completion["source_documents"]])
     wandb.init(project = "openai-llm-rag")
     trace = Trace(
         kind = "chain",
@@ -143,8 +143,7 @@ def wandb_trace(rag_option, prompt, completion, chain, err_msg, start_time_ms, e
                   },
         inputs = {"rag_option": rag_option if (str(err_msg) == "") else "",
                   "prompt": str(prompt if (str(err_msg) == "") else ""),
-                  "prompt_template": str((llm_template if (rag_option == RAG_OFF) else rag_template) if (str(err_msg) == "") else "")
-                  "docs_meta": "" if (str(err_msg) != "" or rag_option == RAG_OFF) else docs_meta},
+                  "prompt_template": str((llm_template if (rag_option == RAG_OFF) else rag_template) if (str(err_msg) == "") else "")}
         outputs = {"result": result,
                    "completion": str(completion)},
         start_time_ms = start_time_ms,
@@ -183,7 +182,8 @@ def invoke(openai_api_key, rag_option, prompt):
             result = completion["result"]
         else:
             completion, chain = llm_chain(llm, prompt)
-            result = completion.generations[0][0].text if (completion.generations[0] != None and
+            result = completion.generations[0][0].text if (completion.generations[0] != None and
+                                                           completion.generations[0][0] != None) else ""
     except Exception as e:
         err_msg = e
         raise gr.Error(e)
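Note on the added guard: it follows the nested shape of a LangChain completion object, where generations is a list of lists and the text of the first candidate lives at generations[0][0].text. Below is a minimal, self-contained sketch of that guard. The Generation and LLMResult classes are simplified stand-ins for the LangChain types (only the .generations / .text shape used in app.py is modeled), and extract_text is a hypothetical helper, not a function from this commit.

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Generation:                     # simplified stand-in for langchain.schema.Generation
    text: str

@dataclass
class LLMResult:                      # simplified stand-in for langchain.schema.LLMResult
    generations: List[List[Optional[Generation]]]

def extract_text(completion: LLMResult) -> str:
    # Same guarded expression as the committed code: fall back to "" when the
    # first generation (or its first entry) is None instead of indexing into it.
    return completion.generations[0][0].text if (completion.generations[0] != None and
                                                 completion.generations[0][0] != None) else ""

print(extract_text(LLMResult(generations = [[Generation(text = "Paris")]])))   # prints: Paris
print(extract_text(LLMResult(generations = [[None]])))                         # prints an empty string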
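For context, a minimal sketch of the W&B Trace logging pattern that wandb_trace builds on. Only project = "openai-llm-rag", kind = "chain", inputs, outputs and start_time_ms are visible in the diff; the import path, the name and end_time_ms fields, the trace.log call, and all example values are assumptions based on the wandb prompts Trace API, not part of this commit.

import time
import wandb
from wandb.sdk.data_types.trace_tree import Trace   # assumed import path for the Trace type

start_time_ms = round(time.time() * 1000)
# ... run the LLM or RAG chain here ...
end_time_ms = round(time.time() * 1000)

wandb.init(project = "openai-llm-rag")
trace = Trace(
    name = "llm-rag-trace",                         # illustrative name, not from the diff
    kind = "chain",
    inputs = {"rag_option": "Off",                  # illustrative inputs
              "prompt": "What is RAG?"},
    outputs = {"result": "Retrieval-augmented generation ...",
               "completion": "<raw completion object as string>"},
    start_time_ms = start_time_ms,
    end_time_ms = end_time_ms,
)
trace.log(name = "trace")                           # records the trace in the active run
wandb.finish()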