bstraehle committed on
Commit
8484d1a
·
1 Parent(s): ddfaa69

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -19
app.py CHANGED
@@ -128,27 +128,27 @@ def wandb_trace(rag_option, prompt, completion, result, generation_info, llm_out
128
  status_code = "success" if (str(err_msg) == "") else "error",
129
  status_message = str(err_msg),
130
  metadata = {
131
- "chunk_overlap": config["chunk_overlap"] if (str(err_msg) == "" and rag_option != RAG_OFF) else "",
132
- "chunk_size": config["chunk_size"] if (str(err_msg) == "" and rag_option != RAG_OFF) else "",
133
- },
134
  inputs = {"rag_option": rag_option,
135
  "prompt": prompt,
136
- } if (str(err_msg) == "") else "",
137
- outputs = {"result": result if (str(err_msg) == "") else "",
138
- "generation_info": str(generation_info) if (str(err_msg) == "") else "",
139
- "llm_output": str(llm_output) if (str(err_msg) == "") else "",
140
- "completion": str(completion) if (str(err_msg) == "") else "",
141
- },
142
- model_dict = {"llm_client": (str(chain.llm.client) if (rag_option == RAG_OFF) else
143
- str(chain.combine_documents_chain.llm_chain.llm.client)) if (str(err_msg) == "") else "",
144
- "llm_model_name": (str(chain.llm.model_name) if (rag_option == RAG_OFF) else
145
- str(chain.combine_documents_chain.llm_chain.llm.model_name)) if (str(err_msg) == "") else "",
146
- "llm_temperature": (str(chain.llm.temperature) if (rag_option == RAG_OFF) else
147
- str(chain.combine_documents_chain.llm_chain.llm.temperature)) if (str(err_msg) == "") else "",
148
- "chain_prompt": (str(chain.prompt) if (rag_option == RAG_OFF) else
149
- str(chain.combine_documents_chain.llm_chain.prompt)) if (str(err_msg) == "") else "",
150
- "chain_retriever": ("" if (rag_option == RAG_OFF) else str(chain.retriever)) if (str(err_msg) == "") else "",
151
- },
152
  start_time_ms = start_time_ms,
153
  end_time_ms = end_time_ms
154
  )
 
128
  status_code = "success" if (str(err_msg) == "") else "error",
129
  status_message = str(err_msg),
130
  metadata = {
131
+ "chunk_overlap": config["chunk_overlap"] if (rag_option != RAG_OFF) else "",
132
+ "chunk_size": config["chunk_size"] if (rag_option != RAG_OFF) else "",
133
+ } if (str(err_msg) == "") else {},
134
  inputs = {"rag_option": rag_option,
135
  "prompt": prompt,
136
+ } if (str(err_msg) == "") else {},
137
+ outputs = {"result": result,
138
+ "generation_info": str(generation_info),
139
+ "llm_output": str(llm_output),
140
+ "completion": str(completion),
141
+ } if (str(err_msg) == "") else {},
142
+ model_dict = {"client": (str(chain.llm.client) if (rag_option == RAG_OFF) else
143
+ str(chain.combine_documents_chain.llm_chain.llm.client)),
144
+ "model_name": (str(chain.llm.model_name) if (rag_option == RAG_OFF) else
145
+ str(chain.combine_documents_chain.llm_chain.llm.model_name)),
146
+ "temperature": (str(chain.llm.temperature) if (rag_option == RAG_OFF) else
147
+ str(chain.combine_documents_chain.llm_chain.llm.temperature)),
148
+ "prompt": (str(chain.prompt) if (rag_option == RAG_OFF) else
149
+ str(chain.combine_documents_chain.llm_chain.prompt)),
150
+ "retriever": ("" if (rag_option == RAG_OFF) else str(chain.retriever)),
151
+ } if (str(err_msg) == "") else {},
152
  start_time_ms = start_time_ms,
153
  end_time_ms = end_time_ms
154
  )