bstraehle committed on
Commit
968a729
·
1 Parent(s): f45ad50

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -2
app.py CHANGED
@@ -134,10 +134,13 @@ def wandb_trace(rag_option, prompt, completion, result, chain, err_msg, start_ti
134
  "temperature": config["temperature"],
135
  },
136
  inputs = {"rag_option": rag_option if (str(err_msg) == "") else "",
137
- "prompt": str(prompt if (str(err_msg) == "") else ""),
138
- "prompt_template": str((llm_template if (rag_option == RAG_OFF) else rag_template) if (str(err_msg) == "") else "")},
139
  outputs = {"result": result if (str(err_msg) == "") else "",
140
  "completion": str(completion)},
 
 
 
141
  start_time_ms = start_time_ms,
142
  end_time_ms = end_time_ms
143
  )
 
134
  "temperature": config["temperature"],
135
  },
136
  inputs = {"rag_option": rag_option if (str(err_msg) == "") else "",
137
+ "prompt": str(prompt if (str(err_msg) == "") else "")},
138
+ #"prompt_template": str((llm_template if (rag_option == RAG_OFF) else rag_template) if (str(err_msg) == "") else "")},
139
  outputs = {"result": result if (str(err_msg) == "") else "",
140
  "completion": str(completion)},
141
+ dict = {"llm_client": chain.llm.client if (rag_option == RAG_OFF) else chain.combine_documents_chain.llm_chain.llm.client,
142
+ "prompt_engineered": chain.prompt if (rag_option == RAG_OFF) else chain.combine_documents_chain.llm_chain.prompt,
143
+ "retriever": "" if (rag_option == RAG_OFF) else chain.retriever},
144
  start_time_ms = start_time_ms,
145
  end_time_ms = end_time_ms
146
  )