bstraehle committed
Commit a5e97a2 · 1 Parent(s): 192e500

Update app.py

Files changed (1):
  1. app.py  +5 -3
app.py CHANGED
@@ -124,11 +124,11 @@ def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms
     print("1=" + str(chain.llm.temperature))
     print("2=" + str(chain.prompt.input_variables))
     print("2=" + str(chain.prompt.template))
-    if (type(chain).__name__ == "RetrievalQA"):
+    #if (type(chain).__name__ == "RetrievalQA"):
     #print(chain.combine_documents_chain.llm_chain.llm)
     #print(chain.combine_documents_chain.llm_chain.async_client)
-        print(chain.combine_documents_chain.retriever)
-        print(chain.combine_documents_chain.vectorstore)
+        #print(chain.combine_documents_chain.retriever)
+        #print(chain.combine_documents_chain.vectorstore)
     #print("3=" + str(chain.llm))
     #print("4=" + str(chain.chain_type_kwargs))
     #print("5=" + str(chain.retriever))
@@ -137,6 +137,7 @@ def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms
         result = completion
     else:
         result = completion["result"]
+        documents = completion["source_documents"]
         document_0 = completion["source_documents"][0]
         document_1 = completion["source_documents"][1]
         document_2 = completion["source_documents"][2]
@@ -155,6 +156,7 @@ def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms
     inputs = {"rag_option": rag_option if (str(status_msg) == "") else "",
              "prompt": str(prompt if (str(status_msg) == "") else ""),
              "prompt_template": str((llm_template if (rag_option == "Off") else rag_template) if (str(status_msg) == "") else ""),
+             "documents": "" if (rag_option == "Off" or str(status_msg) != "") else str(documents),
             "document_0": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_0),
             "document_1": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_1),
             "document_2": "" if (rag_option == "Off" or str(status_msg) != "") else str(document_2)},