Update app.py
app.py
CHANGED
@@ -118,8 +118,13 @@ def rag_chain(llm, prompt, db):
 def wandb_trace(rag_option, prompt, completion, chain, status_msg, start_time_ms, end_time_ms):
     if (chain != None):
         if (type(chain).__name__ == "LLMChain"):
-            print("1=" + str(chain.llm))
-            print("
+            print("1=" + str(chain.llm.client))
+            print("1=" + str(chain.llm.async_client))
+            print("1=" + str(chain.llm.model_name))
+            print("1=" + str(chain.llm.temperature))
+            print("1=" + str(chain.llm.openai_proxy))
+            print("2=" + str(chain.prompt.input_variables))
+            print("2=" + str(chain.prompt.template))
         if (type(chain).__name__ == "RetrievalQA"):
             print(chain)
             #print("3=" + str(chain.llm))
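For context, the added lines print individual LLMChain attributes (chain.llm.client, chain.llm.model_name, chain.llm.temperature, chain.prompt.template, etc.) inside wandb_trace. Below is a minimal sketch, not part of the commit, that gathers the same fields into a dict so they could be logged or attached to a trace rather than printed; the attribute names are assumed from the diff above and may not exist on every LLM class, hence the getattr fallbacks.

# Hedged sketch: collect the LLMChain fields inspected by the debug prints above.
# Attribute names are taken from the diff and are assumptions for other LLM types,
# so getattr with a None default is used to avoid AttributeError.
def llm_chain_metadata(chain):
    llm = chain.llm
    return {
        "model_name": getattr(llm, "model_name", None),
        "temperature": getattr(llm, "temperature", None),
        "openai_proxy": getattr(llm, "openai_proxy", None),
        "prompt_input_variables": list(chain.prompt.input_variables),
        "prompt_template": chain.prompt.template,
    }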