bstraehle committed on
Commit
6017859
·
1 Parent(s): 8573a63

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -26
app.py CHANGED
@@ -123,24 +123,25 @@ def wandb_trace(rag_option, prompt, completion, result, chain, err_msg, start_ti
123
  wandb.init(project = "openai-llm-rag")
124
  trace = Trace(
125
  kind = "chain",
126
- name = type(chain).__name__ if (chain != None) else "",
127
  status_code = "success" if (str(err_msg) == "") else "error",
128
  status_message = str(err_msg),
129
  metadata = {
130
- "chunk_overlap": "" if (rag_option == RAG_OFF) else config["chunk_overlap"],
131
- "chunk_size": "" if (rag_option == RAG_OFF) else config["chunk_size"],
132
- #"k": "" if (rag_option == RAG_OFF) else config["k"],
133
- #"model_name": config["model_name"],
134
- #"temperature": config["temperature"],
135
  },
136
  inputs = {"rag_option": rag_option if (str(err_msg) == "") else "",
137
- "prompt": str(prompt if (str(err_msg) == "") else ""),
138
  },
139
  outputs = {"result": result if (str(err_msg) == "") else "",
140
- "completion": str(completion),
141
  },
142
- model_dict = {"llm_client": str(chain.llm.client) if (rag_option == RAG_OFF) else str(chain.combine_documents_chain.llm_chain.llm.client),
143
- "chain_prompt": str(chain.prompt) if (rag_option == RAG_OFF) else str(chain.combine_documents_chain.llm_chain.prompt),
 
 
 
 
144
  "retriever": "" if (rag_option == RAG_OFF) else str(chain.retriever),
145
  },
146
  start_time_ms = start_time_ms,
@@ -177,26 +178,10 @@ def invoke(openai_api_key, rag_option, prompt):
177
  db = document_retrieval_mongodb(llm, prompt)
178
  completion, chain = rag_chain(llm, prompt, db)
179
  result = completion["result"]
180
- #print("###")
181
- #print(completion)
182
- #print("###")
183
- #print(chain.combine_documents_chain)
184
- print("###")
185
- print(chain.combine_documents_chain.llm_chain.llm.client)
186
- print("###")
187
- print(chain.combine_documents_chain.llm_chain.prompt)
188
- print("###")
189
- print(chain.retriever)
190
- print("###")
191
  else:
192
  completion, chain = llm_chain(llm, prompt)
193
  result = completion.generations[0][0].text if (completion.generations[0] != None and
194
  completion.generations[0][0] != None) else ""
195
- print("###")
196
- print(chain.llm.client)
197
- print("###")
198
- print(chain.prompt)
199
- print("###")
200
  except Exception as e:
201
  err_msg = e
202
  raise gr.Error(e)
 
123
  wandb.init(project = "openai-llm-rag")
124
  trace = Trace(
125
  kind = "chain",
126
+ name = "" if (chain == None) else type(chain).__name__,
127
  status_code = "success" if (str(err_msg) == "") else "error",
128
  status_message = str(err_msg),
129
  metadata = {
130
+ "chunk_overlap": config["chunk_overlap"] if (str(err_msg) == "" and rag_option != RAG_OFF) else "",
131
+ "chunk_size": config["chunk_size"] if (str(err_msg) == "" and rag_option != RAG_OFF) else "",
 
 
 
132
  },
133
  inputs = {"rag_option": rag_option if (str(err_msg) == "") else "",
134
+ "prompt": prompt if (str(err_msg) == "") else "",
135
  },
136
  outputs = {"result": result if (str(err_msg) == "") else "",
137
+ "completion": str(completion) if (str(err_msg) == "") else "",
138
  },
139
+ model_dict = {"llm_client": str(chain.llm.client) if (rag_option == RAG_OFF) else
140
+ str(chain.combine_documents_chain.llm_chain.llm.client),
141
+ "model_name": config["model_name"],
142
+ "temperature": config["temperature"],
143
+ "chain_prompt": str(chain.prompt) if (rag_option == RAG_OFF) else
144
+ str(chain.combine_documents_chain.llm_chain.prompt),
145
  "retriever": "" if (rag_option == RAG_OFF) else str(chain.retriever),
146
  },
147
  start_time_ms = start_time_ms,
 
178
  db = document_retrieval_mongodb(llm, prompt)
179
  completion, chain = rag_chain(llm, prompt, db)
180
  result = completion["result"]
 
 
 
 
 
 
 
 
 
 
 
181
  else:
182
  completion, chain = llm_chain(llm, prompt)
183
  result = completion.generations[0][0].text if (completion.generations[0] != None and
184
  completion.generations[0][0] != None) else ""
 
 
 
 
 
185
  except Exception as e:
186
  err_msg = e
187
  raise gr.Error(e)