Kevin Hu committed
Commit 7362294 · 1 Parent(s): f40f403

refine markdown prompt (#2551)


### What problem does this PR solve?

Minor prompt-formatting fix: add blank lines around the `------` separator joining retrieved knowledge chunks and before the `### Elapsed` timing section appended to the prompt, so both render properly as Markdown.

### Type of change

- [x] Performance Improvement

Files changed (1)
  1. api/db/services/dialog_service.py (+2 −2)
api/db/services/dialog_service.py CHANGED
@@ -180,7 +180,7 @@ def chat(dialog, messages, stream=True, **kwargs):
         yield {"answer": empty_res, "reference": kbinfos, "audio_binary": tts(tts_mdl, empty_res)}
         return {"answer": prompt_config["empty_response"], "reference": kbinfos}

-    kwargs["knowledge"] = "\n------\n".join(knowledges)
+    kwargs["knowledge"] = "\n\n------\n\n".join(knowledges)
     gen_conf = dialog.llm_setting

     msg = [{"role": "system", "content": prompt_config["system"].format(**kwargs)}]
@@ -221,7 +221,7 @@ def chat(dialog, messages, stream=True, **kwargs):
         if answer.lower().find("invalid key") >= 0 or answer.lower().find("invalid api") >= 0:
             answer += " Please set LLM API-Key in 'User Setting -> Model Providers -> API-Key'"
         done_tm = timer()
-        prompt += "\n### Elapsed\n - Retrieval: %.1f ms\n - LLM: %.1f ms"%((retrieval_tm-st)*1000, (done_tm-st)*1000)
+        prompt += "\n\n### Elapsed\n - Retrieval: %.1f ms\n - LLM: %.1f ms"%((retrieval_tm-st)*1000, (done_tm-st)*1000)
         return {"answer": answer, "reference": refs, "prompt": prompt}

     if stream:
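
For context (not part of the diff): in Markdown, a run of dashes like `------` that directly follows a line of text is parsed as a setext heading underline for that line rather than as a horizontal rule, and some renderers also expect a blank line before an ATX heading such as `### Elapsed`. The sketch below illustrates the before/after strings produced by the two changed lines; the `chunks` list and the timing values are made-up placeholders, not taken from the PR.

```python
# Minimal sketch of why the extra blank lines matter when the prompt is
# rendered as Markdown. "chunks" is a hypothetical stand-in for the
# `knowledges` list of retrieved passages.
chunks = ["First retrieved passage.", "Second retrieved passage."]

# Before #2551: "------" sits directly under the previous line, which most
# Markdown parsers treat as a setext (H2) underline for that line instead of
# a horizontal rule between chunks.
before = "\n------\n".join(chunks)

# After #2551: blank lines isolate "------", so it renders as a standalone
# thematic break separating the chunks.
after = "\n\n------\n\n".join(chunks)

# The same idea applies to the timing footer: "\n\n### Elapsed" guarantees a
# blank line before the heading, which some renderers require for "### ..."
# to be treated as a heading rather than ordinary text. The numbers here are
# example values only.
footer = "\n\n### Elapsed\n - Retrieval: %.1f ms\n - LLM: %.1f ms" % (12.3, 45.6)

print(before)
print("-" * 3)
print(after)
print(footer)
```

Printing the two joined strings shows the old separator gluing onto the preceding chunk while the new one stands on its own line, which is what "refine markdown prompt" refers to.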