crystalkalem committed · Commit 8bc4469 · verified · 1 Parent(s): dd3400c

Update app.py

Files changed (1): app.py +1 -1
app.py CHANGED
@@ -14,7 +14,7 @@ def generate(message, history,temperature=0.7,max_tokens=1024):
     formatted_prompt.append({"role": "user", "content": user_prompt})
     formatted_prompt.append({"role": "assistant", "content": bot_response })
     formatted_prompt.append({"role": "user", "content": message})
-    stream_response = llm.create_chat_completion(messages=formatted_prompt, temperature=temperature, max_tokens=max_tokens, stream=True)
+    stream_response = llm.create_chat_completion(messages=formatted_prompt, temperature=temperature, max_tokens=max_tokens, stream=False)
     response = ""
     for chunk in stream_response:
         if len(chunk['choices'][0]["delta"]) != 0 and "content" in chunk['choices'][0]["delta"]:
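
A note on this change: in llama-cpp-python, create_chat_completion(..., stream=False) returns a single completion dict rather than an iterator of chunks, so the loop kept in the diff context, which reads chunk['choices'][0]["delta"], would find no deltas to consume. A minimal sketch of how the non-streaming result is typically read (the surrounding generate() body and variable names are assumed from the diff context, not shown in this commit):

    # Sketch only, not part of the commit: consuming a non-streaming
    # llama-cpp-python chat completion. With stream=False the call
    # returns one dict, so no chunk/delta iteration is needed.
    result = llm.create_chat_completion(
        messages=formatted_prompt,
        temperature=temperature,
        max_tokens=max_tokens,
        stream=False,
    )
    # The full reply is under choices[0]["message"]["content"]
    # (streaming chunks use "delta" instead of "message").
    response = result["choices"][0]["message"]["content"]

With this shape, whatever the function previously emitted incrementally per chunk would now be produced once, from the single accumulated response string.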