John Langley committed on
Commit b1b31bf · 1 Parent(s): 718f6da

working of a streaming solution

Files changed (1)
  1. utilsasync.py +0 -4
utilsasync.py CHANGED
@@ -68,10 +68,6 @@ def generate_llm_output(
         if character in stop_words:
             # end of context
             return
-
-        if emoji.is_emoji(character):
-            # Bad emoji not a meaning messes chat from next lines
-            return
 
         output += response["choices"][0]["text"]
         yield output
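
For context, here is a minimal, runnable sketch of the streaming generator this hunk sits in. The stream parameter, the stop_words value, and the fake chunks in the usage example are assumptions for illustration only; just the loop body mirrors the diff above.

from typing import Dict, Iterable, Iterator, List

stop_words: List[str] = ["</s>"]  # hypothetical stop tokens, not from the repo


def generate_llm_output(stream: Iterable[Dict]) -> Iterator[str]:
    """Accumulate streamed completion chunks and yield the running text."""
    output = ""
    for response in stream:
        character = response["choices"][0]["text"]
        if character in stop_words:
            # end of context
            return
        output += response["choices"][0]["text"]
        # yield everything accumulated so far, so a UI can render it incrementally
        yield output


# Usage with a fake stream shaped like an OpenAI / llama-cpp style response:
chunks = [{"choices": [{"text": t}]} for t in ["Hel", "lo", "</s>"]]
for partial in generate_llm_output(chunks):
    print(partial)  # prints "Hel", then "Hello"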
 