theosaurus committed
Commit 526fda1 · 1 Parent(s): ed36d82

feat: updated the `generate_llm_response` method to stream the text as the model generates it.

Files changed (1):
  app.py +3 -4
app.py CHANGED
@@ -123,11 +123,10 @@ def generate_llm_response(
     t.start()
 
     # Collect the output
-    outputs = []
+    accumulated_response = ""
     for text in streamer:
-        outputs.append(text)
-
-    return "".join(outputs)
+        accumulated_response += text
+        yield accumulated_response
 
 def append_text_knowledge(file_path: str) -> str:
     """