Damien Benveniste committed on
Commit 2ffd335 · 1 Parent(s): c7bb7b5
Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -33,7 +33,7 @@ async def generate_stream(prompt: str, max_tokens: int, temperature: float):
         max_tokens=max_tokens
     )
 
-    async for output in engine.generate(prompt, sampling_params): # True enables streaming
+    async for output in engine.generate(prompt, sampling_params, 1): # True enables streaming
         yield f"data: {json.dumps({'text': output.outputs[0].text})}\n\n"
 
     yield "data: [DONE]\n\n"
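The one-line change adds a third positional argument to engine.generate(). In vLLM's AsyncLLMEngine that argument is the request_id the async engine uses to track the request, not a streaming flag: generate() is already an async generator that yields partial results as tokens arrive, so streaming happens regardless. Below is a minimal sketch of how the patched generate_stream() plausibly fits into the rest of app.py. Everything outside the diffed hunk (the engine setup, the /generate route, the model name, and the per-request UUID) is an assumption for illustration, not taken from the original file.

# Hedged sketch of the surrounding SSE endpoint; only the body of
# generate_stream() is taken from the commit, the rest is assumed.
import json
import uuid

from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from vllm import SamplingParams
from vllm.engine.arg_utils import AsyncEngineArgs
from vllm.engine.async_llm_engine import AsyncLLMEngine

app = FastAPI()

# Hypothetical engine setup; the real app.py may configure this differently.
engine = AsyncLLMEngine.from_engine_args(AsyncEngineArgs(model="facebook/opt-125m"))


async def generate_stream(prompt: str, max_tokens: int, temperature: float):
    sampling_params = SamplingParams(
        temperature=temperature,
        max_tokens=max_tokens,
    )

    # AsyncLLMEngine.generate() is an async generator that yields a
    # RequestOutput each time new tokens are available. Its third positional
    # argument is the request_id; here a fresh UUID is used instead of the
    # literal 1 from the commit.
    async for output in engine.generate(prompt, sampling_params, str(uuid.uuid4())):
        # outputs[0].text holds the text generated so far for this request.
        yield f"data: {json.dumps({'text': output.outputs[0].text})}\n\n"

    # SSE sentinel so the client knows the stream is finished.
    yield "data: [DONE]\n\n"


@app.post("/generate")
async def generate(prompt: str, max_tokens: int = 256, temperature: float = 0.7):
    # Wrap the async generator in a server-sent-events response.
    return StreamingResponse(
        generate_stream(prompt, max_tokens, temperature),
        media_type="text/event-stream",
    )

One design note: passing the constant 1 as the request_id, as the commit does, reuses the same identifier for every call, which is likely to collide if two clients stream at the same time; a per-request UUID, as sketched above, avoids that.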