Update app.py
app.py CHANGED
@@ -263,16 +263,25 @@ def simple_chat(message, temperature: float = 0.8, max_length: int = 4096, top_p
     def generate_text():
         nonlocal buffer
         with torch.no_grad():
-
-            thread.start()
+            model.generate(**gen_kwargs)
             for new_text in streamer:
                 buffer += new_text
+    # thread = Thread(target=model.generate, kwargs=gen_kwargs)
+    # thread.start()
+    # for new_text in streamer:
+    #     buffer += new_text

-    # Start the generation in a separate thread
-    generate_text()

-
+    thread = Thread(target=generate_text)
+    thread.start()
+
     thread.join()
+
+    # # Start the generation in a separate thread
+    # generate_text()
+
+    # # Wait for the generation to finish
+    # thread.join()

     print("---------")
     print("Text: ")
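For reference, the lines being shuffled in this commit follow the usual transformers streaming recipe that the commented-out block points at: model.generate(**gen_kwargs) runs on a background Thread while the caller drains a TextIteratorStreamer. The sketch below shows that pattern in isolation, under assumptions not taken from this Space's app.py: the checkpoint name, prompt, and sampling values are placeholders.

from threading import Thread
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Placeholder checkpoint and prompt, for illustration only.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
inputs = tokenizer("Hello, my name is", return_tensors="pt")

# skip_prompt=True keeps the echoed prompt out of the streamed text.
streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
gen_kwargs = dict(
    **inputs,
    streamer=streamer,
    max_new_tokens=64,
    do_sample=True,
    temperature=0.8,
    top_p=0.9,
)

# Generation runs in a background thread; the streamer is consumed here,
# on the calling thread, as tokens arrive.
thread = Thread(target=model.generate, kwargs=gen_kwargs)
thread.start()

buffer = ""
for new_text in streamer:
    buffer += new_text

thread.join()
print(buffer)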