Update app.py
app.py CHANGED
@@ -38,7 +38,8 @@ def respond(
 ):
     global messages
 
-
+    response = ""
+    yield response
 
     generation_kwargs = dict(
         max_length=max_length,
@@ -54,8 +55,6 @@ def respond(
     if messages == []:
         messages = [chatglm_cpp.ChatMessage(role="system", content=system_message)]
 
-    print(messages)
-
     # for val in history:
     #     if val[0]:
     #         messages.append(chatglm_cpp.ChatMessage(role="user", content=val[0]))
@@ -63,24 +62,16 @@ def respond(
     #         messages.append(chatglm_cpp.ChatMessage(role="assistant", content=val[0]))
 
     messages.append(chatglm_cpp.ChatMessage(role="user", content=message))
-
-    print(messages)
-
-    response = ""
-    # yield response
+
     chunks = []
-    # yield response
 
     for chunk in pipeline.chat(messages, **generation_kwargs):
         response += chunk.content
         chunks.append(chunk)
-
-
+        yield response
+
     messages.append(chatglm_cpp.ChatMessage(role="assistant", content=response))
 
-    print(messages)
-    return response
-
 
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
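In effect, this commit turns respond into a generator: instead of printing debug output and returning the full reply at the end, it yields the accumulated response after every chunk so gr.ChatInterface can stream partial text into the chat box. A minimal end-to-end sketch of that pattern is below; the model path, the stream=True generation flag, and the gr.ChatInterface wiring with additional_inputs are assumptions for illustration, not taken from this commit.

```python
# Minimal streaming sketch, assuming a chatglm_cpp GGML model on disk and that
# generation_kwargs enables streaming so pipeline.chat() yields delta chunks.
import chatglm_cpp
import gradio as gr

pipeline = chatglm_cpp.Pipeline("./chatglm3-ggml.bin")  # hypothetical model path
messages = []  # module-level history, mirroring the `global messages` in the diff


def respond(message, history, system_message, max_length):
    global messages
    response = ""
    yield response  # emit early so the UI shows a bubble before generation starts

    generation_kwargs = dict(
        max_length=max_length,
        stream=True,  # assumption: makes pipeline.chat() yield chunks, matching the loop in the diff
    )

    if messages == []:
        messages = [chatglm_cpp.ChatMessage(role="system", content=system_message)]

    messages.append(chatglm_cpp.ChatMessage(role="user", content=message))

    for chunk in pipeline.chat(messages, **generation_kwargs):
        response += chunk.content
        yield response  # gr.ChatInterface re-renders the partial reply on every yield

    messages.append(chatglm_cpp.ChatMessage(role="assistant", content=response))


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a helpful assistant.", label="System message"),
        gr.Slider(minimum=64, maximum=4096, value=2048, step=64, label="Max length"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```

Because the function yields rather than returns, Gradio treats it as a streaming handler; the final yielded string is what remains in the chat history, so no trailing return is needed.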