Update app.py
app.py CHANGED
@@ -39,7 +39,7 @@ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 def respond(message, history):
     response = ""
-    char_limit= 500
+    # char_limit= 500
 
     top_chunks = get_top_chunks(message)
     context = "\n".join(top_chunks)
@@ -72,16 +72,16 @@ def respond(message, history):
         response += token
         yield response
 
-    if len(response) > char_limit:
-        response = response[:char_limit]
+    # if len(response) > char_limit:
+    #     response = response[:char_limit]
 
-        for punc in [".", "!", "?"]:
-            i = response.rfind(punc)
-            if i != -1:
-                response = response[:i+1]
-                break
+    #     for punc in [".", "!", "?"]:
+    #         i = response.rfind(punc)
+    #         if i != -1:
+    #             response = response[:i+1]
+    #             break
 
-    yield response
+    # yield response
 
 chatbot = gr.ChatInterface(respond, type="messages")
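The block this commit comments out implemented simple response truncation: cap the streamed reply at char_limit characters, then cut back to the last sentence-ending punctuation so it does not stop mid-sentence. A minimal standalone sketch of that same logic, for reference only (the helper name truncate_response is hypothetical and not part of app.py):

def truncate_response(response, char_limit=500):
    """Trim a reply to char_limit characters, then cut back to the last
    '.', '!' or '?' so the text does not end mid-sentence."""
    if len(response) <= char_limit:
        return response
    response = response[:char_limit]
    # Same ordering as the commented-out code: '.' is checked first,
    # then '!', then '?', and the first match found wins.
    for punc in [".", "!", "?"]:
        i = response.rfind(punc)
        if i != -1:
            response = response[:i + 1]
            break
    return response

# Example: with a 40-character limit, the reply is cut at the last "."
print(truncate_response("One sentence here. Another one! And an unfinished", char_limit=40))
# -> "One sentence here."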