Corvius committed on
Commit
0fab6d4
·
verified ·
1 Parent(s): fecb2b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -2
app.py CHANGED
@@ -118,7 +118,7 @@ def stop_generation_func():
118
  with gr.Blocks(theme='gradio/monochrome') as demo:
119
  with gr.Row():
120
  with gr.Column(scale=2):
121
- chatbot = gr.Chatbot()
122
  msg = gr.Textbox(label="Message")
123
  with gr.Row():
124
  clear = gr.Button("Clear")
@@ -143,10 +143,14 @@ with gr.Blocks(theme='gradio/monochrome') as demo:
143
 
144
  def user(user_message, history):
145
  print(f"{get_timestamp()} <|user|> {user_message}")
 
146
  return "", history + [[user_message, None]]
147
 
148
  def bot(history, system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens):
149
  global stop_generation
 
 
 
150
  user_message = history[-1][0]
151
  bot_message = predict(user_message, history[:-1], system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens)
152
  history[-1][1] = ""
@@ -156,7 +160,7 @@ with gr.Blocks(theme='gradio/monochrome') as demo:
156
  break
157
  history[-1][1] = chunk
158
  yield history
159
- stop_generation.clear()
160
 
161
  def regenerate_response(history, system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens):
162
  if len(history) > 0:
 
118
  with gr.Blocks(theme='gradio/monochrome') as demo:
119
  with gr.Row():
120
  with gr.Column(scale=2):
121
+ chatbot = gr.Chatbot(value=[])
122
  msg = gr.Textbox(label="Message")
123
  with gr.Row():
124
  clear = gr.Button("Clear")
 
143
 
144
  def user(user_message, history):
145
  print(f"{get_timestamp()} <|user|> {user_message}")
146
+ history = history or []
147
  return "", history + [[user_message, None]]
148
 
149
  def bot(history, system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens):
150
  global stop_generation
151
+ history = history or []
152
+ if not history:
153
+ return history
154
  user_message = history[-1][0]
155
  bot_message = predict(user_message, history[:-1], system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens)
156
  history[-1][1] = ""
 
160
  break
161
  history[-1][1] = chunk
162
  yield history
163
+ stop_generation.clear()
164
 
165
  def regenerate_response(history, system_prompt, temperature, top_p, top_k, frequency_penalty, presence_penalty, repetition_penalty, max_tokens):
166
  if len(history) > 0: