MaxLSB committed on
Commit 3c8907f · verified · 1 Parent(s): a9c626e

Update app.py

Files changed (1)
  1. app.py +21 -10
app.py CHANGED
@@ -51,7 +51,12 @@ def respond(message, max_tokens, temperature, top_p):
     response = ""
     for new_text in streamer:
         response += new_text
-        yield response
+        # wrap model name and response in HTML
+        bubble = (
+            f"<div style='background:#eef2f5;padding:4px;border-radius:4px;font-size:small;max-width:fit-content;margin-bottom:4px;'>"
+            f"Model: {current_model_name}</div>" + response
+        )
+        yield bubble
 
 # User input handler
 def user(message, chat_history):
@@ -64,8 +69,7 @@ def bot(chatbot, max_tokens, temperature, top_p):
     response_generator = respond(message, max_tokens, temperature, top_p)
     for response in response_generator:
         chatbot[-1][1] = response
-        # yield model name and updated chat history
-        yield current_model_name, chatbot
+        yield chatbot
 
 # Model selector handler
 def update_model(model_name):
@@ -96,12 +100,9 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
 
         # Chat column
         with gr.Column(scale=4):
-            # Mini window for model name
-            model_box = gr.Textbox(label="Model", interactive=False)
-            # Main chat window
             chatbot = gr.Chatbot(
                 bubble_full_width=False,
-                height=400
+                height=500
             )
             msg_input = gr.Textbox(
                 placeholder="Type your message and press Enter...",
@@ -120,12 +121,22 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
     # Event handlers
     model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
 
-    msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
+    msg_input.submit(
+        fn=user,
+        inputs=[msg_input, chatbot],
+        outputs=[msg_input, chatbot],
+        queue=False
+    ).then(
         fn=bot,
         inputs=[chatbot, max_tokens, temperature, top_p],
-        outputs=[model_box, chatbot]
+        outputs=[chatbot]
+    )
+    clear_button.click(
+        fn=lambda: [],
+        inputs=None,
+        outputs=chatbot,
+        queue=False
     )
-    clear_button.click(fn=lambda: ("", []), inputs=None, outputs=[model_box, chatbot], queue=False)
 
 if __name__ == "__main__":
     demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)
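
For context, a minimal self-contained sketch of the pattern this commit converges on, not the Space's actual app.py: the Transformers model and its TextIteratorStreamer are replaced by a dummy word stream, current_model_name is a hard-coded placeholder, and the user()/bot() bodies, slider ranges, and single-column layout are assumptions (the model selector and sidebar are omitted). It also assumes the pair-style chat history seen in the diff (a list of [user, assistant] pairs), which newer Gradio releases deprecate in favor of type="messages". Only the wiring mirrors the diff: respond() prepends an HTML model badge to every partial response, bot() streams back the chat history alone, and the submit/clear handlers no longer reference a model_box.

    # Illustrative sketch only: a dummy word stream stands in for the real
    # TextIteratorStreamer so the Gradio wiring from this commit runs on its own.
    import time

    import gradio as gr

    current_model_name = "placeholder-model"  # the Space sets this from its model selector


    def respond(message, max_tokens, temperature, top_p):
        # max_tokens / temperature / top_p are ignored by the dummy stream.
        response = ""
        for new_text in (word + " " for word in f"Echo: {message}".split()):
            response += new_text
            time.sleep(0.05)
            # Prepend a small HTML badge with the model name, so the label lives
            # inside the chat bubble instead of a separate Textbox.
            bubble = (
                "<div style='background:#eef2f5;padding:4px;border-radius:4px;"
                "font-size:small;max-width:fit-content;margin-bottom:4px;'>"
                f"Model: {current_model_name}</div>" + response
            )
            yield bubble


    def user(message, chat_history):
        # Append the user turn with an empty assistant slot and clear the input box.
        return "", chat_history + [[message, ""]]


    def bot(chatbot, max_tokens, temperature, top_p):
        message = chatbot[-1][0]
        for partial in respond(message, max_tokens, temperature, top_p):
            chatbot[-1][1] = partial
            yield chatbot  # only the chat history is streamed back now


    with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
        chatbot = gr.Chatbot(height=500)
        msg_input = gr.Textbox(placeholder="Type your message and press Enter...")
        clear_button = gr.Button("Clear")
        max_tokens = gr.Slider(1, 512, value=128, label="Max new tokens")
        temperature = gr.Slider(0.1, 2.0, value=0.7, label="Temperature")
        top_p = gr.Slider(0.1, 1.0, value=0.9, label="Top-p")

        msg_input.submit(
            fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False
        ).then(
            fn=bot, inputs=[chatbot, max_tokens, temperature, top_p], outputs=[chatbot]
        )
        clear_button.click(fn=lambda: [], inputs=None, outputs=chatbot, queue=False)

    if __name__ == "__main__":
        demo.queue().launch()

Embedding the badge in the HTML that respond() yields is what makes the separate model_box Textbox, and the extra model-name output in the event chain, unnecessary.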