MaxLSB committed on
Commit
a9c626e
·
verified ·
1 Parent(s): 3a38c1f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -6
app.py CHANGED
@@ -51,8 +51,7 @@ def respond(message, max_tokens, temperature, top_p):
51
  response = ""
52
  for new_text in streamer:
53
  response += new_text
54
- # prepend model name on its own line
55
- yield f"**Model: {current_model_name}**\n\n{response}"
56
 
57
  # User input handler
58
  def user(message, chat_history):
@@ -65,7 +64,8 @@ def bot(chatbot, max_tokens, temperature, top_p):
65
  response_generator = respond(message, max_tokens, temperature, top_p)
66
  for response in response_generator:
67
  chatbot[-1][1] = response
68
- yield chatbot
 
69
 
70
  # Model selector handler
71
  def update_model(model_name):
@@ -82,6 +82,7 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
82
  """ )
83
 
84
  with gr.Row():
 
85
  with gr.Column(scale=1, min_width=150):
86
  model_selector = gr.Dropdown(
87
  choices=["LeCarnet-3M", "LeCarnet-8M", "LeCarnet-21M"],
@@ -93,10 +94,14 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
93
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p Sampling")
94
  clear_button = gr.Button("Clear Chat")
95
 
 
96
  with gr.Column(scale=4):
 
 
 
97
  chatbot = gr.Chatbot(
98
  bubble_full_width=False,
99
- height=500
100
  )
101
  msg_input = gr.Textbox(
102
  placeholder="Type your message and press Enter...",
@@ -112,11 +117,15 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
112
  label="Example Prompts"
113
  )
114
 
 
115
  model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
 
116
  msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
117
- fn=bot, inputs=[chatbot, max_tokens, temperature, top_p], outputs=[chatbot]
 
 
118
  )
119
- clear_button.click(fn=lambda: None, inputs=None, outputs=chatbot, queue=False)
120
 
121
  if __name__ == "__main__":
122
  demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)
 
51
  response = ""
52
  for new_text in streamer:
53
  response += new_text
54
+ yield response
 
55
 
56
  # User input handler
57
  def user(message, chat_history):
 
64
  response_generator = respond(message, max_tokens, temperature, top_p)
65
  for response in response_generator:
66
  chatbot[-1][1] = response
67
+ # yield model name and updated chat history
68
+ yield current_model_name, chatbot
69
 
70
  # Model selector handler
71
  def update_model(model_name):
 
82
  """ )
83
 
84
  with gr.Row():
85
+ # Options column
86
  with gr.Column(scale=1, min_width=150):
87
  model_selector = gr.Dropdown(
88
  choices=["LeCarnet-3M", "LeCarnet-8M", "LeCarnet-21M"],
 
94
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p Sampling")
95
  clear_button = gr.Button("Clear Chat")
96
 
97
+ # Chat column
98
  with gr.Column(scale=4):
99
+ # Mini window for model name
100
+ model_box = gr.Textbox(label="Model", interactive=False)
101
+ # Main chat window
102
  chatbot = gr.Chatbot(
103
  bubble_full_width=False,
104
+ height=400
105
  )
106
  msg_input = gr.Textbox(
107
  placeholder="Type your message and press Enter...",
 
117
  label="Example Prompts"
118
  )
119
 
120
+ # Event handlers
121
  model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
122
+
123
  msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
124
+ fn=bot,
125
+ inputs=[chatbot, max_tokens, temperature, top_p],
126
+ outputs=[model_box, chatbot]
127
  )
128
+ clear_button.click(fn=lambda: ("", []), inputs=None, outputs=[model_box, chatbot], queue=False)
129
 
130
  if __name__ == "__main__":
131
  demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)