MaxLSB committed
Commit 7e54aad · verified · 1 Parent(s): 7a04511

Update app.py

Files changed (1): app.py (+12 -9)
app.py CHANGED
@@ -6,7 +6,6 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
 # Hugging Face token
 hf_token = os.environ["HUGGINGFACEHUB_API_TOKEN"]
-
 torch.set_num_threads(1)
 
 # Globals
@@ -51,7 +50,6 @@ def respond(message, max_tokens, temperature, top_p):
     response = ""
     for new_text in streamer:
         response += new_text
-        # prepend model name on its own line
         yield f"**{current_model_name}**\n\n{response}"
 
 # User input handler
@@ -77,14 +75,17 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
     with gr.Row():
         gr.HTML("""
            <div style="text-align: center; width: 100%;">
-                <h1 style="margin: 0;">LeCarnet Demo 📊</h1>
+                <h1 style="margin: 0;">LeCarnet Demo</h1>
            </div>
-        """ )
+        """)
 
+    # Create the msg_input early, but don't render it yet
     msg_input = gr.Textbox(
-        placeholder="Il était une fois un petit garçon",
-        label="User Input"
+        placeholder="Il était une fois un petit garçon",
+        label="User Input",
+        render=False
     )
+
     with gr.Row():
         with gr.Column(scale=1, min_width=150):
             model_selector = gr.Dropdown(
@@ -96,7 +97,7 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
             temperature = gr.Slider(0.1, 2.0, value=0.4, step=0.1, label="Temperature")
             top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p Sampling")
             clear_button = gr.Button("Clear Chat")
-
+
             gr.Examples(
                 examples=[
                     ["Il était une fois un petit phoque nommé Zoom. Zoom était très habile et aimait jouer dans l'eau."],
@@ -112,8 +113,10 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
                 bubble_full_width=False,
                 height=500
             )
-            msg_input
+            # Now render the msg_input inside the right column, below the chatbot
+            msg_input.render()
 
+    # Event Handlers
     model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
     msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
         fn=bot, inputs=[chatbot, max_tokens, temperature, top_p], outputs=[chatbot]
@@ -121,4 +124,4 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
     clear_button.click(fn=lambda: None, inputs=None, outputs=chatbot, queue=False)
 
 if __name__ == "__main__":
-    demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)
+    demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)
 
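A few notes on the code paths touched above. The respond() generator in the second hunk follows the usual TextIteratorStreamer pattern: model.generate runs in a background thread while the streamer is consumed and partial text is yielded to the UI with the model name prepended. The full function is not part of this diff, so the following is only a sketch of that pattern; the model, tokenizer, and current_model_name globals and the exact generate arguments are assumptions.

# Sketch only: the real respond() is not fully shown in this diff.
# Assumes the module-level `model`, `tokenizer`, and `current_model_name` globals set up near the top of app.py.
from threading import Thread
from transformers import TextIteratorStreamer

def respond(message, max_tokens, temperature, top_p):
    inputs = tokenizer(message, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(
        **inputs,
        streamer=streamer,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
    )
    # generate() blocks, so it runs in a worker thread while the streamer is drained here
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    response = ""
    for new_text in streamer:
        response += new_text
        yield f"**{current_model_name}**\n\n{response}"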
 
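The main change in this commit is the render=False / msg_input.render() pair: the Textbox is created early, so components such as gr.Examples and the event wiring can reference it, but it is only placed into the layout later, below the chatbot. A minimal standalone sketch of the same Gradio pattern, with illustrative component names (msg, chat, demo):

import gradio as gr

with gr.Blocks() as demo:
    # Declared up front so other components and handlers can reference it,
    # but not placed in the layout yet
    msg = gr.Textbox(label="User Input", placeholder="Il était une fois un petit garçon", render=False)

    with gr.Row():
        with gr.Column():
            chat = gr.Chatbot(height=500)
            # Placed here: the textbox shows up below the chatbot in this column
            msg.render()

demo.launch()

In the previous version the Textbox was rendered where it was constructed, above the two rows, and the bare msg_input expression after the chatbot had no effect; this change moves the visible textbox below the chat area.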
 
 
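The submit wiring chains two callbacks: user() appends the message to the chat history and clears the textbox (with queue=False so it feels instant), then bot() streams the model reply into the last history entry. Neither function body appears in this diff; the sketch below is a plausible reading consistent with the inputs/outputs wired above, assuming tuple-style chat history and the respond() generator sketched earlier.

# Sketch only: user() and bot() are defined elsewhere in app.py.
def user(message, history):
    # Add the user's turn with an empty assistant slot, and clear the textbox
    return "", history + [[message, None]]

def bot(history, max_tokens, temperature, top_p):
    message = history[-1][0]
    for partial in respond(message, max_tokens, temperature, top_p):
        # Fill the assistant slot incrementally as tokens stream in
        history[-1][1] = partial
        yield history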
 
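The final hunk only re-saves the queue and launch line (a whitespace-level change). For reference, the same configuration written as two statements with comments on what each knob does; the values simply mirror the line above, and chaining or splitting the calls is equivalent since queue() returns the Blocks instance.

# Up to 10 concurrent runs per event listener by default; hold at most 10 pending requests in the queue
demo.queue(default_concurrency_limit=10, max_size=10)
# Disable server-side rendering of the frontend and cap the app's worker thread pool at 10
demo.launch(ssr_mode=False, max_threads=10)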