MaxLSB committed on
Commit
3a38c1f
Β·
verified Β·
1 Parent(s): f6b834f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -11
app.py CHANGED
@@ -9,17 +9,19 @@ hf_token = os.environ["HUGGINGFACEHUB_API_TOKEN"]
9
 
10
  torch.set_num_threads(4)
11
 
12
- # Global model & tokenizer
13
  tokenizer = None
14
  model = None
 
15
 
16
  # Load selected model
17
  def load_model(model_name):
18
- global tokenizer, model
19
  full_model_name = f"MaxLSB/{model_name}"
20
  tokenizer = AutoTokenizer.from_pretrained(full_model_name, token=hf_token)
21
  model = AutoModelForCausalLM.from_pretrained(full_model_name, token=hf_token)
22
  model.eval()
 
23
 
24
  # Initialize default model
25
  load_model("LeCarnet-8M")
@@ -49,7 +51,8 @@ def respond(message, max_tokens, temperature, top_p):
49
  response = ""
50
  for new_text in streamer:
51
  response += new_text
52
- yield response
 
53
 
54
  # User input handler
55
  def user(message, chat_history):
@@ -69,8 +72,6 @@ def update_model(model_name):
69
  load_model(model_name)
70
  return []
71
 
72
- image_path = "static/le-carnet.png"
73
-
74
  # Gradio UI
75
  with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
76
  with gr.Row():
@@ -78,11 +79,9 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
78
  <div style="text-align: center; width: 100%;">
79
  <h1 style="margin: 0;">LeCarnet Demo πŸ“Š</h1>
80
  </div>
81
- """)
82
 
83
- # Main layout
84
  with gr.Row():
85
- # Left column: Options
86
  with gr.Column(scale=1, min_width=150):
87
  model_selector = gr.Dropdown(
88
  choices=["LeCarnet-3M", "LeCarnet-8M", "LeCarnet-21M"],
@@ -94,7 +93,6 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
94
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p Sampling")
95
  clear_button = gr.Button("Clear Chat")
96
 
97
- # Right column: Chat
98
  with gr.Column(scale=4):
99
  chatbot = gr.Chatbot(
100
  bubble_full_width=False,
@@ -114,7 +112,6 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
114
  label="Example Prompts"
115
  )
116
 
117
- # Event handlers
118
  model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
119
  msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
120
  fn=bot, inputs=[chatbot, max_tokens, temperature, top_p], outputs=[chatbot]
@@ -122,4 +119,4 @@ with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
122
  clear_button.click(fn=lambda: None, inputs=None, outputs=chatbot, queue=False)
123
 
124
  if __name__ == "__main__":
125
- demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)
 
9
 
10
  torch.set_num_threads(4)
11
 
12
+ # Globals
13
  tokenizer = None
14
  model = None
15
+ current_model_name = None
16
 
17
  # Load selected model
18
  def load_model(model_name):
19
+ global tokenizer, model, current_model_name
20
  full_model_name = f"MaxLSB/{model_name}"
21
  tokenizer = AutoTokenizer.from_pretrained(full_model_name, token=hf_token)
22
  model = AutoModelForCausalLM.from_pretrained(full_model_name, token=hf_token)
23
  model.eval()
24
+ current_model_name = model_name
25
 
26
  # Initialize default model
27
  load_model("LeCarnet-8M")
 
51
  response = ""
52
  for new_text in streamer:
53
  response += new_text
54
+ # prepend model name on its own line
55
+ yield f"**Model: {current_model_name}**\n\n{response}"
56
 
57
  # User input handler
58
  def user(message, chat_history):
 
72
  load_model(model_name)
73
  return []
74
 
 
 
75
  # Gradio UI
76
  with gr.Blocks(title="LeCarnet - Chat Interface") as demo:
77
  with gr.Row():
 
79
  <div style="text-align: center; width: 100%;">
80
  <h1 style="margin: 0;">LeCarnet Demo πŸ“Š</h1>
81
  </div>
82
+ """ )
83
 
 
84
  with gr.Row():
 
85
  with gr.Column(scale=1, min_width=150):
86
  model_selector = gr.Dropdown(
87
  choices=["LeCarnet-3M", "LeCarnet-8M", "LeCarnet-21M"],
 
93
  top_p = gr.Slider(0.1, 1.0, value=0.9, step=0.05, label="Top-p Sampling")
94
  clear_button = gr.Button("Clear Chat")
95
 
 
96
  with gr.Column(scale=4):
97
  chatbot = gr.Chatbot(
98
  bubble_full_width=False,
 
112
  label="Example Prompts"
113
  )
114
 
 
115
  model_selector.change(fn=update_model, inputs=[model_selector], outputs=[])
116
  msg_input.submit(fn=user, inputs=[msg_input, chatbot], outputs=[msg_input, chatbot], queue=False).then(
117
  fn=bot, inputs=[chatbot, max_tokens, temperature, top_p], outputs=[chatbot]
 
119
  clear_button.click(fn=lambda: None, inputs=None, outputs=chatbot, queue=False)
120
 
121
  if __name__ == "__main__":
122
+ demo.queue(default_concurrency_limit=10, max_size=10).launch(ssr_mode=False, max_threads=10)