YucYux committed on
Commit
6cadff6
·
1 Parent(s): 60e176e

Fixed the model-reloading bug

Browse files
Files changed (1) hide show
  1. app.py +14 -10
app.py CHANGED
@@ -118,7 +118,6 @@ def _load_model_and_tokenizer_core(model_path_to_load, model_display_name_for_st
118
 
119
  def handle_model_selection_change(selected_model_name_ui):
120
  global MODEL, TOKENIZER, MASK_ID, CURRENT_MODEL_PATH, DEVICE, uni_prompting
121
-
122
  status_msg = ""
123
  # 初始化 Examples 的可见性更新
124
  vis_lm_base = gr.update(visible=False)
@@ -127,7 +126,6 @@ def handle_model_selection_change(selected_model_name_ui):
127
  vis_mmu_base = gr.update(visible=False)
128
  vis_mmu_mixcot = gr.update(visible=False)
129
  vis_mmu_max = gr.update(visible=False)
130
-
131
  # 根据选择的模型决定 thinking mode 的默认状态
132
  is_mixcot_model_selected = (selected_model_name_ui == "MMaDA-8B-MixCoT")
133
 
@@ -138,10 +136,8 @@ def handle_model_selection_change(selected_model_name_ui):
138
 
139
  lm_think_button_label = "Thinking Mode ✅" if current_thinking_mode_lm_state else "Thinking Mode ❌"
140
  mmu_think_button_label = "Thinking Mode ✅" if current_thinking_mode_mmu_state else "Thinking Mode ❌"
141
-
142
  update_think_button_lm = gr.update(value=lm_think_button_label)
143
  update_think_button_mmu = gr.update(value=mmu_think_button_label)
144
-
145
  if selected_model_name_ui == "MMaDA-8B-Max (coming soon)":
146
  MODEL = None
147
  TOKENIZER = None
@@ -169,17 +165,26 @@ def handle_model_selection_change(selected_model_name_ui):
169
  # 尝试加载模型
170
  status_msg = _load_model_and_tokenizer_core(actual_path, selected_model_name_ui)
171
 
172
- # 检查模型是否成功加载
173
- if "Error loading model" in status_msg or MODEL is None:
 
 
 
 
 
 
 
 
 
 
 
174
  # 如果是 MixCoT 模型但加载失败,则关闭 thinking mode
175
  if is_mixcot_model_selected:
176
  current_thinking_mode_lm_state = False
177
  current_thinking_mode_mmu_state = False
178
  update_think_button_lm = gr.update(value="Thinking Mode ❌")
179
  update_think_button_mmu = gr.update(value="Thinking Mode ❌")
180
- if MODEL is None and "Error" not in status_msg: # 补充一个通用错误信息
181
- status_msg = f"Failed to properly load model '{selected_model_name_ui}'. {status_msg}"
182
- else: # 模型成功加载
183
  if selected_model_name_ui == "MMaDA-8B-Base":
184
  vis_lm_base = gr.update(visible=True)
185
  vis_mmu_base = gr.update(visible=True)
@@ -187,7 +192,6 @@ def handle_model_selection_change(selected_model_name_ui):
187
  vis_lm_mixcot = gr.update(visible=True)
188
  vis_mmu_mixcot = gr.update(visible=True)
189
  # thinking mode 已经在函数开头根据 is_mixcot_model_selected 设置为 True
190
-
191
  return (
192
  status_msg,
193
  vis_lm_base,
 
118
 
119
  def handle_model_selection_change(selected_model_name_ui):
120
  global MODEL, TOKENIZER, MASK_ID, CURRENT_MODEL_PATH, DEVICE, uni_prompting
 
121
  status_msg = ""
122
  # 初始化 Examples 的可见性更新
123
  vis_lm_base = gr.update(visible=False)
 
126
  vis_mmu_base = gr.update(visible=False)
127
  vis_mmu_mixcot = gr.update(visible=False)
128
  vis_mmu_max = gr.update(visible=False)
 
129
  # 根据选择的模型决定 thinking mode 的默认状态
130
  is_mixcot_model_selected = (selected_model_name_ui == "MMaDA-8B-MixCoT")
131
 
 
136
 
137
  lm_think_button_label = "Thinking Mode ✅" if current_thinking_mode_lm_state else "Thinking Mode ❌"
138
  mmu_think_button_label = "Thinking Mode ✅" if current_thinking_mode_mmu_state else "Thinking Mode ❌"
 
139
  update_think_button_lm = gr.update(value=lm_think_button_label)
140
  update_think_button_mmu = gr.update(value=mmu_think_button_label)
 
141
  if selected_model_name_ui == "MMaDA-8B-Max (coming soon)":
142
  MODEL = None
143
  TOKENIZER = None
 
165
  # 尝试加载模型
166
  status_msg = _load_model_and_tokenizer_core(actual_path, selected_model_name_ui)
167
 
168
+ # 改进的错误检查逻辑
169
+ model_load_failed = False
170
+
171
+ # 检查是否是明确的错误消息
172
+ if "Error loading model" in status_msg:
173
+ model_load_failed = True
174
+ # 检查模型是否为None,但排除"已经加载"的情况
175
+ elif MODEL is None:
176
+ if "is already loaded" not in status_msg:
177
+ model_load_failed = True
178
+ status_msg = f"Failed to properly load model '{selected_model_name_ui}'. {status_msg}"
179
+
180
+ if model_load_failed:
181
  # 如果是 MixCoT 模型但加载失败,则关闭 thinking mode
182
  if is_mixcot_model_selected:
183
  current_thinking_mode_lm_state = False
184
  current_thinking_mode_mmu_state = False
185
  update_think_button_lm = gr.update(value="Thinking Mode ❌")
186
  update_think_button_mmu = gr.update(value="Thinking Mode ❌")
187
+ else: # 模型成功加载或已经加载
 
 
188
  if selected_model_name_ui == "MMaDA-8B-Base":
189
  vis_lm_base = gr.update(visible=True)
190
  vis_mmu_base = gr.update(visible=True)
 
192
  vis_lm_mixcot = gr.update(visible=True)
193
  vis_mmu_mixcot = gr.update(visible=True)
194
  # thinking mode 已经在函数开头根据 is_mixcot_model_selected 设置为 True
 
195
  return (
196
  status_msg,
197
  vis_lm_base,