Tuchuanhuhuhu committed on
Commit
c9610d9
·
1 Parent(s): 1cb7a61

重新加入切换模型的功能

Browse files
Files changed (2) hide show
  1. ChuanhuChatbot.py +4 -3
  2. modules/models.py +5 -3
ChuanhuChatbot.py CHANGED
@@ -23,7 +23,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
23
  promptTemplates = gr.State(load_template(get_template_names(plain=True)[0], mode=2))
24
  user_api_key = gr.State(my_api_key)
25
  user_question = gr.State("")
26
- current_model = gr.State(get_model(MODELS[0], my_api_key))
27
 
28
  topic = gr.State("未命名对话历史记录")
29
 
@@ -79,7 +79,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
79
  else:
80
  usageTxt = gr.Markdown("**发送消息** 或 **提交key** 以显示额度", elem_id="usage_display")
81
  model_select_dropdown = gr.Dropdown(
82
- label="选择模型", choices=MODELS, multiselect=False, value=MODELS[0]
83
  )
84
  use_streaming_checkbox = gr.Checkbox(
85
  label="实时传输回答", value=True, visible=enable_streaming_option
@@ -287,9 +287,10 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
287
 
288
  two_column.change(update_doc_config, [two_column], None)
289
 
290
- # ChatGPT
291
  keyTxt.change(submit_key, keyTxt, [user_api_key, status_display]).then(**get_usage_args)
292
  keyTxt.submit(**get_usage_args)
 
293
 
294
  # Template
295
  systemPromptTxt.change(current_model.value.set_system_prompt, [systemPromptTxt], None)
 
23
  promptTemplates = gr.State(load_template(get_template_names(plain=True)[0], mode=2))
24
  user_api_key = gr.State(my_api_key)
25
  user_question = gr.State("")
26
+ current_model = gr.State(get_model(MODELS[0], my_api_key)[0])
27
 
28
  topic = gr.State("未命名对话历史记录")
29
 
 
79
  else:
80
  usageTxt = gr.Markdown("**发送消息** 或 **提交key** 以显示额度", elem_id="usage_display")
81
  model_select_dropdown = gr.Dropdown(
82
+ label="选择模型", choices=MODELS, multiselect=False, value=MODELS[0], interactive=True
83
  )
84
  use_streaming_checkbox = gr.Checkbox(
85
  label="实时传输回答", value=True, visible=enable_streaming_option
 
287
 
288
  two_column.change(update_doc_config, [two_column], None)
289
 
290
+ # LLM Models
291
  keyTxt.change(submit_key, keyTxt, [user_api_key, status_display]).then(**get_usage_args)
292
  keyTxt.submit(**get_usage_args)
293
+ model_select_dropdown.change(get_model, [model_select_dropdown, keyTxt, temperature_slider, top_p_slider, systemPromptTxt], [current_model, status_display], show_progress=True)
294
 
295
  # Template
296
  systemPromptTxt.change(current_model.value.set_system_prompt, [systemPromptTxt], None)
modules/models.py CHANGED
@@ -168,11 +168,13 @@ class OpenAIClient(BaseLLMModel):
168
  # logging.error(f"Error: {e}")
169
  continue
170
 
171
- def get_model(model_name, access_key=None, temprature=None, top_p=None, system_prompt = None) -> BaseLLMModel:
 
 
172
  model_type = ModelType.get_type(model_name)
173
  if model_type == ModelType.OpenAI:
174
- model = OpenAIClient(model_name, access_key, system_prompt, temprature, top_p)
175
- return model
176
 
177
  if __name__=="__main__":
178
  with open("config.json", "r") as f:
 
168
  # logging.error(f"Error: {e}")
169
  continue
170
 
171
+ def get_model(model_name, access_key=None, temperature=None, top_p=None, system_prompt = None) -> BaseLLMModel:
172
+ msg = f"模型设置为了: {model_name}"
173
+ logging.info(msg)
174
  model_type = ModelType.get_type(model_name)
175
  if model_type == ModelType.OpenAI:
176
+ model = OpenAIClient(model_name=model_name, api_key=access_key,system_prompt=system_prompt, temperature=temperature, top_p=top_p)
177
+ return model, msg
178
 
179
  if __name__=="__main__":
180
  with open("config.json", "r") as f: