Fixed model selection.
app/gradio_meta_prompt.py CHANGED
@@ -806,7 +806,7 @@ with gr.Blocks(title='Meta Prompt') as demo:
             model_name_states["acceptance_criteria"],
             model_temperature_states["acceptance_criteria"],
             prompt_template_group],
-        outputs=[
+        outputs=[acceptance_criteria_input, logs_chatbot]
     )
     generate_initial_system_message_button.click(
         generate_initial_system_message,
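The gradio_meta_prompt.py hunk binds the click handler's return values to the acceptance-criteria textbox and the log chatbot via the `outputs=` list. As a minimal sketch of that wiring (not the project's actual code: the callback name, component set, and the `gr.State` placeholders below are illustrative assumptions), a Gradio `Button.click` call needs one output component per value the callback returns:

    import gradio as gr

    def generate_acceptance_criteria(user_message, model_name, temperature):
        # Hypothetical callback: returns one value per bound output component.
        criteria = f"Acceptance criteria for {user_message!r} ({model_name}, T={temperature})"
        logs = [[user_message, criteria]]  # chatbot history as [user, assistant] pairs
        return criteria, logs

    with gr.Blocks(title="Meta Prompt") as demo:
        user_message_input = gr.Textbox(label="User Message")
        acceptance_criteria_input = gr.Textbox(label="Acceptance Criteria")
        logs_chatbot = gr.Chatbot(label="Logs")
        generate_button = gr.Button("Generate Acceptance Criteria")

        generate_button.click(
            generate_acceptance_criteria,
            inputs=[user_message_input,
                    gr.State("gpt-4o-mini"),  # stand-in for model_name_states["acceptance_criteria"]
                    gr.State(0.7)],           # stand-in for model_temperature_states["acceptance_criteria"]
            # One output component per returned value: criteria text, then the chat log.
            outputs=[acceptance_criteria_input, logs_chatbot],
        )

    if __name__ == "__main__":
        demo.launch()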
app/gradio_meta_prompt_utils.py CHANGED
@@ -214,6 +214,8 @@ def on_prompt_model_tab_state_change(config, model_tab_select_state,
             advanced_optimizer_model_name, \
             config.default_llm_temperature, \
             advanced_optimizer_model_name, \
+            config.default_llm_temperature, \
+            advanced_executor_model_name, \
             config.default_llm_temperature
     elif model_tab_select_state == 'Expert':
         return expert_prompt_initial_developer_model_name, \
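The gradio_meta_prompt_utils.py hunk extends one branch's return tuple by two values (a default temperature and `advanced_executor_model_name`) so the branch returns one value per component wired to the tab-change event. A minimal sketch of that pattern follows; the signature, branch names, and component count are assumptions, since the real function takes more parameters and covers more tabs:

    # Assumed shape only -- illustrates why the branch shown in the hunk needed
    # two extra return values: a tab-change handler must return exactly one
    # value per output component bound to the event.
    def on_prompt_model_tab_state_change(config, model_tab_select_state,
                                         simple_model_name,
                                         advanced_optimizer_model_name,
                                         advanced_executor_model_name):
        if model_tab_select_state == 'Simple':
            # A single model drives every model/temperature output.
            return (simple_model_name, config.default_llm_temperature,
                    simple_model_name, config.default_llm_temperature,
                    simple_model_name, config.default_llm_temperature)
        elif model_tab_select_state == 'Advanced':
            # Before the fix this branch returned two values fewer than the
            # number of bound outputs; the added temperature and executor model
            # keep the tuple length in step with the other branches.
            return (advanced_optimizer_model_name, config.default_llm_temperature,
                    advanced_optimizer_model_name, config.default_llm_temperature,
                    advanced_executor_model_name, config.default_llm_temperature)
        raise ValueError(f"Unknown model tab: {model_tab_select_state}")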