Commit ba42139
Parent(s): e731c90
Upload web-ui.py
web-ui.py CHANGED
@@ -60,7 +60,7 @@ def convert_model(checkpoint_path, output_path):
 def load_model(model_name):
     if model_name in model_cache:
         return model_cache[model_name]
-
+    print(f"loading model {model_name}")
     # Limit cache size
     if len(model_cache) >= max_cache_size:
         model_cache.pop(next(iter(model_cache)))
@@ -96,9 +96,10 @@ def load_model(model_name):
     return ip_model

 # Function to process image and generate output
-def generate_image(input_image, positive_prompt, negative_prompt, width, height, model_name, num_inference_steps, seed, randomize_seed, num_images, batch_size, enable_shortcut, s_scale):
+def generate_image(input_image, positive_prompt, negative_prompt, width, height, model_name, num_inference_steps, seed, randomize_seed, num_images, batch_size, enable_shortcut, s_scale, custom_model_path):
     saved_images = []
-
+    if custom_model_path:
+        model_name = custom_model_path
     # Load and prepare the model
     ip_model = load_model(model_name)

@@ -170,6 +171,7 @@ with gr.Blocks() as demo:
             negative_prompt = gr.Textbox(label="Negative Prompt")
             with gr.Row():
                 model_selector = gr.Dropdown(label="Select Model", choices=static_model_names, value=static_model_names[0])
+                custom_model_path = gr.Textbox(label="Custom Model Path (Optional)")

         with gr.Column():
             output_gallery = gr.Gallery(label="Generated Images")
@@ -184,8 +186,8 @@ with gr.Blocks() as demo:

     generate_btn.click(
         generate_image,
-        inputs=[input_image, positive_prompt, negative_prompt, width, height, model_selector, num_inference_steps, seed, randomize_seed, num_images, batch_size, enable_shortcut, s_scale],
-        outputs=[output_gallery, output_text, display_seed]
+        inputs=[input_image, positive_prompt, negative_prompt, width, height, model_selector, num_inference_steps, seed, randomize_seed, num_images, batch_size, enable_shortcut, s_scale, custom_model_path],
+        outputs=[output_gallery, output_text, display_seed]
     )

     convert_btn.click(
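For readers who want the pieces of this commit in one place, the sketch below is a minimal, self-contained reconstruction of the behaviour it wires up: an optional "Custom Model Path" textbox whose non-empty value overrides the dropdown choice before load_model() is called, plus the FIFO-style cache eviction that load_model() already uses. The cache size, example model names, and stub loader are assumptions for illustration, not values taken from web-ui.py.

# Minimal sketch of the custom-model-path override added in this commit.
# max_cache_size, the placeholder loader, and the example model names are
# assumptions; only the control flow mirrors the diff above.
import gradio as gr

model_cache = {}      # insertion-ordered dict used as a small cache
max_cache_size = 2    # assumed limit for this sketch

def load_model(model_name):
    if model_name in model_cache:
        return model_cache[model_name]
    print(f"loading model {model_name}")
    # Evict the oldest cached entry once the cache is full
    # (plain dicts preserve insertion order in Python 3.7+).
    if len(model_cache) >= max_cache_size:
        model_cache.pop(next(iter(model_cache)))
    model = f"<model loaded from {model_name}>"  # stand-in for the real loader
    model_cache[model_name] = model
    return model

def generate_image(model_name, custom_model_path):
    # The commit's core change: a non-empty custom path takes precedence
    # over the dropdown selection.
    if custom_model_path:
        model_name = custom_model_path
    ip_model = load_model(model_name)
    return f"generated with {ip_model}"

with gr.Blocks() as demo:
    model_selector = gr.Dropdown(label="Select Model",
                                 choices=["example/model-a", "example/model-b"],
                                 value="example/model-a")
    custom_model_path = gr.Textbox(label="Custom Model Path (Optional)")
    output_text = gr.Textbox(label="Result")
    generate_btn = gr.Button("Generate")
    generate_btn.click(generate_image,
                       inputs=[model_selector, custom_model_path],
                       outputs=[output_text])

if __name__ == "__main__":
    demo.launch()

Leaving the textbox empty keeps the old behaviour (the dropdown value is loaded); typing a path simply replaces model_name before the cached loader runs, so custom models share the same cache and eviction policy as the built-in ones.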