Update app.py
app.py CHANGED
@@ -461,7 +461,8 @@ class ModelManager:
         ).to(DEVICE)
         # Bind the custom LoRA call to the pipeline.
         self.pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(self.pipe)
-
+
+    @spaces.GPU(duration=100)
     def generate_image(self, prompt_mash, steps, seed, cfg_scale, width, height, lora_scale):
         """Generates an image using the text-to-image pipeline."""
         self.pipe.to(DEVICE)
@@ -584,7 +585,7 @@ class Frontend:
             print("Warning: lora.py not found, using placeholder LoRAs.")
             pass
 
-    @spaces.GPU(duration=
+    @spaces.GPU(duration=100)
     def run_lora(self, prompt, image_input, image_strength, cfg_scale, steps, selected_index,
                  randomize_seed, seed, width, height, lora_scale, use_enhancer,
                  progress=gr.Progress(track_tqdm=True)):
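The commit wraps both GPU-bound entry points, generate_image and run_lora, in @spaces.GPU(duration=100), so that on a ZeroGPU Space ("Running on Zero") a GPU is attached for up to 100 seconds per call and released afterwards. Below is a minimal sketch of the same pattern, assuming a FLUX-style diffusers pipeline; the model id, function names, and defaults are illustrative and not taken from this Space's app.py.

# Sketch: ZeroGPU decorator pattern (assumed names; not the Space's actual code).
import spaces
import torch
from diffusers import DiffusionPipeline

DEVICE = "cuda"

# Load the pipeline once at startup; on ZeroGPU a CUDA device is only
# available while a @spaces.GPU-decorated call is running.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",   # assumed base model, for illustration
    torch_dtype=torch.bfloat16,
)

@spaces.GPU(duration=100)   # request the GPU for up to 100 seconds per call
def generate(prompt: str, steps: int = 28, seed: int = 0):
    pipe.to(DEVICE)
    generator = torch.Generator(device=DEVICE).manual_seed(seed)
    result = pipe(prompt, num_inference_steps=steps, generator=generator)
    return result.images[0]

Without the decorator, GPU work on a ZeroGPU Space fails because no CUDA device is attached outside decorated functions; the duration argument tells ZeroGPU how long a single call is allowed to hold the GPU.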