Update app.py
app.py CHANGED
@@ -48,17 +48,25 @@ def load_b_lora_to_unet(pipe, content_lora_model_id: str = '', style_lora_model_
         raise type(e)(f'failed to load_b_lora_to_unet, due to: {e}')
 
 def main(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
+
+    if randomize_seed:
+        seed = random.randint(0, MAX_SEED)
+
+    generator = torch.Generator().manual_seed(seed)
+
     content_B_LoRA_path = ''
-    style_B_LoRA_path = 'fffiloni/
+    style_B_LoRA_path = 'fffiloni/b_lora_tardi_4'
     content_alpha,style_alpha = 1,1.1
 
     load_b_lora_to_unet(pipeline, content_B_LoRA_path, style_B_LoRA_path, content_alpha, style_alpha)
-    prompt =
+    prompt = prompt
     image = pipeline(
         prompt,
-        generator=
-        num_images_per_prompt=1
-
+        generator=generator,
+        num_images_per_prompt=1,
+        width = width,
+        height = height,
+    ).images[0]
 
     pipeline.unload_lora_weights()
 
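For readability, here is a sketch of main() as it reads after this patch. It assumes pipeline, MAX_SEED, and load_b_lora_to_unet are defined earlier in app.py (they sit outside this hunk), and the final return is a guess, since the hunk ends before the function does; treat it as an illustration of the change, not the Space's exact code.

# Post-patch sketch of main(). `pipeline`, `MAX_SEED`, and `load_b_lora_to_unet`
# come from the parts of app.py not shown in this hunk; the return value is an
# assumption, as the hunk cuts off before the end of the function.
import random
import torch

def main(prompt, negative_prompt, seed, randomize_seed, width, height,
         guidance_scale, num_inference_steps):
    # Draw a fresh seed when requested, then seed a generator for reproducibility.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)

    # Attach the style B-LoRA to the UNet; no content LoRA is used in this commit.
    content_B_LoRA_path = ''
    style_B_LoRA_path = 'fffiloni/b_lora_tardi_4'
    content_alpha, style_alpha = 1, 1.1
    load_b_lora_to_unet(pipeline, content_B_LoRA_path, style_B_LoRA_path,
                        content_alpha, style_alpha)

    # Generate one image with the requested size and the seeded generator.
    image = pipeline(
        prompt,
        generator=generator,
        num_images_per_prompt=1,
        width=width,
        height=height,
    ).images[0]

    # Remove the LoRA weights so the next request starts from the base weights.
    pipeline.unload_lora_weights()
    return image, seed  # assumed return; not visible in the hunk

The seed handling follows the common Diffusers pattern: draw a random seed only when randomize_seed is set, then pass a manually seeded torch.Generator to the pipeline so the same seed reproduces the same image.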