Krebzonide
committed on
Commit
·
ea3b1d6
1
Parent(s):
ae9efe4
Update app.py
Browse files
app.py
CHANGED
@@ -23,7 +23,6 @@ pipe = StableDiffusionXLPipeline.from_single_file(
|
|
23 |
variant = "fp16",
|
24 |
vae = vae,
|
25 |
use_safetensors = True,
|
26 |
-
scheduler_type = "ddim",
|
27 |
use_auth_token="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"
|
28 |
)
|
29 |
pipe.enable_model_cpu_offload()
|
@@ -32,6 +31,7 @@ pipeRefiner = StableDiffusionXLImg2ImgPipeline.from_pretrained(
|
|
32 |
"stabilityai/stable-diffusion-xl-refiner-1.0",
|
33 |
torch_dtype=torch.float16,
|
34 |
variant="fp16",
|
|
|
35 |
use_safetensors=True
|
36 |
)
|
37 |
pipeRefiner.enable_model_cpu_offload()
|
@@ -57,16 +57,17 @@ def generate(prompt, neg_prompt, samp_steps, guide_scale, batch_size, seed, heig
|
|
57 |
guidance_scale=guide_scale,
|
58 |
#cross_attention_kwargs={"scale": lora_scale},
|
59 |
num_images_per_prompt=batch_size,
|
60 |
-
height=height
|
61 |
-
width=width
|
62 |
generator=torch.manual_seed(seed),
|
|
|
|
|
63 |
).images
|
64 |
-
|
65 |
prompt,
|
66 |
image=images,
|
67 |
-
num_inference_steps=
|
68 |
-
|
69 |
-
width=width
|
70 |
).images
|
71 |
return [(img, f"Image {i+1}") for i, img in enumerate(imagesRefined)]
|
72 |
|
|
|
23 |
variant = "fp16",
|
24 |
vae = vae,
|
25 |
use_safetensors = True,
|
|
|
26 |
use_auth_token="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"
|
27 |
)
|
28 |
pipe.enable_model_cpu_offload()
|
|
|
31 |
"stabilityai/stable-diffusion-xl-refiner-1.0",
|
32 |
torch_dtype=torch.float16,
|
33 |
variant="fp16",
|
34 |
+
vae=vae,
|
35 |
use_safetensors=True
|
36 |
)
|
37 |
pipeRefiner.enable_model_cpu_offload()
|
|
|
57 |
guidance_scale=guide_scale,
|
58 |
#cross_attention_kwargs={"scale": lora_scale},
|
59 |
num_images_per_prompt=batch_size,
|
60 |
+
height=height,
|
61 |
+
width=width,
|
62 |
generator=torch.manual_seed(seed),
|
63 |
+
output_type="latent",
|
64 |
+
denoising_end=0.8
|
65 |
).images
|
66 |
+
images = pipeRefiner(
|
67 |
prompt,
|
68 |
image=images,
|
69 |
+
num_inference_steps=samp_steps,
|
70 |
+
denoising_start=0.8
|
|
|
71 |
).images
|
72 |
return [(img, f"Image {i+1}") for i, img in enumerate(imagesRefined)]
|
73 |
|