alfredplpl
committed on
Commit
·
db86cd0
1
Parent(s):
70d1eaa
Update app.py
Browse files
app.py
CHANGED
@@ -150,16 +150,19 @@ def txt_to_img(prompt, neg_prompt, guidance, steps, width, height, generator,sup
|
|
150 |
generator = generator).images
|
151 |
pipe=pipe.to("cpu")
|
152 |
upscaler=upscaler.to("cuda")
|
153 |
-
|
154 |
prompt=prompt,
|
155 |
negative_prompt = neg_prompt,
|
156 |
image=low_res_latents,
|
157 |
num_inference_steps=20,
|
158 |
guidance_scale=0,
|
|
|
159 |
generator=generator,
|
160 |
-
)
|
161 |
pipe=pipe.to("cuda")
|
162 |
upscaler=upscaler.to("cpu")
|
|
|
|
|
163 |
else:
|
164 |
result = pipe(
|
165 |
prompt,
|
|
|
150 |
generator = generator).images
|
151 |
pipe=pipe.to("cpu")
|
152 |
upscaler=upscaler.to("cuda")
|
153 |
+
latents = upscaler(
|
154 |
prompt=prompt,
|
155 |
negative_prompt = neg_prompt,
|
156 |
image=low_res_latents,
|
157 |
num_inference_steps=20,
|
158 |
guidance_scale=0,
|
159 |
+
output_type="latent",
|
160 |
generator=generator,
|
161 |
+
).images
|
162 |
pipe=pipe.to("cuda")
|
163 |
upscaler=upscaler.to("cpu")
|
164 |
+
with torch.no_grad():
|
165 |
+
result = pipeline.decode_latents(latents)
|
166 |
else:
|
167 |
result = pipe(
|
168 |
prompt,
|