Test: Add upscale to generation
app.py CHANGED

@@ -144,7 +144,7 @@ def generate(
     upscaler_pipe = StableDiffusionXLImg2ImgPipeline(**pipe.components)

     try:
-
+        gr.Info("Generating image...")
         latents = pipe(
             prompt = prompt,
             negative_prompt = negative_prompt,
@@ -159,6 +159,7 @@ def generate(

         upscaled_latents = upscale(latents, "nearest-exact", 2.0)

+        gr.Info("Upscaling...")
         img = upscaler_pipe(
             prompt=prompt,
             negative_prompt=negative_prompt,
@@ -180,7 +181,7 @@ def generate(

 if torch.cuda.is_available():
     pipe = load_pipeline(MODEL)
-
+    gr.Info("Loaded on Device!")
 else:
     pipe = None

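The added lines in this commit are the three gr.Info(...) calls, which show transient progress toasts in the Gradio UI ("Generating image...", "Upscaling...", "Loaded on Device!"). The upscaling path itself appears only as context: upscale(latents, "nearest-exact", 2.0) resizes the latents before they are refined by the StableDiffusionXLImg2ImgPipeline built from the base pipeline's components. The upscale() helper is not defined anywhere in this diff; a minimal sketch, assuming it simply resizes the latent tensor with torch.nn.functional.interpolate, could look like this:

import torch
import torch.nn.functional as F

def upscale(latents: torch.Tensor, mode: str, scale_factor: float) -> torch.Tensor:
    # Hypothetical reconstruction of the upscale() helper referenced in app.py
    # (its real definition is not part of this diff).
    # SDXL latents have shape [batch, 4, height/8, width/8]; resizing them
    # spatially (e.g. mode="nearest-exact", scale_factor=2.0) lets the
    # follow-up img2img pass refine a 2x larger image ("high-res fix" style).
    return F.interpolate(latents, scale_factor=scale_factor, mode=mode)

For this to fit the code shown above, the base pipe(...) call would presumably return latents rather than decoded images (output_type="latent"), and the upscaled tensor would be passed to upscaler_pipe as its image input; neither detail is visible in the hunks of this commit.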