Update app.py
app.py CHANGED
@@ -72,7 +72,6 @@ def generate(
     num_inference_steps: int = 26,
     sampler: str = "Euler a",
     clip_skip: int = 1,
-    progress=gr.Progress(track_tqdm=True),
 ):
     if torch.cuda.is_available():
         pipe = StableDiffusionXLPipeline.from_pretrained(
@@ -109,7 +108,7 @@ def generate(
             output_type="pil",
         ).images
 
-        return img
+        return img, seed
 
     except Exception as e:
         print(f"An error occurred: {e}")
@@ -196,7 +195,7 @@ with gr.Blocks(
            preview=True,
            show_label=False
        )
-
+       with gr.Group():
            used_seed = gr.Number(label="Used Seed", interactive=False)
 
        gr.on(
@@ -223,7 +222,7 @@ with gr.Blocks(
            sampler,
            clip_skip
        ],
-        outputs=result,
+        outputs=[result, used_seed],
        api_name="run"
    )
 
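Taken together, the commit drops the gr.Progress argument from generate(), has the function return the seed it used alongside the images, and routes that second return value into a read-only "Used Seed" field via the event handler's outputs list. Below is a minimal sketch of that wiring pattern only; the prompt/seed components, the randomized-seed logic, and the stubbed image list are assumptions for illustration, not the Space's actual code.

# Sketch of the two-output wiring this commit sets up. Names mirror the diff;
# everything else (prompt/seed inputs, seed randomization, empty image list)
# is a hypothetical stand-in for the real StableDiffusionXLPipeline code.
import random
import gradio as gr

MAX_SEED = 2**32 - 1

def generate(prompt: str, seed: int = -1):
    # Resolve the seed up front so it can be reported back to the UI.
    if seed < 0:
        seed = random.randint(0, MAX_SEED)
    img = []  # stand-in for pipe(..., output_type="pil").images
    # Two return values -> two entries in outputs=[result, used_seed] below.
    return img, seed

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    seed = gr.Number(label="Seed", value=-1)
    run = gr.Button("Generate")
    result = gr.Gallery(preview=True, show_label=False)
    with gr.Group():
        used_seed = gr.Number(label="Used Seed", interactive=False)

    gr.on(
        triggers=[run.click, prompt.submit],
        fn=generate,
        inputs=[prompt, seed],
        outputs=[result, used_seed],
        api_name="run",
    )

demo.launch()

Because generate() now returns a tuple, the outputs list must name one component per returned value in the same order; with only outputs=result, the seed would have nowhere to go and Gradio would raise a mismatch between return values and output components.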