Update app.py
app.py CHANGED
@@ -64,6 +64,14 @@ def generate(
     num_images: int = 1, # Number of images to generate
     progress=gr.Progress(track_tqdm=True),
 ):
+    # Show the loading animation
+    loading_html = """
+    <div id="loading-animation">
+        <iframe src="https://prithivmlmods-hamster-static.static.hf.space/index.html" width="100%" height="200" style="border:none;"></iframe>
+    </div>
+    """
+    gr.HTML.update(loading_html, visible=True)
+
     seed = int(randomize_seed_fn(seed, randomize_seed))
     generator = torch.Generator(device=device).manual_seed(seed)
 
@@ -93,6 +101,10 @@ def generate(
     images.extend(pipe(**batch_options).images)
 
     image_paths = [save_image(img) for img in images]
+
+    # Hide the loading animation
+    gr.HTML.update("", visible=False)
+
     return image_paths, seed
 
 examples = [
@@ -124,7 +136,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
         container=False,
     )
     run_button = gr.Button("Run", scale=0)
-    result = gr.Gallery(label="Result", columns=1, show_label=False)
+    result = gr.Gallery(label="Result", columns=1, show_label=False)
     loading_animation = gr.HTML(
         """
         <div id="loading-animation" style="display:none;">
@@ -202,22 +214,20 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
         api_name=False,
     )
 
-    [eight removed lines whose content was not captured in the page]
+    prompt.submit(fn=generate, inputs=[
+        prompt,
+        negative_prompt,
+        use_negative_prompt,
+        seed,
+        width,
+        height,
+        guidance_scale,
+        num_inference_steps,
+        randomize_seed,
+        num_images
+    ], outputs=[result, seed], queue=True)
 
-    gr.on(
-        triggers=[
-            prompt.submit,
-            run_button.click,
-        ],
-        fn=generate,
-        inputs=[
+    run_button.click(fn=generate, inputs=[
         prompt,
         negative_prompt,
         use_negative_prompt,
@@ -228,11 +238,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
         num_inference_steps,
        randomize_seed,
         num_images
-        ],
-        outputs=[result, seed],
-        api_name="run",
-        postprocess_fn=stop_loading,
-    )
+    ], outputs=[result, seed], queue=True)
 
 if __name__ == "__main__":
     demo.queue(max_size=50).launch()
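For readers unfamiliar with the wiring used above: one common way to toggle a hidden gr.HTML placeholder such as loading_animation is to return gr.update(visible=...) from events in a .click()/.then() chain. The sketch below is illustrative only and is not code from this Space; fake_generate, the component labels, and the two-second sleep are stand-ins.

import time

import gradio as gr


def fake_generate(prompt):
    # Illustrative stand-in for the real diffusion pipeline call.
    time.sleep(2)
    return f"done: {prompt}"


with gr.Blocks() as demo:
    prompt = gr.Text(label="Prompt")
    run_button = gr.Button("Run")
    # Hidden placeholder: shown while the job runs, hidden again afterwards.
    loading_animation = gr.HTML("<div id='loading-animation'>Loading...</div>", visible=False)
    result = gr.Textbox(label="Result")

    run_button.click(
        fn=lambda: gr.update(visible=True), inputs=None, outputs=loading_animation
    ).then(
        fn=fake_generate, inputs=prompt, outputs=result
    ).then(
        fn=lambda: gr.update(visible=False), inputs=None, outputs=loading_animation
    )

if __name__ == "__main__":
    demo.launch()

The same three-step chain can also be attached to prompt.submit so that pressing Enter in the textbox behaves the same as clicking Run.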