Spaces: Running on Zero
empty cache after generating an image #587
by vedalken - opened
app.py CHANGED
@@ -184,6 +184,7 @@ def inference(
             "sampler": sampler,
         },
     )
+    torch.cuda.empty_cache()

     return out_image["images"][0], gr.update(visible=True), gr.update(visible=True), my_seed

@@ -274,7 +275,7 @@ with gr.Blocks(css=css) as app_with_history:
         app.render()
     with gr.Tab("Past generations"):
         user_history.render()
-
+
 app_with_history.queue(max_size=20,api_open=False )

 if __name__ == "__main__":
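For reference, the pattern this PR applies is to release PyTorch's cached GPU memory right after the generated image has been materialized, so the next request (or another process sharing the ZeroGPU worker) starts from a smaller reserved-memory footprint. Below is a minimal sketch of that pattern, not the Space's actual code: `generate` and `pipe` are illustrative names, and the assumption is a diffusers-style pipeline whose call returns an object with an `.images` list.

import torch

def generate(pipe, prompt: str):
    # `pipe` is assumed to be a diffusers-style pipeline already moved to CUDA;
    # calling it returns an object whose `.images` attribute is a list of PIL images.
    result = pipe(prompt)
    image = result.images[0]

    # Same idea as the diff above: once the output has been decoded to a PIL image,
    # hand the caching allocator's unused blocks back to the CUDA driver.
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    return image

Note that torch.cuda.empty_cache() only returns memory the caching allocator is holding but no longer using; tensors that are still referenced, such as the pipeline weights, remain allocated.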