Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -304,8 +304,9 @@ def sample_then_run():
     steps = 25
     image = inference( prompt, negative_prompt, cfg, steps, seed)
     torch.save(network.value.proj, "model.pt" )
+    net = network.value.cpu()
 
-    return image, "model.pt" #, network.value.cpu()
+    return image, "model.pt", net #, network.value.cpu()
 
 
 class CustomImageDataset(Dataset):
@@ -539,7 +540,7 @@ with gr.Blocks(css="style.css") as demo:
                       outputs = [input_image, file_output])
 
 
-    sample.click(fn=sample_then_run, outputs=[input_image, file_output])
+    sample.click(fn=sample_then_run, outputs=[input_image, file_output, network])
 
     submit.click(
         fn=edit_inference, inputs=[prompt, negative_prompt, cfg, steps, seed, injection_step, a1, a2, a3, a4], outputs=[gallery]
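For context, this commit threads the sampled network through the Gradio event wiring: `sample_then_run` now returns a CPU copy of the network as a third value, and the `sample.click` call adds a matching third output component named `network`. The diff does not show how `network` is defined; the sketch below assumes it is a `gr.State` and stubs out the real sampling and inference code with placeholders, just to illustrate how the return values map positionally onto the `outputs` list.

```python
# Minimal sketch of the wiring pattern in this commit, not the app's actual code.
# The real sample_then_run generates an image, saves the projection weights,
# and returns the network moved to CPU; those steps are stubbed out here.
import gradio as gr

def sample_then_run():
    image = None                 # stands in for the generated image
    model_path = "model.pt"      # stands in for the saved weights file
    net = {"proj": None}         # stands in for network.value.cpu()
    # Return values are matched positionally to the outputs list below.
    return image, model_path, net

with gr.Blocks() as demo:
    network = gr.State()         # assumption: a State holding the sampled network
    input_image = gr.Image()
    file_output = gr.File()
    sample = gr.Button("Sample")
    # The third return value lands in the `network` State, so other event
    # handlers in the same session can reuse the sampled network later.
    sample.click(fn=sample_then_run, outputs=[input_image, file_output, network])

if __name__ == "__main__":
    demo.launch()
```

If `network` is indeed a `gr.State`, this change keeps the sampled model per user session rather than relying on a module-level variable.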