Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -303,10 +303,9 @@ def sample_then_run():
     cfg = 3.0
     steps = 25
     image = inference( prompt, negative_prompt, cfg, steps, seed)
-    print("doneeee111111")
     torch.save(network.value.proj, "model.pt" )
-
-    return image, "model.pt"
+
+    return image, "model.pt" #, network.value.cpu()
 
 
 class CustomImageDataset(Dataset):
@@ -540,7 +539,7 @@ with gr.Blocks(css="style.css") as demo:
                         outputs = [input_image, file_output])
 
 
-    sample.click(fn=sample_then_run, outputs=[input_image, file_output
+    sample.click(fn=sample_then_run, outputs=[input_image, file_output])
 
     submit.click(
         fn=edit_inference, inputs=[prompt, negative_prompt, cfg, steps, seed, injection_step, a1, a2, a3, a4], outputs=[gallery]
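For reference, the second hunk restores the closing "])" on the sample.click(...) wiring, which follows the standard Gradio Blocks event pattern. The sketch below is a minimal, hypothetical reconstruction, not the actual app.py: the component names (input_image, file_output, sample) come from the diff, while the handler body and everything else is assumed.

import numpy as np
import gradio as gr

def sample_then_run():
    # Placeholder for the app's real sampling/inference and torch.save logic.
    image = np.zeros((64, 64, 3), dtype=np.uint8)  # dummy preview image
    return image, "model.pt"  # image plus saved-weights path, as in the diff

with gr.Blocks() as demo:
    input_image = gr.Image()
    file_output = gr.File()
    sample = gr.Button("Sample")
    # The corrected call from the diff: the handler's two return values
    # populate the image preview and the downloadable file component.
    sample.click(fn=sample_then_run, outputs=[input_image, file_output])

demo.launch()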