Commit fc9e5bf (verified) by zhangyang-0123
Parent(s): b5d1ac9

Update app.py

Files changed (1):
  1. app.py +0 -3
app.py CHANGED
@@ -53,7 +53,6 @@ def binary_mask_eval(args, model):
         ),
         map_location="cpu",
     )
-    torch.cuda.empty_cache()

     # reload the original model
     if model == "sdxl":
@@ -64,7 +63,6 @@ def binary_mask_eval(args, model):
         pipe = FluxPipeline.from_pretrained(
             "black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16
         ).to("cpu")
-        torch.cuda.empty_cache()

     print("prune complete")
     return pipe, pruned_pipe
@@ -79,7 +77,6 @@ def generate_images(prompt, seed, steps, pipe, pruned_pipe):
     original_image = pipe(
         prompt=prompt, generator=g_cpu, num_inference_steps=steps
     ).images[0]
-    torch.cuda.empty_cache()
     g_cpu = torch.Generator("cuda").manual_seed(seed)
     ecodiff_image = pruned_pipe(
         prompt=prompt, generator=g_cpu, num_inference_steps=steps
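Note: torch.cuda.empty_cache() only releases blocks held by PyTorch's CUDA caching allocator, so it is effectively a no-op for pipelines that are kept on CPU, as both are here immediately after loading. Below is a minimal usage sketch of the two functions touched by this commit, based only on the signatures visible in the hunks above; the "flux" model key, the prompt, and the seed/steps values are illustrative assumptions, and the fields of args are not shown in this diff.

# Hypothetical usage sketch for the two functions changed in this commit.
# Only the signatures come from the hunks above; everything else is assumed.
from app import binary_mask_eval, generate_images


def run_demo(args):
    # `args` must carry whatever binary_mask_eval reads (e.g. the pruned
    # checkpoint path); those fields are not visible in this diff.
    # Assumption: the function returns (pipe, pruned_pipe), both built on CPU.
    pipe, pruned_pipe = binary_mask_eval(args, model="flux")

    # Per the last hunk, generate_images seeds a torch.Generator("cuda") and
    # runs the same prompt through both the original and the pruned pipeline.
    generate_images(
        prompt="a photo of an astronaut riding a horse",
        seed=0,
        steps=4,
        pipe=pipe,
        pruned_pipe=pruned_pipe,
    )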
 