craftgamesnetwork committed
Commit 090a8df · verified · 1 Parent(s): 53c9f0c

Update app.py

Files changed (1)
  1. app.py +6 -4
app.py CHANGED
@@ -60,18 +60,18 @@ def generate(
 
     if not use_img2img:
         pipe = DiffusionPipeline.from_pretrained(model, torch_dtype=torch.float16)
-        pipe.unet.set_default_attn_processor()
+
         if use_vae:
             vae = AutoencoderKL.from_pretrained(vaecall, torch_dtype=torch.float16)
             pipe = DiffusionPipeline.from_pretrained(model, vae=vae, torch_dtype=torch.float16)
-            pipe.unet.set_default_attn_processor()
+
     if use_img2img:
         pipe = AutoPipelineForImage2Image.from_pretrained(model, torch_dtype=torch.float16)
-        pipe.unet.set_default_attn_processor()
+
         if use_vae:
             vae = AutoencoderKL.from_pretrained(vaecall, torch_dtype=torch.float16)
             pipe = AutoPipelineForImage2Image.from_pretrained(model, vae=vae, torch_dtype=torch.float16)
-            pipe.unet.set_default_attn_processor()
+
         response = requests.get(url)
         init_image = Image.open(BytesIO(response.content)).convert("RGB")
         init_image = init_image.resize((width, height))
@@ -82,6 +82,8 @@ def generate(
 
     else:
         pipe.to(device)
+        pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
+        pipe.unet.set_default_attn_processor()
 
     generator = torch.Generator().manual_seed(seed)
 
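
For context, the change drops the unconditional pipe.unet.set_default_attn_processor() call after each pipeline construction and instead compiles the UNet and resets its attention processor in the else: branch, right after the pipeline is moved to device. A minimal, self-contained sketch of the text-to-image path as it stands after this commit; the checkpoint name, device, and seed below are placeholder assumptions, and the surrounding if/else from app.py is not reproduced:

import torch
from diffusers import DiffusionPipeline

# Placeholders standing in for app.py's `model`, `device`, and `seed`.
model = "stabilityai/stable-diffusion-xl-base-1.0"
device = "cpu"
seed = 0

# Text-to-image branch: build the pipeline in fp16, as in the diff above.
pipe = DiffusionPipeline.from_pretrained(model, torch_dtype=torch.float16)

# After this commit, these two calls happen only in the `else:` branch,
# immediately after the pipeline is moved to `device`.
pipe.to(device)
pipe.unet = torch.compile(pipe.unet, mode="reduce-overhead", fullgraph=True)
pipe.unet.set_default_attn_processor()

generator = torch.Generator().manual_seed(seed)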