jokerbit committed on
Commit
34e48c1
·
verified ·
1 Parent(s): 8129208

Get backend - 9

Browse files
Files changed (1) hide show
  1. src/pipeline.py +4 -3
src/pipeline.py CHANGED
@@ -55,10 +55,11 @@ def load_pipeline() -> Pipeline:
55
  ).to("cuda")
56
 
57
  pipeline.transformer.to(memory_format=torch.channels_last)
58
- pipeline.transformer = torch.compile(pipeline.transformer, fullgraph=True, mode="max-autotune")
 
59
  # quantize_(pipeline.vae, int8_weight_only())
60
- # pipeline.vae = torch.compile(pipeline.vae, mode="max-autotune")
61
- pipeline.set_progress_bar_config(disable=True)
62
 
63
  PROMPT = 'semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
64
  with torch.no_grad():
 
55
  ).to("cuda")
56
 
57
  pipeline.transformer.to(memory_format=torch.channels_last)
58
+ pipeline.vae.to(memory_format=torch.channels_last)
59
+ pipeline.transformer = torch.compile(pipeline.transformer, mode="max-autotune")
60
  # quantize_(pipeline.vae, int8_weight_only())
61
+ pipeline.vae = torch.compile(pipeline.vae, mode="max-autotune")
62
+ # pipeline.set_progress_bar_config(disable=True)
63
 
64
  PROMPT = 'semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
65
  with torch.no_grad():