jokerbit committed
Commit 6acd771 · verified · 1 parent: b32e748
Files changed (1)
  1. src/pipeline.py +3 -2
src/pipeline.py CHANGED
@@ -47,9 +47,10 @@ def load_pipeline() -> Pipeline:
         torch_dtype=torch.bfloat16,
     ).to("cuda")
 
-    pipeline.transformer.to(memory_format=torch.channels_last)
-    pipeline.transformer = torch.compile(pipeline.transformer, mode="max-autotune", fullgraph=False)
+    pipeline.to(memory_format=torch.channels_last)
+    pipeline.transformer = torch.compile(pipeline.transformer)
     quantize_(pipeline.vae, int8_weight_only())
+    pipeline.vae = torch.compile(pipeline.vae)
 
     PROMPT = 'semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
     with torch.inference_mode():
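The notable part of this change is the VAE handling: its weights are quantized with torchao and the quantized module is then wrapped in torch.compile, while channels_last is now applied to the whole pipeline rather than only the transformer and the transformer drops the "max-autotune" compile mode. Below is a minimal, self-contained sketch of the quantize-then-compile step on a toy module; the module, shapes, and dtype are illustrative assumptions, not code from this repo.

import torch
import torch.nn as nn
from torchao.quantization import quantize_, int8_weight_only

# Toy stand-in for pipeline.vae: quantize_ converts the Linear weights to
# int8 in place, and torch.compile then wraps the already-quantized module,
# mirroring the new line added in the diff above.
model = nn.Sequential(
    nn.Linear(256, 512),
    nn.GELU(),
    nn.Linear(512, 256),
).to("cuda", dtype=torch.bfloat16)

quantize_(model, int8_weight_only())  # int8 weight-only quantization (torchao)
model = torch.compile(model)          # compile the quantized module

x = torch.randn(8, 256, device="cuda", dtype=torch.bfloat16)
with torch.inference_mode():
    y = model(x)  # the first call triggers compilation

torchao's examples apply quantize_ first and then torch.compile, which is the same order this commit uses for the VAE.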