jokerbit committed on
Commit b32e748 · verified · 1 Parent(s): d79f069
Files changed (1)
  1. src/pipeline.py +2 -3
src/pipeline.py CHANGED
@@ -47,11 +47,10 @@ def load_pipeline() -> Pipeline:
     torch_dtype=torch.bfloat16,
 ).to("cuda")
 
-pipeline.to(memory_format=torch.channels_last)
+pipeline.transformer.to(memory_format=torch.channels_last)
 pipeline.transformer = torch.compile(pipeline.transformer, mode="max-autotune", fullgraph=False)
 quantize_(pipeline.vae, int8_weight_only())
-pipeline.vae = torch.compile(pipeline.vae, fullgraph=True, mode="max-autotune")
-
+
 PROMPT = 'semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
 with torch.inference_mode():
     for _ in range(4):
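For context, the load path after this commit looks roughly like the sketch below. Only the lines shown in the hunk come from the diff; the pipeline class, the checkpoint id, and the torchao import are assumptions, and the PROMPT warm-up loop that follows in the hunk is omitted because its body is outside the diff.

# Sketch of load_pipeline() after b32e748 (not the verbatim file).
# Assumptions: diffusers' DiffusionPipeline stands in for the repo's Pipeline type,
# quantize_/int8_weight_only come from torchao, and the checkpoint id is a placeholder.
import torch
from diffusers import DiffusionPipeline as Pipeline           # assumption
from torchao.quantization import quantize_, int8_weight_only  # assumption

def load_pipeline() -> Pipeline:
    pipeline = Pipeline.from_pretrained(
        "some-org/some-checkpoint",  # placeholder: real checkpoint not shown in the diff
        torch_dtype=torch.bfloat16,
    ).to("cuda")

    # channels_last is now applied to the transformer only (previously to the whole pipeline)
    pipeline.transformer.to(memory_format=torch.channels_last)
    pipeline.transformer = torch.compile(
        pipeline.transformer, mode="max-autotune", fullgraph=False
    )
    # the VAE keeps int8 weight-only quantization; its separate torch.compile call was dropped
    quantize_(pipeline.vae, int8_weight_only())
    return pipeline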