jokerbit committed on
Commit
faba203
·
verified ·
1 Parent(s): 194a53d

Update src/pipeline.py

Browse files
Files changed (1) hide show
  1. src/pipeline.py +4 -2
src/pipeline.py CHANGED
@@ -1,4 +1,4 @@
1
- import gc
2
  import os
3
  from typing import TypeAlias
4
 
@@ -10,7 +10,9 @@ from pipelines.models import TextToImageRequest
10
  from torch import Generator
11
  from torchao.quantization import quantize_, int8_weight_only
12
  from transformers import T5EncoderModel, CLIPTextModel, logging
 
13
 
 
14
 
15
  Pipeline: TypeAlias = FluxPipeline
16
  torch.backends.cudnn.benchmark = True
@@ -51,7 +53,7 @@ def load_pipeline() -> Pipeline:
51
 
52
  pipeline.to(memory_format=torch.channels_last)
53
  quantize_(pipeline.vae, int8_weight_only())
54
- pipeline.vae = torch.compile(pipeline.vae, mode="max-autotune", fullgraph=True)
55
  with torch.inference_mode():
56
  for _ in range(2):
57
  pipeline("cat", num_inference_steps=4)
 
1
+ # partial
2
  import os
3
  from typing import TypeAlias
4
 
 
10
  from torch import Generator
11
  from torchao.quantization import quantize_, int8_weight_only
12
  from transformers import T5EncoderModel, CLIPTextModel, logging
13
+ from functools import partial
14
 
15
+ my_partial_compile = partial(torch.compile, mode="max-autotune")
16
 
17
  Pipeline: TypeAlias = FluxPipeline
18
  torch.backends.cudnn.benchmark = True
 
53
 
54
  pipeline.to(memory_format=torch.channels_last)
55
  quantize_(pipeline.vae, int8_weight_only())
56
+ pipeline.vae = my_partial_compile(pipeline.vae)
57
  with torch.inference_mode():
58
  for _ in range(2):
59
  pipeline("cat", num_inference_steps=4)