Commit: Get backend
File changed: src/pipeline.py (+3 −3)
--- a/src/pipeline.py
+++ b/src/pipeline.py
@@ -54,10 +54,10 @@ def load_pipeline() -> Pipeline:
     ).to("cuda")
 
     pipeline.to(memory_format=torch.channels_last)
-    pipeline.transformer = torch.compile(pipeline.transformer)
+    pipeline.transformer = torch.compile(pipeline.transformer, backend="tensorrt")
     # quantize_(pipeline.vae, int8_weight_only())
-    pipeline.vae = torch.compile(pipeline.vae)
+    pipeline.vae = torch.compile(pipeline.vae, backend="tensorrt")
-    pipeline.set_progress_bar_config(disable=True)
+    # pipeline.set_progress_bar_config(disable=True)
 
 PROMPT = 'semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
 with torch.no_grad():