Spaces: Running on A10G
fix: prior.
app.py CHANGED
@@ -112,7 +112,7 @@ def load_pipeline(
     if pipeline_to_benchmark not in ["Würstchen (T2I)", "Kandinsky 2.2 (T2I)"]:
         pipeline.unet.to(memory_format=torch.channels_last)
     elif pipeline_to_benchmark == "Würstchen (T2I)":
-        pipeline.
+        pipeline.prior_prior.to(memory_format=torch.channels_last)
         pipeline.decoder.to(memory_format=torch.channels_last)
     elif pipeline_to_benchmark == "Kandinsky 2.2 (T2I)":
         pipeline.unet.to(memory_format=torch.channels_last)
@@ -130,8 +130,8 @@ def load_pipeline(
             pipeline.unet, mode="reduce-overhead", fullgraph=True
         )
     elif pipeline_to_benchmark == "Würstchen (T2I)":
-        pipeline.
-        pipeline.
+        pipeline.prior_prior = torch.compile(
+            pipeline.prior_prior, mode="reduce-overhead", fullgraph=True
         )
         pipeline.decoder = torch.compile(
             pipeline.decoder, mode="reduce-overhead", fullgraph=True
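For context, the patched branch targets the Würstchen combined pipeline, whose prior denoiser is exposed as prior_prior and whose decoder denoiser as decoder; the fix points both the channels_last conversion and the torch.compile call at pipeline.prior_prior. Below is a minimal sketch of the same optimizations applied outside the Space; the checkpoint id, dtype, device, and prompt are assumptions and are not taken from app.py.

    # Minimal sketch: channels_last + torch.compile on the Würstchen combined
    # pipeline, mirroring the two patched hunks above. The checkpoint id,
    # dtype, device, and prompt are assumptions, not taken from app.py.
    import torch
    from diffusers import AutoPipelineForText2Image

    pipeline = AutoPipelineForText2Image.from_pretrained(
        "warp-ai/wuerstchen", torch_dtype=torch.float16
    ).to("cuda")

    # First hunk: switch the prior and decoder denoisers to channels_last.
    pipeline.prior_prior.to(memory_format=torch.channels_last)
    pipeline.decoder.to(memory_format=torch.channels_last)

    # Second hunk: compile both denoisers with reduce-overhead mode.
    pipeline.prior_prior = torch.compile(
        pipeline.prior_prior, mode="reduce-overhead", fullgraph=True
    )
    pipeline.decoder = torch.compile(
        pipeline.decoder, mode="reduce-overhead", fullgraph=True
    )

    image = pipeline(prompt="An astronaut riding a horse").images[0]

Note that torch.compile triggers recompilation on the first few calls, so a benchmark like this Space would typically run a warmup generation before timing.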