Update pipeline.py
pipeline.py  CHANGED  (+3, -1)
@@ -962,12 +962,14 @@ class AnimateDiffPipeline(DiffusionPipeline, TextualInversionLoaderMixin, IPAdap
         num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
         with self.progress_bar(total=len(timesteps)) as progress_bar:
             for i, t in enumerate(timesteps):
-
+                print(f"Step: {i}")
+                print(f"Timestep: {t}")
                 latent_sum = torch.zeros_like(latents).to(device).to(dtype=torch.float16)
                 latent_counter = torch.zeros(num_frames).to(device).to(dtype=torch.float16)

                 # foreach context group seperately denoise the current timestep
                 for context_group in range(num_context_groups):
+                    print(f"Context group: {context_group}")
                     # calculate to current indexes, considering overlap
                     if context_group == 0:current_context_start = 0
                     else:current_context_start = context_group * (context_size - overlap)
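For reference, below is a minimal, standalone sketch (plain Python, not code from this commit) of the overlapping context-window indexing that the instrumented loop iterates over. The start-index rule mirrors the unchanged lines in the hunk; the window-count formula, the end-index clamp, and the concrete num_frames / context_size / overlap values are illustrative assumptions.

# Illustrative sketch of the overlapping context-window schedule.
# ASSUMPTIONS: the concrete sizes below and the ceil-based window count
# are examples only; they are not taken from this PR.
num_frames = 32
context_size = 16
overlap = 4

stride = context_size - overlap
num_context_groups = -(-(num_frames - overlap) // stride)  # ceil division (assumed)

for context_group in range(num_context_groups):
    # Same start-index rule as the diff's context lines: the first window
    # starts at frame 0, later windows advance by (context_size - overlap).
    if context_group == 0:
        current_context_start = 0
    else:
        current_context_start = context_group * (context_size - overlap)
    current_context_end = min(current_context_start + context_size, num_frames)
    print(f"Context group {context_group}: frames {current_context_start}..{current_context_end - 1}")

With these example values the schedule yields three windows (frames 0-15, 12-27, and 24-31), which is the kind of per-group progress the added print statements make visible during denoising.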