arthur-qiu committed
Commit 4da4f88 · 1 Parent(s): 6cc7e15

add turbo
app.py
CHANGED
@@ -27,9 +27,11 @@ def infer_gpu_turbo(pipe, seed, prompt, negative_prompt, ddim_steps, guidance_sc
     pipe = pipe.to("cuda")
     generator = torch.Generator(device='cuda')
     generator = generator.manual_seed(seed)
+    print('freeu starts')
     if not disable_freeu:
         register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
         register_free_crossattn_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
+    print('freeu ends')
     result = pipe(prompt, negative_prompt=negative_prompt, generator=generator,
                   num_inference_steps=ddim_steps, guidance_scale=guidance_scale,
                   resolutions_list=resolutions_list, fast_mode=fast_mode, cosine_scale=cosine_scale,
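For context, the two register_free_* calls bracketed by the new print statements follow the FreeU reference implementation's pattern of patching a diffusers pipeline's UNet up-blocks in place. Below is a minimal sketch of that pattern only, assuming the helpers are importable from a local free_lunch_utils module as in the FreeU repo and using a stock StableDiffusionPipeline as a stand-in for this Space's custom turbo pipeline (which additionally takes resolutions_list, fast_mode, and cosine_scale):

# Illustrative sketch only: a stock diffusers pipeline stands in for this Space's
# custom pipeline, and free_lunch_utils is assumed to be a local copy of the
# FreeU reference helpers.
import torch
from diffusers import StableDiffusionPipeline
from free_lunch_utils import register_free_upblock2d, register_free_crossattn_upblock2d

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# Patch the UNet up-blocks so FreeU re-weights backbone vs. skip features at
# inference time; b1/b2 scale backbone features, s1/s2 attenuate skip features.
register_free_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
register_free_crossattn_upblock2d(pipe, b1=1.1, b2=1.2, s1=0.6, s2=0.4)

generator = torch.Generator(device="cuda").manual_seed(1234)
image = pipe("a photo of a corgi", generator=generator,
             num_inference_steps=30, guidance_scale=7.5).images[0]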