Spaces: Running on Zero
Commit • 143f063
1 Parent(s): c2c42ca
Update app.py
app.py CHANGED
@@ -33,16 +33,18 @@ pipe_hyper = DiffusionPipeline.from_pretrained(base_model_id, unet=unet, torch_d
 pipe_hyper.scheduler = LCMScheduler.from_config(pipe_hyper.scheduler.config)
 pipe_hyper.to("cuda")
 
+
+@spaces.GPU
 def run(prompt):
     image_turbo=pipe_turbo(prompt=prompt, num_inference_steps=1, guidance_scale=0).images[0]
     image_lightning=pipe_lightning(prompt=prompt, num_inference_steps=1, guidance_scale=0).images[0]
     image_hyper=pipe_hyper(prompt=prompt, num_inference_steps=1, guidance_scale=0, timesteps=[800]).images[0]
     return image_turbo, image_lightning, image_hyper
+
+
 css = '''
 .gradio-container{max-width: 768px !important}
 '''
-
-@spaces.GPU
 with gr.Blocks(css=css) as demo:
     prompt = gr.Textbox(label="Prompt")
     run = gr.Button("Run")
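Note: the commit moves the @spaces.GPU decorator off the gr.Blocks context and onto run(), which is where ZeroGPU expects it: the decorator marks the callable that should hold a GPU while it executes. Below is a minimal, self-contained sketch of that pattern; the dummy pipeline, the output component, and the button wiring are illustrative stand-ins, not code from this Space.

```python
# Minimal sketch of the ZeroGPU pattern applied in this commit.
# Assumptions: running on a Hugging Face ZeroGPU Space with the `spaces`
# and `gradio` packages available; `fake_pipeline` is a placeholder for the
# real pipe_turbo / pipe_lightning / pipe_hyper pipelines built in app.py.
import spaces
import gradio as gr

def fake_pipeline(prompt):
    # Stand-in for the actual diffusion pipelines (illustrative only).
    return f"image for: {prompt}"

@spaces.GPU  # ZeroGPU attaches a GPU only for the duration of this call
def run(prompt):
    return fake_pipeline(prompt)

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    button = gr.Button("Run")
    output = gr.Textbox(label="Output")
    button.click(run, inputs=prompt, outputs=output)

if __name__ == "__main__":
    demo.launch()
```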