Venkateshwar Reddy committed on
Commit
6603ea2
·
1 Parent(s): 4ea785f

Fixed inference steps

Browse files
Files changed (1) hide show
  1. app.py +5 -6
app.py CHANGED
@@ -8,7 +8,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
8
  noise_scheduler = DDPMScheduler(num_train_timesteps=1000)
9
  if torch.cuda.is_available():
10
  torch.cuda.max_memory_allocated(device=device)
11
- pipe = DDPMPipeline.from_pretrained("FrozenScar/cartoon_face", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
12
  pipe.enable_xformers_memory_efficient_attention()
13
  pipe = pipe.to(device)
14
  else:
@@ -25,14 +25,13 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
25
 
26
  generator = torch.Generator().manual_seed(seed)
27
 
28
- image = pipe().images[0]
29
 
30
  return image
31
 
32
  examples = [
33
- "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k",
34
- "An astronaut riding a green horse",
35
- "A delicious ceviche cheesecake slice",
36
  ]
37
 
38
  css="""
@@ -119,7 +118,7 @@ with gr.Blocks(css=css) as demo:
119
  num_inference_steps = gr.Slider(
120
  label="Number of inference steps",
121
  minimum=1,
122
- maximum=12,
123
  step=1,
124
  value=2,
125
  )
 
8
  noise_scheduler = DDPMScheduler(num_train_timesteps=1000)
9
  if torch.cuda.is_available():
10
  torch.cuda.max_memory_allocated(device=device)
11
+ pipe = DDPMPipeline.from_pretrained("FrozenScar/cartoon_face", torch_dtype=torch.float16, variant="fp16", use_safetensors=True,scheduler=noise_scheduler)
12
  pipe.enable_xformers_memory_efficient_attention()
13
  pipe = pipe.to(device)
14
  else:
 
25
 
26
  generator = torch.Generator().manual_seed(seed)
27
 
28
+ image = pipe(num_inference_steps=num_inference_steps).images[0]
29
 
30
  return image
31
 
32
  examples = [
33
+ "OK broo",
34
+ "Nothing brooo"
 
35
  ]
36
 
37
  css="""
 
118
  num_inference_steps = gr.Slider(
119
  label="Number of inference steps",
120
  minimum=1,
121
+ maximum=120,
122
  step=1,
123
  value=2,
124
  )