slymnyldrm committed · Commit 12a94f5 · 1 Parent(s): aa81e8b

Update app.py

Files changed (1):
1. app.py +2 -2
app.py CHANGED
@@ -8,11 +8,11 @@ move_cache()
 
 model_path = "slymnyldrm/slymnyldrm3"
 
-pipe = StableDiffusionPipeline.from_pretrained(model_path, use_safetensors=True, safety_checker=None, torch_dtype=torch.float16).to("cuda")
+pipe = StableDiffusionPipeline.from_pretrained(model_path, use_safetensors=True, safety_checker=None, torch_dtype=torch.float16)
 pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
 pipe.enable_xformers_memory_efficient_attention()
 
-g_cuda = torch.Generator(device='cuda')
+g_cuda = torch.Generator()
 
 def inference(prompt, negative_prompt, num_samples, height=512, width=512, num_inference_steps=50, guidance_scale=7.5, g_seed=52362):
     with torch.autocast("cuda"), torch.inference_mode():
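
The commit drops the explicit .to("cuda") placement and switches the generator from torch.Generator(device='cuda') to a default (CPU-side) torch.Generator(). For context, here is a minimal sketch of how app.py around this hunk would typically drive the updated pipeline. The diff cuts off at the autocast line, so the pipe(...) call inside inference() is an assumption based on the standard diffusers StableDiffusionPipeline interface, not code shown in this commit.

import torch
from diffusers import DDIMScheduler, StableDiffusionPipeline

model_path = "slymnyldrm/slymnyldrm3"

# As of this commit the pipeline is loaded without an explicit .to("cuda");
# device placement is left to the hosting environment.
pipe = StableDiffusionPipeline.from_pretrained(
    model_path, use_safetensors=True, safety_checker=None, torch_dtype=torch.float16
)
pipe.scheduler = DDIMScheduler.from_config(pipe.scheduler.config)
pipe.enable_xformers_memory_efficient_attention()  # requires the xformers package

# Default (CPU-side) generator; the previous revision created it on CUDA.
g_cuda = torch.Generator()

def inference(prompt, negative_prompt, num_samples, height=512, width=512,
              num_inference_steps=50, guidance_scale=7.5, g_seed=52362):
    with torch.autocast("cuda"), torch.inference_mode():
        # Assumed continuation (not part of this diff): the usual diffusers
        # text-to-image call, seeded through the shared generator.
        return pipe(
            prompt,
            negative_prompt=negative_prompt,
            num_images_per_prompt=int(num_samples),
            height=int(height),
            width=int(width),
            num_inference_steps=int(num_inference_steps),
            guidance_scale=guidance_scale,
            generator=g_cuda.manual_seed(int(g_seed)),
        ).images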