Francesco committed on
Commit
c1b2494
·
1 Parent(s): adb624b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -20
app.py CHANGED
@@ -35,26 +35,17 @@ def infer(prompt):
35
  #generator = torch.Generator(device=device).manual_seed(seed)
36
  #print("Is GPU busy? ", is_gpu_busy)
37
  images = []
38
- #if(not is_gpu_busy):
39
- # is_gpu_busy = True
40
- # images_list = pipe(
41
- # [prompt] * samples,
42
- # num_inference_steps=steps,
43
- # guidance_scale=scale,
44
- #generator=generator,
45
- # )
46
- # is_gpu_busy = False
47
- # safe_image = Image.open(r"unsafe.png")
48
- # for i, image in enumerate(images_list["sample"]):
49
- # if(images_list["nsfw_content_detected"][i]):
50
- # images.append(safe_image)
51
- # else:
52
- # images.append(image)
53
- #else:
54
- with autocast("cuda"):
55
- generated = pipe(prompt, guidance_scale=7.5)["sample"]
56
-
57
- return generated
58
 
59
 
60
  css = """
 
35
  #generator = torch.Generator(device=device).manual_seed(seed)
36
  #print("Is GPU busy? ", is_gpu_busy)
37
  images = []
38
+ if(not is_gpu_busy):
39
+ is_gpu_busy = True
40
+ with autocast("cuda"):
41
+ images = pipe(
42
+ [prompt] * samples,
43
+ num_inference_steps=steps,
44
+ guidance_scale=scale,
45
+ #generator=generator,
46
+ ).images
47
+
48
+ return images
 
 
 
 
 
 
 
 
 
49
 
50
 
51
  css = """