multimodalart committed (verified)
Commit 1d162bd · Parent(s): fd9e499

Update app.py

Files changed (1):
  1. app.py (+3 -1)
app.py CHANGED
```diff
@@ -23,11 +23,13 @@ device="cuda"
 pipe = pipe.to(device)
 
 @spaces.GPU
-def run(prompt, negative_prompt="", guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
+def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
     prompt = prompt.strip()
     negative_prompt = negative_prompt.strip()
     if(randomize_seed):
         seed = random.randint(0, sys.maxsize)
+    if(negative_prompt == ""):
+        negative_prompt = None
     if(prompt == "" and negative_prompt == ""):
         guidance_scale = 0.0
 
```
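
The change boils down to treating a blank negative prompt as absent: the default becomes None, and an empty string submitted from the UI is converted to None before use. Below is a minimal, illustrative sketch of that normalization, assuming the Gradio textbox sends an empty string when left blank; the helper name is hypothetical and not part of app.py.

```python
# Illustrative sketch only (not code from the Space): normalize an empty or
# whitespace-only negative prompt to None so downstream code sees
# "no negative prompt" rather than an empty string.
def normalize_negative_prompt(negative_prompt):
    # Guard against a None default before stripping, then collapse "" to None.
    negative_prompt = (negative_prompt or "").strip()
    return negative_prompt if negative_prompt else None

assert normalize_negative_prompt("") is None
assert normalize_negative_prompt("   ") is None
assert normalize_negative_prompt("blurry, low quality") == "blurry, low quality"
```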