randomtable committed
Commit 6ae78ad (1 parent: ff940cf)

Update app.py

Files changed (1): app.py (+2 −10)
app.py CHANGED
@@ -20,7 +20,7 @@ MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
 @spaces.GPU #[uncomment to use ZeroGPU]
-def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
+def infer(prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
 
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
@@ -28,8 +28,7 @@ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance
     generator = torch.Generator().manual_seed(seed)
 
     image = pipe(
-        prompt = prompt,
-        negative_prompt = negative_prompt,
+        prompt = prompt,
         guidance_scale = guidance_scale,
         num_inference_steps = num_inference_steps,
         width = width,
@@ -75,13 +74,6 @@ with gr.Blocks(css=css) as demo:
 
         with gr.Accordion("Advanced Settings", open=False):
 
-            negative_prompt = gr.Text(
-                label="Negative prompt",
-                max_lines=1,
-                placeholder="Enter a negative prompt",
-                visible=False,
-            )
-
            seed = gr.Slider(
                label="Seed",
                minimum=0,
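For context, here is a minimal sketch of the relevant part of app.py after this commit, assuming the Space follows the stock Gradio/diffusers text-to-image template. Everything not shown in the hunks above (the imports, the pipeline setup, the model id, and the return statement) is an assumption based on that template, not on this diff.

```python
# Sketch of the post-commit infer() path, assuming the stock text-to-image Space template.
import random

import gradio as gr
import numpy as np
import spaces
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
model_repo_id = "stabilityai/sdxl-turbo"  # placeholder model id, not taken from this commit

pipe = DiffusionPipeline.from_pretrained(
    model_repo_id,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
).to(device)

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1024


@spaces.GPU  # [uncomment to use ZeroGPU]
def infer(prompt, seed, randomize_seed, width, height, guidance_scale,
          num_inference_steps, progress=gr.Progress(track_tqdm=True)):
    # Draw a fresh seed when the user enabled "randomize seed".
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)

    generator = torch.Generator().manual_seed(seed)

    # negative_prompt is no longer accepted or forwarded; the pipeline uses no negative prompt.
    image = pipe(
        prompt=prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
    ).images[0]

    return image, seed
```

If the Space keeps the template's event wiring, the gr.on(...) inputs list that previously included the negative_prompt textbox would also need to drop it to match the new infer signature; that part of app.py is not visible in this diff.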