CiaraRowles committed on
Commit
42262bc
·
verified ·
1 Parent(s): fbd72cb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -47,7 +47,7 @@ ip_model = IPAdapterInstruct(pipe, image_encoder_path, ip_ckpt, device,dtypein=t
47
  cv2.setNumThreads(1)
48
 
49
  @spaces.GPU(enable_queue=True)
50
- def generate_image(images, prompt, negative_prompt, nfaa_negative_prompt, progress=gr.Progress(track_tqdm=True)):
51
  faceid_all_embeds = []
52
  first_iteration = True
53
  image = images[0]
@@ -98,12 +98,13 @@ with gr.Blocks(css=css) as demo:
98
  submit = gr.Button("Submit")
99
  with gr.Accordion(open=False, label="Advanced Options"):
100
  nfaa_negative_prompts = gr.Textbox(label="Appended Negative Prompts", info="Negative prompts to steer generations towards safe for all audiences outputs", value="naked, bikini, skimpy, scanty, bare skin, lingerie, swimsuit, exposed, see-through")
 
101
  with gr.Column():
102
  gallery = gr.Gallery(label="Generated Images")
103
  files.upload(fn=swap_to_gallery, inputs=files, outputs=[uploaded_files, clear_button, files])
104
  remove_and_reupload.click(fn=remove_back_to_files, outputs=[uploaded_files, clear_button, files])
105
  submit.click(fn=generate_image,
106
- inputs=[files,prompt,negative_prompt, nfaa_negative_prompts],
107
  outputs=gallery)
108
 
109
  gr.Markdown("This demo includes extra features to mitigate the implicit bias of the model and prevent explicit usage of it to generate content with faces of people, including third parties, that is not safe for all audiences, including naked or semi-naked people.")
 
47
  cv2.setNumThreads(1)
48
 
49
  @spaces.GPU(enable_queue=True)
50
+ def generate_image(images, prompt, negative_prompt,scale, nfaa_negative_prompt, progress=gr.Progress(track_tqdm=True)):
51
  faceid_all_embeds = []
52
  first_iteration = True
53
  image = images[0]
 
98
  submit = gr.Button("Submit")
99
  with gr.Accordion(open=False, label="Advanced Options"):
100
  nfaa_negative_prompts = gr.Textbox(label="Appended Negative Prompts", info="Negative prompts to steer generations towards safe for all audiences outputs", value="naked, bikini, skimpy, scanty, bare skin, lingerie, swimsuit, exposed, see-through")
101
+ scale = gr.Slider(label="Scale", value=1.0, step=0.1, minimum=0, maximum=5)
102
  with gr.Column():
103
  gallery = gr.Gallery(label="Generated Images")
104
  files.upload(fn=swap_to_gallery, inputs=files, outputs=[uploaded_files, clear_button, files])
105
  remove_and_reupload.click(fn=remove_back_to_files, outputs=[uploaded_files, clear_button, files])
106
  submit.click(fn=generate_image,
107
+ inputs=[files,prompt,negative_prompt,scale, nfaa_negative_prompts],
108
  outputs=gallery)
109
 
110
  gr.Markdown("This demo includes extra features to mitigate the implicit bias of the model and prevent explicit usage of it to generate content with faces of people, including third parties, that is not safe for all audiences, including naked or semi-naked people.")