alexnasa committed on
Commit d5ee134 · verified · 1 Parent(s): e6ca02c

Update app.py

Files changed (1)
  1. app.py +1 -67
app.py CHANGED
@@ -441,21 +441,17 @@ with gr.Blocks() as demo:
     with gr.Row():
         with gr.Column():
             prompt = gr.Textbox(label="Prompt", value="")
-            # Use Row and Column to lay out the four images and their captions
             with gr.Row():
                 target_height = gr.Slider(512, 1024, step=128, value=768, label="Generated Height", info="")
                 target_width = gr.Slider(512, 1024, step=128, value=768, label="Generated Width", info="")
                 cond_size = gr.Slider(256, 384, step=128, value=256, label="Condition Size", info="")
             with gr.Row():
-                # Change weight_id_ip_str into two Sliders
                 weight_id = gr.Slider(0.1, 5, step=0.1, value=3, label="weight_id")
                 weight_ip = gr.Slider(0.1, 5, step=0.1, value=5, label="weight_ip")
             with gr.Row():
-                # Change ip_scale_str to a Slider and add a Textbox showing the converted format
                 ip_scale_str = gr.Slider(0.5, 1.5, step=0.01, value=0.85, label="latent_lora_scale")
                 vae_lora_scale = gr.Slider(0.5, 1.5, step=0.01, value=1.3, label="vae_lora_scale")
             with gr.Row():
-                # Change vae_skip_iter into two Sliders
                 vae_skip_iter_s1 = gr.Slider(0, 1, step=0.01, value=0.05, label="vae_skip_iter_before")
                 vae_skip_iter_s2 = gr.Slider(0, 1, step=0.01, value=0.8, label="vae_skip_iter_after")
 
@@ -567,68 +563,6 @@ with gr.Blocks() as demo:
         det_btns[i].click(det_seg_img, inputs=[images[i], captions[i]], outputs=[images[i]])
         vlm_btns[i].click(vlm_img_caption, inputs=[images[i]], outputs=[captions[i]])
         accordion_states[i].change(fn=lambda x, state, index=i: change_accordion(x, index, state), inputs=[accordion_states[i], indexs_state], outputs=[accordions[i], indexs_state])
-
-    examples = gr.Examples(
-        examples=[
-            [
-                "ENT1 wearing a tiny hat",
-                42, 256, 768, 768,
-                3, 5,
-                0.85, 1.3,
-                0.05, 0.8,
-                "sample/hamster.jpg", None, None, None, None, None,
-                "a hamster", None, None, None, None, None,
-                False, False, False, False, False, False
-            ],
-            [
-                "ENT1 in a red dress is smiling",
-                42, 256, 768, 768,
-                3, 5,
-                0.85, 1.3,
-                0.05, 0.8,
-                "sample/woman.jpg", None, None, None, None, None,
-                "a woman", None, None, None, None, None,
-                True, False, False, False, False, False
-            ],
-            [
-                "ENT1 and ENT2 standing together in a park.",
-                42, 256, 768, 768,
-                2, 5,
-                0.85, 1.3,
-                0.05, 0.8,
-                "sample/woman.jpg", "sample/girl.jpg", None, None, None, None,
-                "a woman", "a girl", None, None, None, None,
-                True, True, False, False, False, False
-            ],
-            [
-                "ENT1, ENT2, and ENT3 standing together in a park.",
-                42, 256, 768, 768,
-                2.5, 5,
-                0.8, 1.2,
-                0.05, 0.8,
-                "sample/woman.jpg", "sample/girl.jpg", "sample/old_man.jpg", None, None, None,
-                "a woman", "a girl", "an old man", None, None, None,
-                True, True, True, False, False, False
-            ],
-        ],
-        inputs=[
-            prompt, seed,
-            cond_size,
-            target_height,
-            target_width,
-            weight_id,
-            weight_ip,
-            ip_scale_str,
-            vae_lora_scale,
-            vae_skip_iter_s1,
-            vae_skip_iter_s2,
-            *images,
-            *captions,
-            *idip_checkboxes
-        ],
-        outputs=accordion_states,
-        fn=open_accordion_on_example_selection,
-        run_on_click=True
-    )
+
 
 demo.queue().launch(share=True)
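
Note on the removed block: the examples section deleted above used gr.Examples with run_on_click=True, so selecting a preset row both filled the demo's inputs and ran open_accordion_on_example_selection to update the accordion states. A minimal standalone sketch of that Gradio pattern follows; the greet function and its components are hypothetical placeholders, not code from app.py.

import gradio as gr

def greet(name, intensity):
    # Placeholder handler; app.py instead wired fn=open_accordion_on_example_selection.
    return "hello " + name + "!" * int(intensity)

with gr.Blocks() as demo:
    name = gr.Textbox(label="Name")
    intensity = gr.Slider(1, 5, step=1, value=2, label="Intensity")
    out = gr.Textbox(label="Output")

    gr.Examples(
        examples=[["Ada", 3], ["Grace", 5]],  # each row maps onto inputs, in order
        inputs=[name, intensity],
        outputs=[out],
        fn=greet,
        run_on_click=True,  # clicking a row also runs fn, as the removed block did
    )

demo.queue().launch()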
 