habulaj committed
Commit 7c74483 · verified · Parent(s): ce6ac39

Update app.py

Files changed (1):
  1. app.py +3 -16
app.py CHANGED
@@ -25,12 +25,6 @@ MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1824"))
 USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
-ENABLE_USE_LORA = os.getenv("ENABLE_USE_LORA") == "1"
-ENABLE_USE_LORA2 = os.getenv("ENABLE_USE_LORA2") == "1"
-ENABLE_USE_VAE = os.getenv("ENABLE_USE_VAE", "1") == "1"
-ENABLE_USE_IMG2IMG = os.getenv("ENABLE_USE_IMG2IMG", "1") == "1"
-ENABLE_USE_CONTROLNET = os.getenv("ENABLE_USE_CONTROLNET", "1") == "1"
-ENABLE_USE_CONTROLNETIMG2IMG = os.getenv("ENABLE_USE_CONTROLNET", "1") == "1"

 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

@@ -412,16 +406,9 @@ with gr.Blocks(theme=theme, css="style.css") as demo:
         api_name=False,
     )
     use_lora2.change(
-        fn=lambda x: gr.update(visible=x),
-        inputs=use_lora2,
-        outputs=lora2,
-        queue=False,
-        api_name=False,
-    )
-    use_lora2.change(
-        fn=lambda x: gr.update(visible=x),
-        inputs=use_lora,
-        outputs=lora,
+        fn=lambda x: [gr.update(visible=x), gr.update(visible=x)],
+        inputs=[use_lora, use_lora2],
+        outputs=[lora, lora2],
         queue=False,
         api_name=False,
     )
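For context, here is a minimal, self-contained sketch of the consolidated .change() wiring the second hunk moves toward: one event handler drives the visibility of both LoRA fields instead of two separate use_lora2.change() registrations. The component types and labels below are assumptions for illustration only (the real app defines use_lora, use_lora2, lora, and lora2 elsewhere in app.py). Note that Gradio passes one positional argument per input component, so a handler wired to two inputs should accept two parameters:

import gradio as gr

with gr.Blocks() as demo:
    # Hypothetical stand-ins for the app's actual LoRA controls.
    use_lora = gr.Checkbox(label="Use LoRA 1", value=False)
    use_lora2 = gr.Checkbox(label="Use LoRA 2", value=False)
    lora = gr.Textbox(label="LoRA 1", visible=False)
    lora2 = gr.Textbox(label="LoRA 2", visible=False)

    # One callback, two inputs, two outputs: Gradio calls fn with the current
    # values of use_lora and use_lora2 and expects one update per output.
    use_lora2.change(
        fn=lambda a, b: [gr.update(visible=a), gr.update(visible=b)],
        inputs=[use_lora, use_lora2],
        outputs=[lora, lora2],
        queue=False,
        api_name=False,
    )

demo.launch()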