KingNish committed on
Commit
cc4b343
1 Parent(s): 417cc40

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -10
app.py CHANGED
@@ -25,7 +25,6 @@ CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1")
25
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4192"))
26
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
27
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
28
- PORT = int(os.getenv("DEMO_PORT", "15432"))
29
 
30
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
31
 
@@ -93,15 +92,9 @@ if torch.cuda.is_available():
93
  use_safetensors=True,
94
  )
95
 
96
- if os.getenv('CONSISTENCY_DECODER', False):
97
- print("Using DALL-E 3 Consistency Decoder")
98
- pipe.vae = ConsistencyDecoderVAE.from_pretrained("openai/consistency-decoder", torch_dtype=torch.float16)
99
 
100
- if ENABLE_CPU_OFFLOAD:
101
- pipe.enable_model_cpu_offload()
102
- else:
103
- pipe.to(device)
104
- print("Loaded on Device!")
105
 
106
  # speed-up T5
107
  pipe.text_encoder.to_bettertransformer()
@@ -131,7 +124,7 @@ def generate(
131
  seed: int = 0,
132
  width: int = 1024,
133
  height: int = 1024,
134
- inference_steps: int = 8,
135
  randomize_seed: bool = False,
136
  use_resolution_binning: bool = True,
137
  progress=gr.Progress(track_tqdm=True),
 
25
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4192"))
26
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
27
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
 
28
 
29
  device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
30
 
 
92
  use_safetensors=True,
93
  )
94
 
95
+ pipe.vae = ConsistencyDecoderVAE.from_pretrained("openai/consistency-decoder", torch_dtype=torch.float16)
 
 
96
 
97
+ pipe.to(device)
 
 
 
 
98
 
99
  # speed-up T5
100
  pipe.text_encoder.to_bettertransformer()
 
124
  seed: int = 0,
125
  width: int = 1024,
126
  height: int = 1024,
127
+ inference_steps: int = 12,
128
  randomize_seed: bool = False,
129
  use_resolution_binning: bool = True,
130
  progress=gr.Progress(track_tqdm=True),