Spanicin committed on
Commit abb89b9 · verified · 1 Parent(s): cae1fbb

Update app.py

Files changed (1): app.py  +9 −6
app.py CHANGED
@@ -309,24 +309,27 @@ if huggingface_token:
 else:
     print("Hugging Face token not found in environment variables.")
 print(huggingface_token)
-model_path = snapshot_download(
+with tqdm(total=100, desc="Downloading model", bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt}") as pbar:
+    model_path = snapshot_download(
     repo_id="black-forest-labs/FLUX.1-dev",
     repo_type="model",
     ignore_patterns=["*.md", "*..gitattributes"],
     local_dir="FLUX.1-dev",
-    token=huggingface_token
+    token=huggingface_token)
 
 # Load pipeline
 print('controlnet enters')
-controlnet = FluxControlNetModel.from_pretrained(
+with tqdm(total=100, desc="Downloading model", bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt}") as pbar:
+    controlnet = FluxControlNetModel.from_pretrained(
     "jasperai/Flux.1-dev-Controlnet-Upscaler", torch_dtype=torch.bfloat16
 ).to(device)
 print('controlnet exits')
 print('pipe enters')
-pipe = FluxControlNetPipeline.from_pretrained(
+with tqdm(total=100, desc="Downloading model", bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt}") as pbar:
+    pipe = FluxControlNetPipeline.from_pretrained(
     model_path, controlnet=controlnet, torch_dtype=torch.bfloat16
-)
-pipe.to(device)
+).to(device)
+# pipe.to(device)
 print('pipe exits')
 
 MAX_SEED = 1000000
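
For reference, below is a minimal, self-contained sketch of the download step as it reads after this commit, with the imports it relies on made explicit. The HF_TOKEN environment-variable name is an assumption for illustration only, since the hunk above does not show how huggingface_token is defined. Note that the wrapping tqdm bar is never advanced (there are no pbar.update() calls), so it only labels the step; snapshot_download prints its own per-file progress bars while downloading.

import os
from tqdm import tqdm
from huggingface_hub import snapshot_download

# Assumed for illustration; app.py defines this elsewhere.
huggingface_token = os.getenv("HF_TOKEN")

# The outer bar stays at 0/100 because pbar.update() is never called;
# it acts as a label while snapshot_download shows its own progress bars.
with tqdm(total=100, desc="Downloading model",
          bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt}") as pbar:
    model_path = snapshot_download(
        repo_id="black-forest-labs/FLUX.1-dev",
        repo_type="model",
        ignore_patterns=["*.md", "*..gitattributes"],
        local_dir="FLUX.1-dev",
        token=huggingface_token)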