rafaaa2105 committed on
Commit
31c44c1
·
verified ·
1 Parent(s): fe72eaa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -3
app.py CHANGED
@@ -4,7 +4,7 @@ import random
4
  import os
5
 
6
  # import spaces #[uncomment to use ZeroGPU]
7
- from diffusers import AutoPipelineForText2Image, AutoencoderKL, AutoencoderTiny
8
  import torch
9
 
10
  device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -19,7 +19,6 @@ else:
19
  taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
20
  pipe = pipeline = AutoPipelineForText2Image.from_pretrained("black-forest-labs/FLUX.1-dev", token=hf_token, torch_dtype=torch.bfloat16)
21
  pipe.load_lora_weights('aleksa-codes/flux-ghibsky-illustration', weight_name='lora.safetensors')
22
- good_vae = AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-dev", subfolder="vae", torch_dtype=dtype).to(device)
23
  pipe = pipe.to(device)
24
 
25
  MAX_SEED = np.iinfo(np.int32).max
@@ -49,7 +48,6 @@ def infer(
49
  width=width,
50
  height=height,
51
  generator=generator,
52
- good_vae=good_vae,
53
  ).images[0]
54
 
55
  return image, seed
 
4
  import os
5
 
6
  # import spaces #[uncomment to use ZeroGPU]
7
+ from diffusers import AutoPipelineForText2Image, AutoencoderTiny
8
  import torch
9
 
10
  device = "cuda" if torch.cuda.is_available() else "cpu"
 
19
  taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
20
  pipe = pipeline = AutoPipelineForText2Image.from_pretrained("black-forest-labs/FLUX.1-dev", token=hf_token, torch_dtype=torch.bfloat16)
21
  pipe.load_lora_weights('aleksa-codes/flux-ghibsky-illustration', weight_name='lora.safetensors')
 
22
  pipe = pipe.to(device)
23
 
24
  MAX_SEED = np.iinfo(np.int32).max
 
48
  width=width,
49
  height=height,
50
  generator=generator,
 
51
  ).images[0]
52
 
53
  return image, seed