nroggendorff committed
Commit c69b53e · verified · 1 Parent(s): 8a0ba18

Update app.py

Files changed (1):
  1. app.py +3 -3
app.py CHANGED
@@ -3,16 +3,16 @@ import spaces
 
 import torch
 from diffusers import FluxPipeline
+from huggingface_hub.utils import RepositoryNotFoundError
 
 pipeline = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.float16).to("cuda")
-#pipeline.enable_model_cpu_offload()
 
 @spaces.GPU(duration=70)
 def generate(prompt, negative_prompt, width, height, sample_steps, lora_id):
     try:
         pipeline.load_lora_weights(lora_id)
-    except Exception as e:
-        return f"An error occurred while loading the adapter: {e}."
+    except RepositoryNotFoundError:
+        raise ValueError("Received an invalid FLUX LoRA.")
 
     return pipeline(prompt=f"{prompt}\nDO NOT INCLUDE {negative_prompt}", width=width, height=height, num_inference_steps=sample_steps, generator=torch.Generator("cpu").manual_seed(42), guidance_scale=7).images[0]
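
The commit narrows the broad except Exception handler, which returned an error string where callers expect an image, to the specific RepositoryNotFoundError that huggingface_hub raises for a nonexistent repo id, re-raised as a ValueError. Below is a minimal sketch of that contract that checks the Hub via model_info instead of loading the full FLUX pipeline; check_lora_repo and the invalid repo id are hypothetical names used only for illustration.

from huggingface_hub import model_info
from huggingface_hub.utils import RepositoryNotFoundError

def check_lora_repo(lora_id: str) -> None:
    # Hypothetical helper mirroring the updated except clause in generate():
    # a missing repo surfaces as a ValueError instead of an error string.
    try:
        model_info(lora_id)  # network call; raises RepositoryNotFoundError for an unknown repo id
    except RepositoryNotFoundError:
        raise ValueError("Received an invalid FLUX LoRA.")

check_lora_repo("black-forest-labs/FLUX.1-dev")   # existing repo: no error
try:
    check_lora_repo("no-such-user/no-such-lora")  # hypothetical invalid id
except ValueError as err:
    print(err)

Raising rather than returning keeps generate()'s return type consistent: callers always get a PIL image or an exception, never an error string standing in for one.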