fffiloni committed on
Commit
3134aee
·
verified ·
1 Parent(s): ddb59f9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -14
app.py CHANGED
@@ -86,7 +86,7 @@ def load_b_lora_to_unet(pipe, content_lora_model_id: str = '', style_lora_model_
86
  except Exception as e:
87
  raise type(e)(f'failed to load_b_lora_to_unet, due to: {e}')
88
 
89
- @spaces.GPU()
90
  def load_b_loras(content_b_lora, style_b_lora):
91
 
92
  if content_b_lora != "" and content_b_lora is not None:
@@ -108,7 +108,16 @@ def load_b_loras(content_b_lora, style_b_lora):
108
 
109
  prepared_prompt = f"{content_model_instance_prompt} {style_model_instance_prompt}"
110
 
111
- pipeline.unload_lora_weights()
 
 
 
 
 
 
 
 
 
112
 
113
  if content_b_lora is None:
114
  content_B_LoRA_path = ''
@@ -124,16 +133,6 @@ def load_b_loras(content_b_lora, style_b_lora):
124
 
125
  load_b_lora_to_unet(pipeline, content_B_LoRA_path, style_B_LoRA_path, content_alpha, style_alpha)
126
 
127
- return prepared_prompt
128
-
129
- @spaces.GPU()
130
- def main(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
131
-
132
- if randomize_seed:
133
- seed = random.randint(0, MAX_SEED)
134
-
135
- generator = torch.Generator().manual_seed(seed)
136
-
137
  prompt = prompt
138
  image = pipeline(
139
  prompt,
@@ -142,7 +141,8 @@ def main(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_
142
  width = width,
143
  height = height,
144
  ).images[0]
145
-
 
146
  return image
147
 
148
  css="""
@@ -257,7 +257,7 @@ with gr.Blocks(css=css) as demo:
257
 
258
  run_button.click(
259
  fn = main,
260
- inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
261
  outputs = [result]
262
  )
263
 
 
86
  except Exception as e:
87
  raise type(e)(f'failed to load_b_lora_to_unet, due to: {e}')
88
 
89
+
90
  def load_b_loras(content_b_lora, style_b_lora):
91
 
92
  if content_b_lora != "" and content_b_lora is not None:
 
108
 
109
  prepared_prompt = f"{content_model_instance_prompt} {style_model_instance_prompt}"
110
 
111
+
112
+ return prepared_prompt
113
+
114
+ @spaces.GPU()
115
+ def main(content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
116
+
117
+ if randomize_seed:
118
+ seed = random.randint(0, MAX_SEED)
119
+
120
+ generator = torch.Generator().manual_seed(seed)
121
 
122
  if content_b_lora is None:
123
  content_B_LoRA_path = ''
 
133
 
134
  load_b_lora_to_unet(pipeline, content_B_LoRA_path, style_B_LoRA_path, content_alpha, style_alpha)
135
 
 
 
 
 
 
 
 
 
 
 
136
  prompt = prompt
137
  image = pipeline(
138
  prompt,
 
141
  width = width,
142
  height = height,
143
  ).images[0]
144
+
145
+ pipeline.unload_lora_weights()
146
  return image
147
 
148
  css="""
 
257
 
258
  run_button.click(
259
  fn = main,
260
+ inputs = [content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
261
  outputs = [result]
262
  )
263