Commit 26f4c39 by multimodalart (HF staff)
1 parent: 48542ca

Skip fuse-unfuse to test

Files changed (1): app.py (+4, -3)
app.py CHANGED
@@ -163,19 +163,19 @@ def run_lora(prompt, negative, lora_scale, selected_state, sdxl_loras, progress=
             pipe = copy.deepcopy(original_pipe)
             pipe.to(device)
         elif(last_fused):
-            pipe.unfuse_lora()
+            #pipe.unfuse_lora()
             pipe.unload_lora_weights()
         is_compatible = sdxl_loras[selected_state.index]["is_compatible"]
 
         if is_compatible:
             pipe.load_lora_weights(loaded_state_dict)
-            pipe.fuse_lora(lora_scale)
+            #pipe.fuse_lora(lora_scale)
             last_fused = True
         else:
             is_pivotal = sdxl_loras[selected_state.index]["is_pivotal"]
             if(is_pivotal):
                 pipe.load_lora_weights(loaded_state_dict)
-                pipe.fuse_lora(lora_scale)
+                #pipe.fuse_lora(lora_scale)
                 last_fused = True
 
                 #Add the textual inversion embeddings from pivotal tuning models
@@ -196,6 +196,7 @@ def run_lora(prompt, negative, lora_scale, selected_state, sdxl_loras, progress=
         width=1024,
         height=1024,
         num_inference_steps=20,
+        cross_attention_kwargs={"scale": lora_scale},
         guidance_scale=7.5,
     ).images[0]
     last_lora = repo_name
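
For reference, below is a minimal sketch of the two ways a diffusers SDXL pipeline can apply a LoRA scale: the fuse/unfuse path this commit comments out, and the per-call cross_attention_kwargs scale it switches to. The base model id, LoRA repo id, prompt, and scale value are placeholders for illustration, not values taken from this Space's app.py.

# Sketch only: the model id, LoRA repo id, prompt, and scale below are
# assumptions, not values from app.py.
import torch
from diffusers import StableDiffusionXLPipeline

pipe = StableDiffusionXLPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", torch_dtype=torch.float16
).to("cuda")
pipe.load_lora_weights("some-user/some-sdxl-lora")  # hypothetical LoRA repo

# Approach A (what the commit disables): bake the scale into the weights,
# then undo it before switching to another LoRA.
#   pipe.fuse_lora(lora_scale=0.8)
#   ... run inference ...
#   pipe.unfuse_lora()

# Approach B (what the commit tests): keep the LoRA unfused and pass the
# scale on each call through cross_attention_kwargs.
image = pipe(
    "a photo of a llama in a library",
    num_inference_steps=20,
    guidance_scale=7.5,
    cross_attention_kwargs={"scale": 0.8},
).images[0]

pipe.unload_lora_weights()  # drop the adapter before loading the next one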