fffiloni committed on
Commit
ac813bc
·
verified ·
1 Parent(s): 40810f3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -1
app.py CHANGED
@@ -47,6 +47,29 @@ def load_b_lora_to_unet(pipe, content_lora_model_id: str = '', style_lora_model_
47
  except Exception as e:
48
  raise type(e)(f'failed to load_b_lora_to_unet, due to: {e}')
49
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
  def main(content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
51
 
52
  if randomize_seed:
@@ -100,10 +123,13 @@ with gr.Blocks(css=css) as demo:
100
  Currently running on {power_device}.
101
  """)
102
 
103
- with gr.Row():
104
  content_b_lora = gr.Textbox(label="B-LoRa for content")
105
  style_b_lora = gr.Textbox(label="B-LoRa for style")
106
 
 
 
 
107
  with gr.Row():
108
 
109
  prompt = gr.Text(
@@ -173,6 +199,12 @@ with gr.Blocks(css=css) as demo:
173
  value=50,
174
  )
175
 
 
 
 
 
 
 
176
  run_button.click(
177
  fn = main,
178
  inputs = [content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
 
47
  except Exception as e:
48
  raise type(e)(f'failed to load_b_lora_to_unet, due to: {e}')
49
 
50
def load_b_loras(content_b_lora, style_b_lora):
    """Build a generation prompt from the trigger words of the chosen B-LoRAs.

    For each provided model repo id, loads its model card and reads the
    ``instance_prompt`` metadata field (the model's trigger word). The style
    trigger word is wrapped as ``"in <word> style"``. The two parts are then
    joined into a single prompt string.

    Args:
        content_b_lora: Repo id of the content B-LoRA, or None to skip.
        style_b_lora: Repo id of the style B-LoRA, or None to skip.

    Returns:
        The prepared prompt string, e.g. ``"<content word> in <style word> style"``.
    """
    if content_b_lora is not None:
        # Get instance_prompt a.k.a trigger word.
        # BUG FIX: original called ModelCard.load(custom_model), a stale name
        # from elsewhere — it must load the card of the model passed in.
        content_model_card = ModelCard.load(content_b_lora)
        content_model_repo_data = content_model_card.data.to_dict()
        content_model_instance_prompt = content_model_repo_data.get("instance_prompt")
    else:
        content_model_instance_prompt = ''

    if style_b_lora is not None:
        # Get instance_prompt a.k.a trigger word.
        # BUG FIX: same stale-name issue as above — use style_b_lora here.
        style_model_card = ModelCard.load(style_b_lora)
        style_model_repo_data = style_model_card.data.to_dict()
        style_model_instance_prompt = style_model_repo_data.get("instance_prompt")
        style_model_instance_prompt = f"in {style_model_instance_prompt} style"
    else:
        style_model_instance_prompt = ''

    prepared_prompt = f"{content_model_instance_prompt} {style_model_instance_prompt}"

    return prepared_prompt
73
  def main(content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
74
 
75
  if randomize_seed:
 
123
  Currently running on {power_device}.
124
  """)
125
 
126
+ with gr.Row():
127
  content_b_lora = gr.Textbox(label="B-LoRa for content")
128
  style_b_lora = gr.Textbox(label="B-LoRa for style")
129
 
130
+ with gr.Column():
131
+ load_b_loras_btn = gr.Button("load models")
132
+
133
  with gr.Row():
134
 
135
  prompt = gr.Text(
 
199
  value=50,
200
  )
201
 
202
+ load_b_loras_btn.click(
203
+ fn = load_b_loras,
204
+ inputs = [content_b_lora, style_b_lora],
205
+ outputs = [prompt]
206
+ )
207
+
208
  run_button.click(
209
  fn = main,
210
  inputs = [content_b_lora, style_b_lora, prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],