SemaSci committed
Commit 98e3bfb · verified · 1 Parent(s): a1e41ab

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -26,7 +26,7 @@ def get_lora_sd_pipeline(
     if base_model_name_or_path is None:
         raise ValueError("Please specify the base model name or path")
 
-    pipe = StableDiffusionPipeline.from_pretrained(base_model_name_or_path, torch_dtype=dtype)
+    pipe = DiffusionPipeline.from_pretrained(base_model_name_or_path, torch_dtype=dtype)
     before_params = pipe.unet.parameters()
     pipe.unet = PeftModel.from_pretrained(pipe.unet, unet_sub_dir, adapter_name=adapter_name)
     pipe.unet.set_adapter(adapter_name)
@@ -103,7 +103,7 @@ def infer(
 
     # update the pipe conditionally
     if model_repo_id != model_id_default:
-        pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype).to(device)
+        pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype).to(device)
     prompt_embeds = process_prompt(prompt, pipe.tokenizer, pipe.text_encoder)
     negative_prompt_embeds = process_prompt(negative_prompt, pipe.tokenizer, pipe.text_encoder)
     prompt_embeds, negative_prompt_embeds = align_embeddings(prompt_embeds, negative_prompt_embeds)
@@ -195,6 +195,7 @@ with gr.Blocks(css=css) as demo:
             info="Choose model",
             visible=True,
             allow_custom_value=True,
+            # value=model_repo_id,
             value=model_id_default,
         )
 
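
For reference, the loading pattern the first hunk converges on is sketched below. This is a minimal illustration, assuming the diffusers and peft packages; load_lora_pipeline is a hypothetical stand-in for the app's get_lora_sd_pipeline, and unet_sub_dir, adapter_name and dtype mirror names visible in the diff. DiffusionPipeline.from_pretrained dispatches to the concrete pipeline class declared in the checkpoint's model_index.json, so checkpoints that are not plain Stable Diffusion can also be loaded.

import torch
from diffusers import DiffusionPipeline
from peft import PeftModel

def load_lora_pipeline(base_model_name_or_path, unet_sub_dir, adapter_name, dtype=torch.float16):
    # Guard kept from the diff: a base checkpoint must be given.
    if base_model_name_or_path is None:
        raise ValueError("Please specify the base model name or path")

    # Load whichever pipeline class the checkpoint declares.
    pipe = DiffusionPipeline.from_pretrained(base_model_name_or_path, torch_dtype=dtype)

    # Wrap the UNet with the LoRA weights and make that adapter active.
    pipe.unet = PeftModel.from_pretrained(pipe.unet, unet_sub_dir, adapter_name=adapter_name)
    pipe.unet.set_adapter(adapter_name)
    return pipe

The second hunk applies the same substitution inside infer, reloading the pipeline with DiffusionPipeline.from_pretrained(...).to(device) only when model_repo_id differs from model_id_default.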
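
The third hunk only adds a commented-out value= line to the model selector, but for orientation, a Gradio dropdown configured the way the diff shows might look roughly like this; choices, label, and the default repo id are illustrative assumptions, since only info, visible, allow_custom_value and value appear in the diff.

import gradio as gr

model_id_default = "CompVis/stable-diffusion-v1-4"  # illustrative placeholder; the real default lives in app.py

with gr.Blocks() as demo:
    model_repo_id = gr.Dropdown(
        choices=[model_id_default],  # assumed list of known checkpoints
        label="Model",               # assumed label
        info="Choose model",
        visible=True,
        allow_custom_value=True,     # lets users type an arbitrary Hub repo id
        value=model_id_default,
    )

allow_custom_value=True is presumably what motivates the conditional reload in infer, since users can enter a repo id other than the default.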