Elora committed on
Commit
9406910
·
1 Parent(s): d10daf1
Files changed (2) hide show
  1. app.py +41 -6
  2. requirements.txt +2 -1
app.py CHANGED
@@ -2,33 +2,60 @@ import gradio as gr
2
  import numpy as np
3
  import random
4
  from diffusers import DiffusionPipeline
 
 
5
  import torch
6
 
7
  device = "cuda" if torch.cuda.is_available() else "cpu"
8
 
9
  if torch.cuda.is_available():
10
  torch.cuda.max_memory_allocated(device=device)
11
- pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
12
  pipe.enable_xformers_memory_efficient_attention()
13
  pipe = pipe.to(device)
14
  else:
15
- pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", use_safetensors=True)
16
  pipe = pipe.to(device)
17
 
18
  MAX_SEED = np.iinfo(np.int32).max
19
  MAX_IMAGE_SIZE = 1024
20
 
21
- def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
 
23
  if randomize_seed:
24
  seed = random.randint(0, MAX_SEED)
25
 
26
  generator = torch.Generator().manual_seed(seed)
 
 
 
 
 
 
27
 
28
  image = pipe(
29
- prompt = prompt,
30
- negative_prompt = negative_prompt,
 
 
 
 
31
  guidance_scale = guidance_scale,
 
32
  num_inference_steps = num_inference_steps,
33
  width = width,
34
  height = height,
@@ -95,6 +122,14 @@ with gr.Blocks(css=css) as demo:
95
  )
96
 
97
  randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
 
 
 
 
 
 
 
 
98
 
99
  with gr.Row():
100
 
@@ -139,7 +174,7 @@ with gr.Blocks(css=css) as demo:
139
 
140
  run_button.click(
141
  fn = infer,
142
- inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
143
  outputs = [result]
144
  )
145
 
 
2
  import numpy as np
3
  import random
4
  from diffusers import DiffusionPipeline
5
+ from diffusers import EulerAncestralDiscreteScheduler
6
+ from compel import Compel, ReturnedEmbeddingsType
7
  import torch
8
 
9
  device = "cuda" if torch.cuda.is_available() else "cpu"
10
 
11
  if torch.cuda.is_available():
12
  torch.cuda.max_memory_allocated(device=device)
13
+ pipe = DiffusionPipeline.from_pretrained("John6666/mala-anime-mix-nsfw-pony-xl-v6-sdxl", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
14
  pipe.enable_xformers_memory_efficient_attention()
15
  pipe = pipe.to(device)
16
  else:
17
+ pipe = DiffusionPipeline.from_pretrained("John6666/mala-anime-mix-nsfw-pony-xl-v6-sdxl", use_safetensors=True)
18
  pipe = pipe.to(device)
19
 
20
  MAX_SEED = np.iinfo(np.int32).max
21
  MAX_IMAGE_SIZE = 1024
22
 
23
+
24
+
25
+ pipe.safety_checker = None
26
+ pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
27
+
28
+ compel = Compel(
29
+ tokenizer=[pipe.tokenizer, pipe.tokenizer_2] ,
30
+ text_encoder=[pipe.text_encoder, pipe.text_encoder_2],
31
+ returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED,
32
+ requires_pooled=[False, True]
33
+ )
34
+
35
+ pipe.load_lora_weights("xenov2/pony", weight_name="style_cogecha_pony_1.safetensors", adapter_name="chochega")
36
+
37
+ def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, lora_weight):
38
 
39
  if randomize_seed:
40
  seed = random.randint(0, MAX_SEED)
41
 
42
  generator = torch.Generator().manual_seed(seed)
43
+
44
+ conditioning, pooled = compel(prompt)
45
+ negative_conditioning, negative_pooled = compel(negative_prompt)
46
+
47
+ [conditioning, negative_conditioning] = compel.pad_conditioning_tensors_to_same_length([conditioning, negative_conditioning])
48
+
49
 
50
  image = pipe(
51
+ # prompt = prompt,
52
+ # negative_prompt = negative_prompt,
53
+ prompt_embeds=conditioning,
54
+ pooled_prompt_embeds=pooled,
55
+ negative_prompt_embeds=negative_conditioning,
56
+ negative_pooled_prompt_embeds=negative_pooled,
57
  guidance_scale = guidance_scale,
58
+ cross_attention_kwargs={"scale": lora_weight},
59
  num_inference_steps = num_inference_steps,
60
  width = width,
61
  height = height,
 
122
  )
123
 
124
  randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
125
+
126
+ lora_weight = gr.Slider(
127
+ label="Lora weight",
128
+ minimum=0.0,
129
+ maximum=1.0,
130
+ step=0.1,
131
+ value=0.0,
132
+ )
133
 
134
  with gr.Row():
135
 
 
174
 
175
  run_button.click(
176
  fn = infer,
177
+ inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, lora_weight],
178
  outputs = [result]
179
  )
180
 
requirements.txt CHANGED
@@ -3,4 +3,5 @@ diffusers
3
  invisible_watermark
4
  torch
5
  transformers
6
- xformers
 
 
3
  invisible_watermark
4
  torch
5
  transformers
6
+ xformers
7
+ compel