eienmojiki committed
Commit 11ec090 · verified · 1 Parent(s): e81b8e3

Update app.py

Files changed (1):
  app.py +19 -0
app.py CHANGED
@@ -16,6 +16,7 @@ HF_TOKEN = os.getenv("HF_TOKEN")
 MIN_IMAGE_SIZE = int(os.getenv("MIN_IMAGE_SIZE", "512"))
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2048"))
 MAX_SEED = np.iinfo(np.int32).max
+CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
 
 sampler_list = [
     "DPM++ 2M Karras",
@@ -26,6 +27,16 @@ sampler_list = [
     "DDIM",
 ]
 
+examples = [
+    """
+1girl,
+midori \(blue archive\), blue archive,
+(ningen mame:0.9), ciloranko, sho \(sho lwlw\), (tianliang duohe fangdongye:0.8), ask \(askzy\), wlop,
+indoors, plant, hair bow, cake, cat ears, food, smile, animal ear headphones, bare legs, short shorts, drawing \(object\), feet, legs, on back, bed, solo, green eyes, cat, table, window blinds, headphones, nintendo switch, toes, bow, toenails, looking at viewer, chips \(food\), potted plant, halo, calendar \(object\), tray, blonde hair, green halo, lying, barefoot, bare shoulders, blunt bangs, green shorts, picture frame, fake animal ears, closed mouth, shorts, handheld game console, green bow, animal ears, on bed, medium hair, knees up, upshorts, eating, potato chips, pillow, blush, dolphin shorts, ass, character doll, alternate costume,
+masterpiece, newest, absurdres
+"""
+]
+
 torch.backends.cudnn.deterministic = True
 torch.backends.cudnn.benchmark = False
 
@@ -198,6 +209,14 @@ with gr.Blocks(
     with gr.Group():
         used_seed = gr.Number(label="Used Seed", interactive=False)
 
+    gr.Examples(
+        examples=examples,
+        inputs=prompt,
+        outputs=[result, used_seed],
+        fn=lambda *args, **kwargs: generate(*args, **kwargs),
+        cache_examples=CACHE_EXAMPLES,
+    )
+
     gr.on(
         triggers=[
            prompt.submit,
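
For context on the Gradio API this commit wires in: gr.Examples registers clickable example rows that fill its inputs component(s); when fn and outputs are also supplied and cache_examples is enabled, Gradio computes the outputs once up front and serves them from cache instead of re-running the model on every click. The following is a minimal, self-contained sketch of the same pattern; the echo function and the plain Textbox components are illustrative stand-ins for this Space's actual generate pipeline and result components, not code from the commit.

import gradio as gr

# Stand-in for the Space's real generate() in app.py (assumption: it returns
# an output plus the seed that was used, matching outputs=[result, used_seed]).
def echo(prompt_text):
    return f"result for: {prompt_text}", 42

examples = ["1girl, masterpiece, newest, absurdres"]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Textbox(label="Result")
    used_seed = gr.Number(label="Used Seed", interactive=False)

    # Clicking an example row fills `prompt`; because fn/outputs are given,
    # Gradio can also run the function and, with cache_examples=True,
    # pre-compute and cache the outputs instead of recomputing per click.
    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=[result, used_seed],
        fn=echo,
        cache_examples=False,  # enable only when pre-computing is desired
    )

    prompt.submit(fn=echo, inputs=prompt, outputs=[result, used_seed])

if __name__ == "__main__":
    demo.launch()

In the commit itself, cache_examples is driven by CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1", which keeps example caching opt-in and disabled on CPU-only hardware.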