eienmojiki committed (verified)
Commit fc73707 · Parent(s): 7150625

Update app.py

Files changed (1):
  app.py  +27 -5
app.py CHANGED
@@ -28,13 +28,25 @@ sampler_list = [
 ]
 
 examples = [
-    """
-    1girl,
+    [
+        """
+        1girl,
     midori \(blue archive\), blue archive,
     (ningen mame:0.9), ciloranko, sho \(sho lwlw\), (tianliang duohe fangdongye:0.8), ask \(askzy\), wlop,
     indoors, plant, hair bow, cake, cat ears, food, smile, animal ear headphones, bare legs, short shorts, drawing \(object\), feet, legs, on back, bed, solo, green eyes, cat, table, window blinds, headphones, nintendo switch, toes, bow, toenails, looking at viewer, chips \(food\), potted plant, halo, calendar \(object\), tray, blonde hair, green halo, lying, barefoot, bare shoulders, blunt bangs, green shorts, picture frame, fake animal ears, closed mouth, shorts, handheld game console, green bow, animal ears, on bed, medium hair, knees up, upshorts, eating, potato chips, pillow, blush, dolphin shorts, ass, character doll, alternate costume,
     masterpiece, newest, absurdres
-    """
+        """,
+        """
+        bad anatomy,blurry,(worst quality:1.8),low quality,hands bad,face bad,(normal quality:1.3),bad hands,mutated hands and fingers,extra legs,extra arms,duplicate,cropped,text,jpeg,artifacts,signature,watermark,username,blurry,artist name,trademark,title,multiple view,Reference sheet,long body,multiple breasts,mutated,bad anatomy,disfigured,bad proportions,duplicate,bad feet,artist name,ugly,text font ui,missing limb,monochrome,
+        """,
+        0,
+        1024,
+        1536,
+        5.0,
+        26,
+        "Euler a",
+        2
+    ]
 ]
 
 torch.backends.cudnn.deterministic = True

@@ -95,7 +107,7 @@ def generate(
     height: int = 1024,
     guidance_scale: float = 5.0,
     num_inference_steps: int = 26,
-    sampler: str = "Eul""er a",
+    sampler: str = "Euler a",
     clip_skip: int = 1,
     progress=gr.Progress(track_tqdm=True),
 ):

@@ -256,7 +268,17 @@ with gr.Blocks(
 
     gr.Examples(
         examples=examples,
-        inputs=prompt,
+        inputs=[
+            prompt,
+            negative_prompt,
+            seed,
+            width,
+            height,
+            guidance_scale,
+            num_inference_steps,
+            sampler,
+            clip_skip
+        ],
         outputs=[result, used_seed],
         fn=lambda *args, **kwargs: generate(*args, **kwargs),
         cache_examples=CACHE_EXAMPLES,
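For context, here is a minimal, self-contained sketch of the gr.Examples pattern this commit moves to: each example row is a list whose entries line up, in order, with the components passed to inputs. The component names mirror those in the diff, but the dummy generate() function, the gr.Slider ranges, and the gr.Dropdown sampler choices are illustrative assumptions rather than the Space's actual implementation.

```python
import gradio as gr

def generate(prompt, negative_prompt, seed, width, height,
             guidance_scale, num_inference_steps, sampler, clip_skip):
    # Stand-in for the real diffusion pipeline: just echo the settings back.
    return f"{prompt[:40]}... | {width}x{height} | {sampler} | seed={seed}", seed

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    negative_prompt = gr.Textbox(label="Negative prompt")
    seed = gr.Number(label="Seed", value=0)
    width = gr.Slider(512, 2048, value=1024, step=64, label="Width")
    height = gr.Slider(512, 2048, value=1024, step=64, label="Height")
    guidance_scale = gr.Slider(1.0, 12.0, value=5.0, label="Guidance scale")
    num_inference_steps = gr.Slider(1, 50, value=26, step=1, label="Steps")
    sampler = gr.Dropdown(["Euler a", "DPM++ 2M Karras"], value="Euler a", label="Sampler")
    clip_skip = gr.Slider(1, 2, value=1, step=1, label="Clip skip")
    result = gr.Textbox(label="Result")
    used_seed = gr.Number(label="Used seed")

    # Each example row supplies one value per input component, in the same
    # order as the `inputs` list below.
    gr.Examples(
        examples=[[
            "1girl, midori \\(blue archive\\), masterpiece, newest, absurdres",
            "bad anatomy, blurry, (worst quality:1.8), low quality",
            0, 1024, 1536, 5.0, 26, "Euler a", 2,
        ]],
        inputs=[prompt, negative_prompt, seed, width, height,
                guidance_scale, num_inference_steps, sampler, clip_skip],
        outputs=[result, used_seed],
        fn=generate,
        cache_examples=False,
    )

if __name__ == "__main__":
    demo.launch()
```

The ordering constraint is the reason the commit expands the single prompt string into a nine-element row: once inputs lists nine components, every example must provide a matching nine-value list so that clicking it populates prompt, negative prompt, seed, resolution, guidance scale, steps, sampler, and clip skip together.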