ginipick committed on
Commit
4a06587
·
verified ·
1 Parent(s): 230cfaa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -30
app.py CHANGED
@@ -6,10 +6,11 @@ import spaces
6
  from PIL import Image
7
  import requests
8
  from translatepy import Translator
 
9
 
10
  translator = Translator()
11
 
12
- # Constants
13
  model = "Corcelio/mobius"
14
  vae_model = "madebyollin/sdxl-vae-fp16-fix"
15
 
@@ -29,20 +30,19 @@ JS = """function () {
29
  }
30
  }"""
31
 
32
- # Load VAE component
33
  vae = AutoencoderKL.from_pretrained(
34
  vae_model,
35
  torch_dtype=torch.float16
36
  )
37
 
38
- # Ensure model and scheduler are initialized in GPU-enabled function
39
  if torch.cuda.is_available():
40
  pipe = StableDiffusionXLPipeline.from_pretrained(model, vae=vae, torch_dtype=torch.float16).to("cuda")
41
 
42
  pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
43
 
44
-
45
- # Function
46
  @spaces.GPU()
47
  def generate_image(
48
  prompt,
@@ -50,32 +50,45 @@ def generate_image(
50
  width=1024,
51
  height=1024,
52
  scale=1.5,
53
- steps=30,
54
- clip=3):
55
 
56
  prompt = str(translator.translate(prompt, 'English'))
57
 
58
  print(f'prompt:{prompt}')
59
-
60
- image = pipe(
 
 
 
61
  prompt,
62
  negative_prompt=negative,
63
  width=width,
64
  height=height,
65
  guidance_scale=scale,
66
  num_inference_steps=steps,
67
- clip_skip=clip,
68
- )
69
- return image.images[0]
 
 
 
 
 
 
 
 
 
 
 
70
 
71
 
72
  examples = [
73
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ•œ๊ตญ ์—ฌ์ž ๋ชจ๋ธ, 'ํ•œ๊ตญ ์—ฌ์ž๊ฐ€์ˆ˜ ์•„์ด์œ  ๋‹ฎ์€ ์–ผ๊ตด', ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๊ฐ€์ˆ˜ ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํฐ์ƒ‰, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
74
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ์˜๊ตญ ์—ฌ์ž ๋ชจ๋ธ, '์— ๋งˆ์™“์Šจ ๋‹ฎ์€ ์–ผ๊ตด', ๊ธˆ๋ฐœ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ์ด๋ธŒ๋‹ ๋“œ๋ ˆ์Šค, ๋ฐฐ๊ฒฝ ์‹œ์ƒ์‹, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
75
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ•œ๊ตญ ์—ฌ์ž ๋ชจ๋ธ, 'ํ•œ๊ตญ ์—ฌ์ž ์•„์ด๋Œ ๋‹ฎ์€ ์–ผ๊ตด', ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ์ˆ˜์˜์žฅ, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
76
- "์•„๋ฆ„๋‹ค์šด 23์„ธ ์ค‘๊ตญ๊ตญ ์—ฌ์ž ๋ชจ๋ธ, ๊ฐˆ์ƒ‰ ๊ธด ์ƒ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ๋ฐฐ๊ฒฝ ์ŠคํŠœ๋””์˜ค, ์ง„์ง€ํ•œ ํ‘œ์ •, ์˜คํ”ผ์Šค ์œ ๋‹ˆํผ, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
77
  "์•„๋ฆ„๋‹ค์šด 18์„ธ ์ผ๋ณธ ์—ฌ์ž ๋ชจ๋ธ, ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ์Šค๋งˆ์ผ ํ‘œ์ •, ๊ต๋ณต ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํ•™๊ต ๊ต์‹ค, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
78
- "์•„๋ฆ„๋‹ค์šด 20์„ธ ๋ธŒ๋ผ์งˆ ์—ฌ์ž ๋ชจ๋ธ, ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๊ฐ„ํ˜ธ์‚ฌ ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํฐ์ƒ‰, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
79
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ์Šค์›จ๋ด ์—ฌ์ž ๋ชจ๋ธ, ๊ธˆ๋ฐœ ๊ธด ์ƒ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ํ•ด๋ณ€๊ฐ€, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
80
  "์•„๋ฆ„๋‹ค์šด 18์„ธ ๋Ÿฌ์‹œ์•„ ์—ฌ์ž ๋ชจ๋ธ, ๊ธˆ๋ฐœ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ์ˆ˜์˜์žฅ, ์—„์ˆ™ํ•œ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
81
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ”„๋ž‘์Šค ์—ฌ์ž ๋ชจ๋ธ, ๊ฐˆ์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„์ฆˆ๋‹ˆ์Šค ์ •์žฅ, ๋ฐฐ๊ฒฝ ์‚ฌ๋ฌด์‹ค, ํฌ๊ฒŒ ์›ƒ๋Š” ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
@@ -83,7 +96,7 @@ examples = [
83
  ]
84
 
85
 
86
- # Gradio Interface
87
 
88
  with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
89
  gr.HTML("<h1><center>๋‚˜๋งŒ์˜ ๋ชจ๋ธ ์บ๋ฆญํ„ฐ ์ƒ์„ฑ</center></h1>")
@@ -91,7 +104,8 @@ with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
91
  with gr.Row():
92
  prompt = gr.Textbox(label='Enter Your Prompt', value="best quality, HD, aesthetic", scale=6)
93
  submit = gr.Button(scale=1, variant='primary')
94
- img = gr.Image(label='Generated Image')
 
95
  with gr.Accordion("Advanced Options", open=False):
96
  with gr.Row():
97
  negative = gr.Textbox(label="Negative prompt", value="low quality, low quality, (deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, (NSFW:1.25)")
@@ -125,29 +139,22 @@ with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
125
  step=1,
126
  value=50,
127
  )
128
- clip = gr.Slider(
129
- label="Clip Skip",
130
- minimum=1,
131
- maximum=10,
132
- step=1,
133
- value=3,
134
- )
135
  gr.Examples(
136
  examples=examples,
137
  inputs=prompt,
138
- outputs=img,
139
  fn=generate_image,
140
- cache_examples="lazy",
141
  )
142
 
143
  prompt.submit(fn=generate_image,
144
- inputs=[prompt, negative, width, height, scale, steps, clip],
145
- outputs=img,
146
  )
147
  submit.click(fn=generate_image,
148
- inputs=[prompt, negative, width, height, scale, steps, clip],
149
- outputs=img,
150
  )
151
 
152
  #demo.queue().launch()
153
- demo.queue().launch(auth=("gini", "pick"))
 
6
  from PIL import Image
7
  import requests
8
  from translatepy import Translator
9
+ import random
10
 
11
  translator = Translator()
12
 
13
+ # ์ƒ์ˆ˜ ์ •์˜
14
  model = "Corcelio/mobius"
15
  vae_model = "madebyollin/sdxl-vae-fp16-fix"
16
 
 
30
  }
31
  }"""
32
 
33
+ # VAE ์ปดํฌ๋„ŒํŠธ ๋กœ๋“œ
34
  vae = AutoencoderKL.from_pretrained(
35
  vae_model,
36
  torch_dtype=torch.float16
37
  )
38
 
39
+ # GPU ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๊ฒฝ์šฐ ๋ชจ๋ธ ๋ฐ ์Šค์ผ€์ค„๋Ÿฌ ์ดˆ๊ธฐํ™”
40
  if torch.cuda.is_available():
41
  pipe = StableDiffusionXLPipeline.from_pretrained(model, vae=vae, torch_dtype=torch.float16).to("cuda")
42
 
43
  pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
44
 
45
+ # ํ•จ์ˆ˜ ์ •์˜
 
46
  @spaces.GPU()
47
  def generate_image(
48
  prompt,
 
50
  width=1024,
51
  height=1024,
52
  scale=1.5,
53
+ steps=30):
 
54
 
55
  prompt = str(translator.translate(prompt, 'English'))
56
 
57
  print(f'prompt:{prompt}')
58
+
59
+ generator1 = torch.manual_seed(random.randint(0, 10000))
60
+ generator2 = torch.manual_seed(random.randint(0, 10000))
61
+
62
+ images1 = pipe(
63
  prompt,
64
  negative_prompt=negative,
65
  width=width,
66
  height=height,
67
  guidance_scale=scale,
68
  num_inference_steps=steps,
69
+ generator=generator1
70
+ ).images
71
+
72
+ images2 = pipe(
73
+ prompt,
74
+ negative_prompt=negative,
75
+ width=width,
76
+ height=height,
77
+ guidance_scale=scale,
78
+ num_inference_steps=steps,
79
+ generator=generator2
80
+ ).images
81
+
82
+ return images1[0], images2[0] # ๋‘ ์ด๋ฏธ์ง€๋ฅผ ๋ฐ˜ํ™˜
83
 
84
 
85
  examples = [
86
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ•œ๊ตญ ์—ฌ์ž ๋ชจ๋ธ, 'ํ•œ๊ตญ ์—ฌ์ž๊ฐ€์ˆ˜ ์•„์ด์œ  ๋‹ฎ์€ ์–ผ๊ตด', ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๊ฐ€์ˆ˜ ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํฐ์ƒ‰, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
87
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ์˜๊ตญ ์—ฌ์ž ๋ชจ๋ธ, '์— ๋งˆ์™“์Šจ ๋‹ฎ์€ ์–ผ๊ตด', ๊ธˆ๋ฐœ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ์ด๋ธŒ๋‹ ๋“œ๋ ˆ์Šค, ๋ฐฐ๊ฒฝ ์‹œ์ƒ์‹, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
88
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ•œ๊ตญ ์—ฌ์ž ๋ชจ๋ธ, 'ํ•œ๊ตญ ์—ฌ์ž ์•„์ด๋Œ ๋‹ฎ์€ ์–ผ๊ตด', ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ์ˆ˜์˜์žฅ, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
89
+ "์ž˜์ƒ๊ธด 23์„ธ ์Šค์›จ๋ด ๋‚จ์ž ๋ชจ๋ธ, ๊ธˆ๋ฐœ ๋จธ๋ฆฌ, ๊ฑด์žฅํ•œ ๋ชธ๋งค, ๋ฐฐ๊ฒฝ ์ˆ˜์˜์žฅ, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋น„์ง€๋‹ˆ์Šค ์ŠˆํŠธ, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
90
  "์•„๋ฆ„๋‹ค์šด 18์„ธ ์ผ๋ณธ ์—ฌ์ž ๋ชจ๋ธ, ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, ์Šค๋งˆ์ผ ํ‘œ์ •, ๊ต๋ณต ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํ•™๊ต ๊ต์‹ค, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
91
+ "์•„๋ฆ„๋‹ค์šด 20์„ธ ๋ธŒ๋ผ์งˆ ์—ฌ์ž ๋ชจ๋ธ, ๊ฒ€์€์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๊ฐ„ํ˜ธ์‚ฌ ์œ ๋‹ˆํผ, ๋ฐฐ๊ฒฝ ํฐ์ƒ‰, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ „์‹  ๋…ธ์ถœ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
92
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ์Šค์›จ๋ด ์—ฌ์ž ๋ชจ๋ธ, ๊ธˆ๋ฐœ ๊ธด ์ƒ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ํ•ด๋ณ€๊ฐ€, ์Šค๋งˆ์ผ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
93
  "์•„๋ฆ„๋‹ค์šด 18์„ธ ๋Ÿฌ์‹œ์•„ ์—ฌ์ž ๋ชจ๋ธ, ๊ธˆ๋ฐœ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„ํ‚ค๋‹ˆ ์ˆ˜์˜๋ณต, ๋ฐฐ๊ฒฝ ์ˆ˜์˜์žฅ, ์—„์ˆ™ํ•œ ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
94
  "์•„๋ฆ„๋‹ค์šด 20์„ธ ํ”„๋ž‘์Šค ์—ฌ์ž ๋ชจ๋ธ, ๊ฐˆ์ƒ‰ ์งง์€ ๋‹จ๋ฐœ๋จธ๋ฆฌ, C์ปต ์‚ฌ์ด์ฆˆ์˜ ํฐ ๊ฐ€์Šด, ํฐ ๊ณจ๋ฐ˜, ๋น„์ฆˆ๋‹ˆ์Šค ์ •์žฅ, ๋ฐฐ๊ฒฝ ์‚ฌ๋ฌด์‹ค, ํฌ๊ฒŒ ์›ƒ๋Š” ํ‘œ์ •, ๋ชจ๋ธ ํฌ์ฆˆ, ์ •๋ฉด ์‘์‹œ, ์ดˆ๊ณ ํ•ด์ƒ๋„ ์‚ฌ์ง„",
 
96
  ]
97
 
98
 
99
+ # Gradio ์ธํ„ฐํŽ˜์ด์Šค
100
 
101
  with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
102
  gr.HTML("<h1><center>๋‚˜๋งŒ์˜ ๋ชจ๋ธ ์บ๋ฆญํ„ฐ ์ƒ์„ฑ</center></h1>")
 
104
  with gr.Row():
105
  prompt = gr.Textbox(label='Enter Your Prompt', value="best quality, HD, aesthetic", scale=6)
106
  submit = gr.Button(scale=1, variant='primary')
107
+ img1 = gr.Image(label='Generated Image 1')
108
+ img2 = gr.Image(label='Generated Image 2')
109
  with gr.Accordion("Advanced Options", open=False):
110
  with gr.Row():
111
  negative = gr.Textbox(label="Negative prompt", value="low quality, low quality, (deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, (NSFW:1.25)")
 
139
  step=1,
140
  value=50,
141
  )
 
 
 
 
 
 
 
142
  gr.Examples(
143
  examples=examples,
144
  inputs=prompt,
145
+ outputs=[img1, img2],
146
  fn=generate_image,
147
+ cache_examples=False, # ์บ์‹œ ์ƒ์„ฑํ•˜์ง€ ์•Š๋„๋ก ์„ค์ •
148
  )
149
 
150
  prompt.submit(fn=generate_image,
151
+ inputs=[prompt, negative, width, height, scale, steps],
152
+ outputs=[img1, img2],
153
  )
154
  submit.click(fn=generate_image,
155
+ inputs=[prompt, negative, width, height, scale, steps],
156
+ outputs=[img1, img2],
157
  )
158
 
159
  #demo.queue().launch()
160
+ demo.queue().launch(auth=("gini", "pick"))