import gradio as gr
import torch
from diffusers import StableDiffusionXLPipeline, AutoencoderKL, KDPM2AncestralDiscreteScheduler
from huggingface_hub import hf_hub_download
import spaces
from PIL import Image
import requests
from translatepy import Translator
import random
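
# translatepy is used below to translate the incoming prompt to English before generation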
translator = Translator()

# Model and VAE checkpoints
model = "Corcelio/mobius"
vae_model = "madebyollin/sdxl-vae-fp16-fix"
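
# Custom CSS that narrows the Gradio container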
CSS = """
.gradio-container {
max-width: 690px !important;
}
"""
JS = """function () {
gradioURL = window.location.href
if (!gradioURL.endsWith('?__theme=dark')) {
window.location.replace(gradioURL + '?__theme=dark');
}
}"""

# Load the VAE component
vae = AutoencoderKL.from_pretrained(
vae_model,
torch_dtype=torch.float16
)

# Initialize the pipeline and scheduler when a GPU is available
if torch.cuda.is_available():
    pipe = StableDiffusionXLPipeline.from_pretrained(model, vae=vae, torch_dtype=torch.float16).to("cuda")
    pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)

# Image generation function
@spaces.GPU()
def generate_image(
        prompt,
        negative="low quality",
        width=1024,
        height=1024,
        scale=1.5,
        steps=30):
    # Translate the prompt to English so non-English input works with the model
    prompt = str(translator.translate(prompt, 'English'))
    print(f'prompt:{prompt}')

    # Two independent seeds so the two generated images differ
    generator1 = torch.manual_seed(random.randint(0, 10000))
    generator2 = torch.manual_seed(random.randint(0, 10000))

    images1 = pipe(
        prompt,
        negative_prompt=negative,
        width=width,
        height=height,
        guidance_scale=scale,
        num_inference_steps=steps,
        generator=generator1,
    ).images
    images2 = pipe(
        prompt,
        negative_prompt=negative,
        width=width,
        height=height,
        guidance_scale=scale,
        num_inference_steps=steps,
        generator=generator2,
    ).images

    return images1[0], images2[0]  # return both images
examples = [
"์๋ฆ๋ค์ด 20์ธ ํ๊ตญ ์ฌ์ ๋ชจ๋ธ, 'ํ๊ตญ ์ฌ์๊ฐ์ ์์ด์ ๋ฎ์ ์ผ๊ตด', ๊ฒ์์ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ๊ฐ์ ์ ๋ํผ, ๋ฐฐ๊ฒฝ ํฐ์, ์ค๋ง์ผ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ ์ ๋
ธ์ถ, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 20์ธ ์๊ตญ ์ฌ์ ๋ชจ๋ธ, '์ ๋ง์์จ ๋ฎ์ ์ผ๊ตด', ๊ธ๋ฐ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, ์ด๋ธ๋ ๋๋ ์ค, ๋ฐฐ๊ฒฝ ์์์, ์ค๋ง์ผ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ ์ ๋
ธ์ถ, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 20์ธ ํ๊ตญ ์ฌ์ ๋ชจ๋ธ, 'ํ๊ตญ ์ฌ์ ์์ด๋ ๋ฎ์ ์ผ๊ตด', ๊ฒ์์ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, ๋นํค๋ ์์๋ณต, ๋ฐฐ๊ฒฝ ์์์ฅ, ์ค๋ง์ผ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ ์ ๋
ธ์ถ, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์์๊ธด 23์ธ ์ค์จ๋ด ๋จ์ ๋ชจ๋ธ, ๊ธ๋ฐ ๋จธ๋ฆฌ, ๊ฑด์ฅํ ๋ชธ๋งค, ๋ฐฐ๊ฒฝ ์์์ฅ, ์ค๋ง์ผ ํ์ , ์ ๊ธ๋ผ์ค ์ฐฉ์ฉ, ๋น์ง๋์ค ์ํธ, ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 18์ธ ์ผ๋ณธ ์ฌ์ ๋ชจ๋ธ, ๊ฒ์์ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, ์ค๋ง์ผ ํ์ , ๊ต๋ณต ์ ๋ํผ, ๋ฐฐ๊ฒฝ ํ๊ต ๊ต์ค, ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 20์ธ ๋ธ๋ผ์ง ์ฌ์ ๋ชจ๋ธ, ๊ฒ์์ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ๊ฐํธ์ฌ ์ ๋ํผ, ๋ฐฐ๊ฒฝ ํฐ์, ์ค๋ง์ผ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ ์ ๋
ธ์ถ, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 20์ธ ์ค์จ๋ด ์ฌ์ ๋ชจ๋ธ, ๊ธ๋ฐ ๊ธด ์๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ๋นํค๋ ์์๋ณต, ๋ฐฐ๊ฒฝ ํด๋ณ๊ฐ, ์ค๋ง์ผ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 18์ธ ๋ฌ์์ ์ฌ์ ๋ชจ๋ธ, ๊ธ๋ฐ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ๋นํค๋ ์์๋ณต, ๋ฐฐ๊ฒฝ ์์์ฅ, ์์ํ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 20์ธ ํ๋์ค ์ฌ์ ๋ชจ๋ธ, ๊ฐ์ ์งง์ ๋จ๋ฐ๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ๋น์ฆ๋์ค ์ ์ฅ, ๋ฐฐ๊ฒฝ ์ฌ๋ฌด์ค, ํฌ๊ฒ ์๋ ํ์ , ๋ชจ๋ธ ํฌ์ฆ, ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง",
"์๋ฆ๋ค์ด 16์ธ ์ฐํฌ๋ผ์ด๋ ์ฌ์ ๋ชจ๋ธ, ๊ฐ์ ๊ธด ์๋จธ๋ฆฌ, C์ปต ์ฌ์ด์ฆ์ ํฐ ๊ฐ์ด, ํฐ ๊ณจ๋ฐ, ์คํผ์ค ์ ๋ํผ, ์น์ค ํฌ์ฆ, ๋ฐฐ๊ฒฝ ํธํ
, ํ๋ณตํ ํ์ , ์ ๋ฉด ์์, ์ด๊ณ ํด์๋ ์ฌ์ง"
]

# Gradio interface
with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
    gr.HTML("<h1><center>Create Your Own Model Character</center></h1>")
    with gr.Group():
        with gr.Row():
            prompt = gr.Textbox(label='Enter Your Prompt', value="best quality, HD, aesthetic", scale=6)
            submit = gr.Button(scale=1, variant='primary')
        img1 = gr.Image(label='Generated Image 1')
        img2 = gr.Image(label='Generated Image 2')
    with gr.Accordion("Advanced Options", open=False):
        with gr.Row():
            negative = gr.Textbox(label="Negative prompt", value="low quality, (deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, (NSFW:1.25)")
        with gr.Row():
            width = gr.Slider(
                label="Width",
                minimum=512,
                maximum=1280,
                step=8,
                value=1024,
            )
            height = gr.Slider(
                label="Height",
                minimum=512,
                maximum=1280,
                step=8,
                value=1024,
            )
        with gr.Row():
            scale = gr.Slider(
                label="Guidance",
                minimum=3.5,
                maximum=7,
                step=0.1,
                value=7,
            )
            steps = gr.Slider(
                label="Steps",
                minimum=1,
                maximum=50,
                step=1,
                value=50,
            )
    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=[img1, img2],
        fn=generate_image,
        cache_examples=False,  # do not cache example outputs
    )

    prompt.submit(
        fn=generate_image,
        inputs=[prompt, negative, width, height, scale, steps],
        outputs=[img1, img2],
    )
    submit.click(
        fn=generate_image,
        inputs=[prompt, negative, width, height, scale, steps],
        outputs=[img1, img2],
    )
#demo.queue().launch()
demo.queue().launch(auth=("gini", "pick"))