Update app.py
app.py
CHANGED
@@ -11,11 +11,18 @@ from PIL import Image
 from transformers import AutoProcessor, AutoModelForCausalLM, pipeline
 
 # The install step can be skipped; run it only when needed
-
+try:
+    subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+except:
+    print("Flash attention installation skipped")
 
 # Initialize the OpenAI client (API key is read from an environment variable)
 openai_api_key = os.getenv("OPENAI_API")
-
+if openai_api_key:
+    client = OpenAI(api_key=openai_api_key)
+else:
+    print("Warning: OPENAI_API key not found in environment variables")
+    client = None
 
 # Translation model
 translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
@@ -57,8 +64,12 @@ def florence_caption(image):
 # JSON file loading helper
 def load_json_file(file_name):
     file_path = os.path.join("data", file_name)
-
-
+    try:
+        with open(file_path, "r") as file:
+            return json.load(file)
+    except:
+        print(f"Warning: Could not load {file_name}. Using empty list.")
+        return []
 
 # Load JSON data
 ARTFORM = load_json_file("artform.json")
@@ -317,7 +328,7 @@ class PromptGenerator:
 
 class OpenAIGenerationNode:
     def __init__(self):
-        self.client =
+        self.client = client
         self.prompts_dir = "./prompts"
         os.makedirs(self.prompts_dir, exist_ok=True)
 
@@ -336,6 +347,9 @@ class OpenAIGenerationNode:
 
     def generate(self, input_text, happy_talk, compress, compression_level, poster, custom_base_prompt=""):
        try:
+            if not self.client:
+                return "Error: OpenAI API key not found. Please set OPENAI_API environment variable."
+
             # Fixed model: gpt-4.1-mini
             openai_model = "gpt-4.1-mini"
 
@@ -458,6 +472,14 @@ footer {
     font-size: 1.1em !important;
     padding: 12px 24px !important;
 }
+.section-header {
+    background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+    color: white;
+    padding: 10px 15px;
+    border-radius: 8px;
+    margin-bottom: 15px;
+    font-weight: bold;
+}
 """
 
 def create_interface():
@@ -489,105 +511,96 @@ def create_interface():
         with gr.Row():
             # Left panel - settings
             with gr.Column(scale=2):
+                gr.HTML('<div class="section-header">🎯 Basic Settings</div>')
+                seed = gr.Slider(0, 30000, label='Seed', step=1, value=random.randint(0,30000))
+                custom = gr.Textbox(label="✏️ Custom Prompt (Korean supported)", placeholder="Enter the description you want...")
+                subject = gr.Textbox(label="🎭 Subject (optional)", placeholder="e.g. a beautiful woman, a cute cat...")
+
+                # Global option setting
+                gr.HTML('<div class="section-header">⚡ Quick Settings</div>')
+                global_option = gr.Radio(
+                    ["Disabled", "Random", "No Figure Rand"],
+                    label="Set all options at once:",
+                    value="Disabled",
+                    info="Changes every option in one step"
+                )
 
                 # Detailed settings
                 with gr.Accordion("🎨 Artform & Photo Type", open=False):
+                    artform = gr.Dropdown(["disabled", "random"] + ARTFORM, label="Artform", value="disabled")
+                    photo_type = gr.Dropdown(["disabled", "random"] + PHOTO_TYPE, label="Photo Type", value="disabled")
 
                 with gr.Accordion("👤 Character Settings", open=False):
+                    body_types = gr.Dropdown(["disabled", "random"] + BODY_TYPES, label="Body Type", value="disabled")
+                    default_tags = gr.Dropdown(["disabled", "random"] + DEFAULT_TAGS, label="Default Tags", value="disabled")
+                    roles = gr.Dropdown(["disabled", "random"] + ROLES, label="Role", value="disabled")
+                    hairstyles = gr.Dropdown(["disabled", "random"] + HAIRSTYLES, label="Hairstyle", value="disabled")
+                    clothing = gr.Dropdown(["disabled", "random"] + CLOTHING, label="Clothing", value="disabled")
 
                 with gr.Accordion("🏞️ Scene Settings", open=False):
+                    place = gr.Dropdown(["disabled", "random"] + PLACE, label="Place", value="disabled")
+                    lighting = gr.Dropdown(["disabled", "random"] + LIGHTING, label="Lighting", value="disabled")
+                    composition = gr.Dropdown(["disabled", "random"] + COMPOSITION, label="Composition", value="disabled")
+                    pose = gr.Dropdown(["disabled", "random"] + POSE, label="Pose", value="disabled")
+                    background = gr.Dropdown(["disabled", "random"] + BACKGROUND, label="Background", value="disabled")
 
                 with gr.Accordion("🎭 Style & Artist", open=False):
+                    additional_details = gr.Dropdown(["disabled", "random"] + ADDITIONAL_DETAILS, label="Additional Details", value="disabled")
+                    photography_styles = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHY_STYLES, label="Photography Style", value="disabled")
+                    device = gr.Dropdown(["disabled", "random"] + DEVICE, label="Camera Equipment", value="disabled")
+                    photographer = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHER, label="Photographer", value="disabled")
+                    artist = gr.Dropdown(["disabled", "random"] + ARTIST, label="Artist", value="disabled")
+                    digital_artform = gr.Dropdown(["disabled", "random"] + DIGITAL_ARTFORM, label="Digital Artform", value="disabled")
 
                 generate_button = gr.Button("🚀 Generate Prompt", variant="primary", elem_classes="generate-button")
 
             # Middle panel - image and output
             with gr.Column(scale=2):
                 with gr.Accordion("🖼️ Image Caption Generation", open=False):
+                    input_image = gr.Image(label="Upload Image (optional)", type="pil")
+                    caption_output = gr.Textbox(label="Generated Caption", lines=3)
+                    with gr.Row():
+                        create_caption_button = gr.Button("📝 Generate Caption", variant="secondary")
+                        add_caption_button = gr.Button("➕ Add to Prompt", variant="secondary")
+
+                gr.HTML('<div class="section-header">📝 Generated Prompt</div>')
+                output = gr.Textbox(label="Main Prompt", lines=4)
+                with gr.Accordion("Advanced Output Options", open=False):
+                    t5xxl_output = gr.Textbox(label="T5XXL", lines=2)
+                    clip_l_output = gr.Textbox(label="CLIP L", lines=2)
+                    clip_g_output = gr.Textbox(label="CLIP G", lines=2)
 
             # Right panel - OpenAI
             with gr.Column(scale=2):
+                gr.HTML('<div class="section-header">🤖 OpenAI Prompt Enhancement</div>')
+                gr.HTML("<p style='text-align: center; color: #95a5a6;'>Model: gpt-4.1-mini</p>")
+
+                with gr.Row():
+                    happy_talk = gr.Checkbox(label="😊 Happy Talk", value=True, info="More detailed description")
+                    compress = gr.Checkbox(label="🗜️ Compress", value=True, info="Limit output length")
+
+                compression_level = gr.Radio(
+                    ["soft", "medium", "hard"],
+                    label="Compression Level",
+                    value="hard",
+                    visible=True
+                )
+
+                poster = gr.Checkbox(label="🎬 Movie Poster Style", value=False)
+
+                custom_base_prompt = gr.Textbox(
+                    label="🛠️ Custom Base Prompt",
+                    lines=5,
+                    placeholder="Enter special instructions for OpenAI..."
+                )
+
+                generate_text_button = gr.Button("✨ Enhance Prompt with AI", variant="primary", elem_classes="openai-button")
+
+                text_output = gr.Textbox(
+                    label="🎯 AI Enhanced Result",
+                    lines=10,
+                    elem_classes="output-container"
+                )
 
         # Event handlers
         def create_caption(image):
@@ -631,7 +644,7 @@ def create_interface():
 
         # Global option change function
         def update_all_options(choice):
-            updates =
+            updates = []
             dropdown_list = [
                 artform, photo_type, body_types, default_tags, roles, hairstyles, clothing,
                 place, lighting, composition, pose, background, additional_details,
@@ -639,20 +652,22 @@
             ]
 
             if choice == "Disabled":
-                for
-                    updates[dropdown] = gr.update(value="disabled")
+                updates = [gr.update(value="disabled") for _ in dropdown_list]
             elif choice == "Random":
-                for
-                    updates[dropdown] = gr.update(value="random")
+                updates = [gr.update(value="random") for _ in dropdown_list]
             else:  # No Figure Random
+                updates = []
                 # Character-related settings are set to disabled
-                for dropdown in
+                character_dropdowns = [photo_type, body_types, default_tags, roles, hairstyles, clothing, pose, additional_details]
+                other_dropdowns = [artform, place, lighting, composition, background, photography_styles, device, photographer, artist, digital_artform]
+
+                for dropdown in dropdown_list:
+                    if dropdown in character_dropdowns:
+                        updates.append(gr.update(value="disabled"))
+                    else:
+                        updates.append(gr.update(value="random"))
 
-            return
+            return updates
 
         global_option.change(
             update_all_options,