ginipick committed · verified
Commit 1597d72 · 1 Parent(s): 82dcf73

Update app.py

Files changed (1):
  1. app.py +112 -97
app.py CHANGED
@@ -11,11 +11,18 @@ from PIL import Image
 from transformers import AutoProcessor, AutoModelForCausalLM, pipeline

 # The install step can be skipped and only needs to run when required
- subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)

 # Initialize the OpenAI client (the API key is taken from an environment variable)
 openai_api_key = os.getenv("OPENAI_API")
- client = OpenAI(api_key=openai_api_key)

 # Add the translation model
 translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
@@ -57,8 +64,12 @@ def florence_caption(image):
 # Helper for loading JSON files
 def load_json_file(file_name):
     file_path = os.path.join("data", file_name)
-     with open(file_path, "r") as file:
-         return json.load(file)

 # Load the JSON data
 ARTFORM = load_json_file("artform.json")
@@ -317,7 +328,7 @@ class PromptGenerator:

 class OpenAIGenerationNode:
     def __init__(self):
-         self.client = OpenAI()
         self.prompts_dir = "./prompts"
         os.makedirs(self.prompts_dir, exist_ok=True)
@@ -336,6 +347,9 @@ class OpenAIGenerationNode:

     def generate(self, input_text, happy_talk, compress, compression_level, poster, custom_base_prompt=""):
         try:
             # Fixed model: gpt-4.1-mini
             openai_model = "gpt-4.1-mini"
@@ -458,6 +472,14 @@ footer {
     font-size: 1.1em !important;
     padding: 12px 24px !important;
 }
 """

 def create_interface():
@@ -489,105 +511,96 @@ def create_interface():
     with gr.Row():
         # Left panel - settings
         with gr.Column(scale=2):
-             with gr.Box(elem_classes="settings-container"):
-                 gr.Markdown("### 🎯 Basic Settings")
-                 seed = gr.Slider(0, 30000, label='Seed value', step=1, value=random.randint(0,30000))
-                 custom = gr.Textbox(label="✍️ Custom prompt (Korean allowed)", placeholder="Enter the description you want...")
-                 subject = gr.Textbox(label="🎭 Subject (optional)", placeholder="e.g. a beautiful woman, a cute cat, etc...")
-
-                 # Global option settings
-                 gr.Markdown("### ⚡ Quick Settings")
-                 global_option = gr.Radio(
-                     ["Disabled", "Random", "No Figure Rand"],
-                     label="Bulk-set all options:",
-                     value="Disabled",
-                     info="Changes all settings at once"
-                 )

             # Detailed settings
             with gr.Accordion("🎨 Art Form & Photo Type", open=False):
-                 with gr.Box(elem_classes="settings-container"):
-                     artform = gr.Dropdown(["disabled", "random"] + ARTFORM, label="Art form", value="disabled")
-                     photo_type = gr.Dropdown(["disabled", "random"] + PHOTO_TYPE, label="Photo type", value="disabled")

             with gr.Accordion("👤 Character Settings", open=False):
-                 with gr.Box(elem_classes="settings-container"):
-                     body_types = gr.Dropdown(["disabled", "random"] + BODY_TYPES, label="Body type", value="disabled")
-                     default_tags = gr.Dropdown(["disabled", "random"] + DEFAULT_TAGS, label="Default tags", value="disabled")
-                     roles = gr.Dropdown(["disabled", "random"] + ROLES, label="Role", value="disabled")
-                     hairstyles = gr.Dropdown(["disabled", "random"] + HAIRSTYLES, label="Hairstyle", value="disabled")
-                     clothing = gr.Dropdown(["disabled", "random"] + CLOTHING, label="Clothing", value="disabled")

             with gr.Accordion("🏞️ Scene Settings", open=False):
-                 with gr.Box(elem_classes="settings-container"):
-                     place = gr.Dropdown(["disabled", "random"] + PLACE, label="Place", value="disabled")
-                     lighting = gr.Dropdown(["disabled", "random"] + LIGHTING, label="Lighting", value="disabled")
-                     composition = gr.Dropdown(["disabled", "random"] + COMPOSITION, label="Composition", value="disabled")
-                     pose = gr.Dropdown(["disabled", "random"] + POSE, label="Pose", value="disabled")
-                     background = gr.Dropdown(["disabled", "random"] + BACKGROUND, label="Background", value="disabled")

             with gr.Accordion("🎭 Style & Artist", open=False):
-                 with gr.Box(elem_classes="settings-container"):
-                     additional_details = gr.Dropdown(["disabled", "random"] + ADDITIONAL_DETAILS, label="Additional details", value="disabled")
-                     photography_styles = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHY_STYLES, label="Photography style", value="disabled")
-                     device = gr.Dropdown(["disabled", "random"] + DEVICE, label="Camera equipment", value="disabled")
-                     photographer = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHER, label="Photographer", value="disabled")
-                     artist = gr.Dropdown(["disabled", "random"] + ARTIST, label="Artist", value="disabled")
-                     digital_artform = gr.Dropdown(["disabled", "random"] + DIGITAL_ARTFORM, label="Digital art form", value="disabled")

             generate_button = gr.Button("🚀 Generate Prompt", variant="primary", elem_classes="generate-button")

         # Middle panel - image and output
         with gr.Column(scale=2):
             with gr.Accordion("🖼️ Image Caption Generation", open=False):
-                 with gr.Box(elem_classes="settings-container"):
-                     input_image = gr.Image(label="Upload image (optional)", type="pil")
-                     caption_output = gr.Textbox(label="Generated caption", lines=3)
-                     with gr.Row():
-                         create_caption_button = gr.Button("📝 Generate Caption", variant="secondary")
-                         add_caption_button = gr.Button("➕ Add to Prompt", variant="secondary")
-
-             with gr.Box(elem_classes="output-container"):
-                 gr.Markdown("### 📋 Generated Prompt")
-                 output = gr.Textbox(label="Main prompt", lines=4)
-                 with gr.Accordion("Advanced output options", open=False):
-                     t5xxl_output = gr.Textbox(label="T5XXL", lines=2)
-                     clip_l_output = gr.Textbox(label="CLIP L", lines=2)
-                     clip_g_output = gr.Textbox(label="CLIP G", lines=2)

         # Right panel - OpenAI
         with gr.Column(scale=2):
-             with gr.Box(elem_classes="prompt-generator-container"):
-                 gr.Markdown("### 🤖 OpenAI Prompt Enhancement")
-                 gr.HTML("<p style='text-align: center; color: #95a5a6;'>Model in use: gpt-4.1-mini</p>")
-
-                 with gr.Box(elem_classes="settings-container"):
-                     with gr.Row():
-                         happy_talk = gr.Checkbox(label="😊 Happy Talk", value=True, info="More detailed description")
-                         compress = gr.Checkbox(label="🗜️ Compress", value=True, info="Limit output length")
-
-                     compression_level = gr.Radio(
-                         ["soft", "medium", "hard"],
-                         label="Compression level",
-                         value="hard",
-                         visible=True
-                     )
-
-                     poster = gr.Checkbox(label="🎬 Movie poster style", value=False)
-
-                     custom_base_prompt = gr.Textbox(
-                         label="🛠️ Custom base prompt",
-                         lines=5,
-                         placeholder="Enter special instructions for OpenAI..."
-                     )
-
-                 generate_text_button = gr.Button("✨ Enhance Prompt with AI", variant="primary", elem_classes="openai-button")
-
-                 text_output = gr.Textbox(
-                     label="🎯 AI Enhancement Result",
-                     lines=10,
-                     elem_classes="output-container"
-                 )

     # Event handlers
     def create_caption(image):
@@ -631,7 +644,7 @@ def create_interface():

     # Global option change handler
     def update_all_options(choice):
-         updates = {}
         dropdown_list = [
             artform, photo_type, body_types, default_tags, roles, hairstyles, clothing,
             place, lighting, composition, pose, background, additional_details,
@@ -639,20 +652,22 @@ def create_interface():
         ]

         if choice == "Disabled":
-             for dropdown in dropdown_list:
-                 updates[dropdown] = gr.update(value="disabled")
         elif choice == "Random":
-             for dropdown in dropdown_list:
-                 updates[dropdown] = gr.update(value="random")
         else: # No Figure Random
             # Character-related settings are disabled
-             for dropdown in [photo_type, body_types, default_tags, roles, hairstyles, clothing, pose, additional_details]:
-                 updates[dropdown] = gr.update(value="disabled")
-             # The rest are random
-             for dropdown in [artform, place, lighting, composition, background, photography_styles, device, photographer, artist, digital_artform]:
-                 updates[dropdown] = gr.update(value="random")

-         return list(updates.values())

     global_option.change(
         update_all_options,
 
@@ -11,11 +11,18 @@ from PIL import Image
 from transformers import AutoProcessor, AutoModelForCausalLM, pipeline

 # The install step can be skipped and only needs to run when required
+ try:
+     subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+ except:
+     print("Flash attention installation skipped")

 # Initialize the OpenAI client (the API key is taken from an environment variable)
 openai_api_key = os.getenv("OPENAI_API")
+ if openai_api_key:
+     client = OpenAI(api_key=openai_api_key)
+ else:
+     print("Warning: OPENAI_API key not found in environment variables")
+     client = None

 # Add the translation model
 translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
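A note on this new guard: without `check=True`, `subprocess.run(..., shell=True)` reports a failed install only through its return code rather than an exception, so the bare `except` mainly covers unexpected errors. The sketch below is an assumption on my part, not what the commit does; it forwards the existing environment and catches the specific failures a pip call can raise.

import os
import subprocess

def try_install_flash_attn() -> bool:
    # Illustrative only: check=True turns a non-zero pip exit code into
    # CalledProcessError, and the env forwards PATH etc. alongside the flag.
    try:
        subprocess.run(
            'pip install flash-attn --no-build-isolation',
            env={**os.environ, 'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"},
            shell=True,
            check=True,
        )
        return True
    except (subprocess.CalledProcessError, OSError):
        print("Flash attention installation skipped")
        return False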
 
@@ -57,8 +64,12 @@ def florence_caption(image):
 # Helper for loading JSON files
 def load_json_file(file_name):
     file_path = os.path.join("data", file_name)
+     try:
+         with open(file_path, "r") as file:
+             return json.load(file)
+     except:
+         print(f"Warning: Could not load {file_name}. Using empty list.")
+         return []

 # Load the JSON data
 ARTFORM = load_json_file("artform.json")
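The bare `except:` above also swallows interrupts such as `KeyboardInterrupt`. A narrower variant is sketched here, assuming the only realistic failures are a missing file or malformed JSON; it is illustrative, not part of the commit.

import json
import os

def load_json_file(file_name):
    # Illustrative variant: catch only a missing file or malformed JSON,
    # and include the error in the warning so failures stay diagnosable.
    file_path = os.path.join("data", file_name)
    try:
        with open(file_path, "r", encoding="utf-8") as file:
            return json.load(file)
    except (FileNotFoundError, json.JSONDecodeError) as err:
        print(f"Warning: Could not load {file_name} ({err}). Using empty list.")
        return []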
 
@@ -317,7 +328,7 @@ class PromptGenerator:

 class OpenAIGenerationNode:
     def __init__(self):
+         self.client = client
         self.prompts_dir = "./prompts"
         os.makedirs(self.prompts_dir, exist_ok=True)
 
@@ -336,6 +347,9 @@ class OpenAIGenerationNode:

     def generate(self, input_text, happy_talk, compress, compression_level, poster, custom_base_prompt=""):
         try:
+             if not self.client:
+                 return "Error: OpenAI API key not found. Please set OPENAI_API environment variable."
+
             # Fixed model: gpt-4.1-mini
             openai_model = "gpt-4.1-mini"
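The rest of `generate()` is not shown in this hunk; presumably it issues a chat-completion request against the fixed model. A minimal sketch of what such a guarded call can look like with the openai>=1.0 Python client follows; the helper name, prompt handling, and message layout are illustrative assumptions, not code from app.py.

def _enhance_prompt(client, openai_model, prompt_text):
    # Hypothetical helper mirroring the guard added above; returns the model's reply text.
    if client is None:
        return "Error: OpenAI API key not found. Please set OPENAI_API environment variable."
    response = client.chat.completions.create(
        model=openai_model,
        messages=[{"role": "user", "content": prompt_text}],
    )
    return response.choices[0].message.content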
 
 
@@ -458,6 +472,14 @@ footer {
     font-size: 1.1em !important;
     padding: 12px 24px !important;
 }
+ .section-header {
+     background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+     color: white;
+     padding: 10px 15px;
+     border-radius: 8px;
+     margin-bottom: 15px;
+     font-weight: bold;
+ }
 """

 def create_interface():
 
@@ -489,105 +511,96 @@ def create_interface():
     with gr.Row():
         # Left panel - settings
         with gr.Column(scale=2):
+             gr.HTML('<div class="section-header">🎯 Basic Settings</div>')
+             seed = gr.Slider(0, 30000, label='Seed value', step=1, value=random.randint(0,30000))
+             custom = gr.Textbox(label="✍️ Custom prompt (Korean allowed)", placeholder="Enter the description you want...")
+             subject = gr.Textbox(label="🎭 Subject (optional)", placeholder="e.g. a beautiful woman, a cute cat, etc...")
+
+             # Global option settings
+             gr.HTML('<div class="section-header">⚡ Quick Settings</div>')
+             global_option = gr.Radio(
+                 ["Disabled", "Random", "No Figure Rand"],
+                 label="Bulk-set all options:",
+                 value="Disabled",
+                 info="Changes all settings at once"
+             )

             # Detailed settings
             with gr.Accordion("🎨 Art Form & Photo Type", open=False):
+                 artform = gr.Dropdown(["disabled", "random"] + ARTFORM, label="Art form", value="disabled")
+                 photo_type = gr.Dropdown(["disabled", "random"] + PHOTO_TYPE, label="Photo type", value="disabled")

             with gr.Accordion("👤 Character Settings", open=False):
+                 body_types = gr.Dropdown(["disabled", "random"] + BODY_TYPES, label="Body type", value="disabled")
+                 default_tags = gr.Dropdown(["disabled", "random"] + DEFAULT_TAGS, label="Default tags", value="disabled")
+                 roles = gr.Dropdown(["disabled", "random"] + ROLES, label="Role", value="disabled")
+                 hairstyles = gr.Dropdown(["disabled", "random"] + HAIRSTYLES, label="Hairstyle", value="disabled")
+                 clothing = gr.Dropdown(["disabled", "random"] + CLOTHING, label="Clothing", value="disabled")

             with gr.Accordion("🏞️ Scene Settings", open=False):
+                 place = gr.Dropdown(["disabled", "random"] + PLACE, label="Place", value="disabled")
+                 lighting = gr.Dropdown(["disabled", "random"] + LIGHTING, label="Lighting", value="disabled")
+                 composition = gr.Dropdown(["disabled", "random"] + COMPOSITION, label="Composition", value="disabled")
+                 pose = gr.Dropdown(["disabled", "random"] + POSE, label="Pose", value="disabled")
+                 background = gr.Dropdown(["disabled", "random"] + BACKGROUND, label="Background", value="disabled")

             with gr.Accordion("🎭 Style & Artist", open=False):
+                 additional_details = gr.Dropdown(["disabled", "random"] + ADDITIONAL_DETAILS, label="Additional details", value="disabled")
+                 photography_styles = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHY_STYLES, label="Photography style", value="disabled")
+                 device = gr.Dropdown(["disabled", "random"] + DEVICE, label="Camera equipment", value="disabled")
+                 photographer = gr.Dropdown(["disabled", "random"] + PHOTOGRAPHER, label="Photographer", value="disabled")
+                 artist = gr.Dropdown(["disabled", "random"] + ARTIST, label="Artist", value="disabled")
+                 digital_artform = gr.Dropdown(["disabled", "random"] + DIGITAL_ARTFORM, label="Digital art form", value="disabled")

             generate_button = gr.Button("🚀 Generate Prompt", variant="primary", elem_classes="generate-button")

         # Middle panel - image and output
         with gr.Column(scale=2):
             with gr.Accordion("🖼️ Image Caption Generation", open=False):
+                 input_image = gr.Image(label="Upload image (optional)", type="pil")
+                 caption_output = gr.Textbox(label="Generated caption", lines=3)
+                 with gr.Row():
+                     create_caption_button = gr.Button("📝 Generate Caption", variant="secondary")
+                     add_caption_button = gr.Button("➕ Add to Prompt", variant="secondary")
+
+             gr.HTML('<div class="section-header">📋 Generated Prompt</div>')
+             output = gr.Textbox(label="Main prompt", lines=4)
+             with gr.Accordion("Advanced output options", open=False):
+                 t5xxl_output = gr.Textbox(label="T5XXL", lines=2)
+                 clip_l_output = gr.Textbox(label="CLIP L", lines=2)
+                 clip_g_output = gr.Textbox(label="CLIP G", lines=2)

         # Right panel - OpenAI
         with gr.Column(scale=2):
+             gr.HTML('<div class="section-header">🤖 OpenAI Prompt Enhancement</div>')
+             gr.HTML("<p style='text-align: center; color: #95a5a6;'>Model in use: gpt-4.1-mini</p>")
+
+             with gr.Row():
+                 happy_talk = gr.Checkbox(label="😊 Happy Talk", value=True, info="More detailed description")
+                 compress = gr.Checkbox(label="🗜️ Compress", value=True, info="Limit output length")
+
+             compression_level = gr.Radio(
+                 ["soft", "medium", "hard"],
+                 label="Compression level",
+                 value="hard",
+                 visible=True
+             )
+
+             poster = gr.Checkbox(label="🎬 Movie poster style", value=False)
+
+             custom_base_prompt = gr.Textbox(
+                 label="🛠️ Custom base prompt",
+                 lines=5,
+                 placeholder="Enter special instructions for OpenAI..."
+             )
+
+             generate_text_button = gr.Button("✨ Enhance Prompt with AI", variant="primary", elem_classes="openai-button")
+
+             text_output = gr.Textbox(
+                 label="🎯 AI Enhancement Result",
+                 lines=10,
+                 elem_classes="output-container"
+             )

     # Event handlers
     def create_caption(image):
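The layout change above replaces every `with gr.Box(...)` container with a styled `gr.HTML` header plus the `.section-header` CSS rule added earlier. The likely motivation is that `gr.Box` was deprecated and later removed in newer Gradio releases, though the commit message does not say so. A minimal, self-contained sketch of the same pattern, assuming Gradio 4.x (the CSS and component names here are illustrative):

import gradio as gr

css = """
.section-header { padding: 10px 15px; border-radius: 8px; font-weight: bold; }
"""

with gr.Blocks(css=css) as demo:
    # A styled HTML header plus gr.Group stands in for the old gr.Box container.
    gr.HTML('<div class="section-header">Basic Settings</div>')
    with gr.Group():
        seed = gr.Slider(0, 30000, step=1, label="Seed value")
        custom = gr.Textbox(label="Custom prompt")

if __name__ == "__main__":
    demo.launch()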
 
@@ -631,7 +644,7 @@ def create_interface():

     # Global option change handler
     def update_all_options(choice):
+         updates = []
         dropdown_list = [
             artform, photo_type, body_types, default_tags, roles, hairstyles, clothing,
             place, lighting, composition, pose, background, additional_details,
@@ -639,20 +652,22 @@ def create_interface():
         ]

         if choice == "Disabled":
+             updates = [gr.update(value="disabled") for _ in dropdown_list]
         elif choice == "Random":
+             updates = [gr.update(value="random") for _ in dropdown_list]
         else: # No Figure Random
+             updates = []
             # Character-related settings are disabled
+             character_dropdowns = [photo_type, body_types, default_tags, roles, hairstyles, clothing, pose, additional_details]
+             other_dropdowns = [artform, place, lighting, composition, background, photography_styles, device, photographer, artist, digital_artform]
+
+             for dropdown in dropdown_list:
+                 if dropdown in character_dropdowns:
+                     updates.append(gr.update(value="disabled"))
+                 else:
+                     updates.append(gr.update(value="random"))

+         return updates

     global_option.change(
         update_all_options,
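The hunk ends mid-call, so the `outputs=` argument of `global_option.change(...)` is not visible here. The contract the new return value relies on is that a list of `gr.update(...)` objects is applied positionally to the components listed in `outputs`. A small self-contained illustration of that pattern (not code from app.py):

import gradio as gr

with gr.Blocks() as demo:
    mode = gr.Radio(["Disabled", "Random"], value="Disabled", label="Mode")
    a = gr.Dropdown(["disabled", "random", "x"], value="disabled", label="A")
    b = gr.Dropdown(["disabled", "random", "y"], value="disabled", label="B")

    def set_all(choice):
        value = "disabled" if choice == "Disabled" else "random"
        # One gr.update per output component, in the same order as `outputs`.
        return [gr.update(value=value) for _ in (a, b)]

    mode.change(set_all, inputs=mode, outputs=[a, b])

if __name__ == "__main__":
    demo.launch()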