openfree committed on
Commit
eef0ab8
·
verified ·
1 Parent(s): e714f1c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -28
app.py CHANGED
@@ -13,28 +13,21 @@ from transformers import pipeline as hf_pipeline
13
  ##############################################################################
14
  # 1) ZeroGPU ํ™˜๊ฒฝ ์ฒ˜๋ฆฌ + device, dtype ์„ค์ •
15
  ##############################################################################
16
- ##############################################################################
17
- # 1) ZeroGPU ํ™˜๊ฒฝ ์ฒ˜๋ฆฌ + device, dtype ์„ค์ •
18
- ##############################################################################
19
- # ZeroGPU ์ดˆ๊ธฐํ™” ์‹œ๋„
20
  try:
21
  import zerogpu
22
  zerogpu.init()
23
  print("ZeroGPU initialized successfully")
24
  device = "cuda" if torch.cuda.is_available() else "cpu"
25
  except ImportError:
26
- # ZeroGPU๊ฐ€ ์„ค์น˜๋˜์ง€ ์•Š์€ ๊ฒฝ์šฐ
27
  print("ZeroGPU package not installed, continuing without it")
28
  if os.getenv("ZERO_GPU"):
29
  print("ZeroGPU environment variable is set but zerogpu package is not installed.")
30
  device = "cuda" if torch.cuda.is_available() else "cpu"
31
  except Exception as e:
32
- # ZeroGPU ์ดˆ๊ธฐํ™” ์ค‘ ๋‹ค๋ฅธ ์˜ค๋ฅ˜๊ฐ€ ๋ฐœ์ƒํ•œ ๊ฒฝ์šฐ
33
  print(f"Error initializing ZeroGPU: {e}")
34
  print("Continuing without ZeroGPU")
35
  device = "cuda" if torch.cuda.is_available() else "cpu"
36
 
37
- # GPU์ผ ๋•Œ๋งŒ bfloat16, ๊ทธ ์™ธ์—๋Š” float32
38
  dtype = torch.bfloat16 if device == "cuda" else torch.float32
39
 
40
  print(f"Using device: {device}, dtype: {dtype}")
@@ -57,7 +50,6 @@ try:
57
  print("Models loaded successfully")
58
  except Exception as e:
59
  print(f"Error loading models: {e}")
60
- # ๋ชจ๋ธ ๋กœ๋“œ ์—๋Ÿฌ ์ฒ˜๋ฆฌ๋ฅผ ์œ„ํ•œ ๋”๋ฏธ ํ•จ์ˆ˜๋“ค
61
  def dummy_translator(text):
62
  return [{'translation_text': text}]
63
 
@@ -90,20 +82,14 @@ def contains_korean(text):
90
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ•จ์ˆ˜
91
  ##############################################################################
92
  def generate_design_image(prompt, seed=42, randomize_seed=True, width=1024, height=1024, num_inference_steps=4):
93
- """
94
- ์ƒ์„ฑ๋œ ํ™•์žฅ ์•„์ด๋””์–ด ํ…์ŠคํŠธ(prompt)๋ฅผ ์ž…๋ ฅ๋ฐ›์•„,
95
- ํ•„์š”์‹œ ํ•œ๊ตญ์–ด๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญํ•œ ํ›„ DiffusionPipeline์œผ๋กœ ์ด๋ฏธ์ง€๋ฅผ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.
96
- """
97
  original_prompt = prompt
98
  translated = False
99
 
100
- # ํ•œ๊ตญ์–ด๊ฐ€ ํฌํ•จ๋˜์–ด ์žˆ์œผ๋ฉด ์˜์–ด๋กœ ๋ฒˆ์—ญ
101
  if contains_korean(prompt):
102
  translation = translator(prompt)
103
  prompt = translation[0]['translation_text']
104
  translated = True
105
 
106
- # ๋žœ๋ค ์‹œ๋“œ ์„ค์ •
107
  if randomize_seed:
108
  seed = random.randint(0, MAX_SEED)
109
 
@@ -420,7 +406,6 @@ def process_inputs(text1, text2, text3, selected_category, progress=gr.Progress(
420
  time.sleep(0.3)
421
  progress(0.1, desc="์ฐฝ์˜์ ์ธ ์•„์ด๋””์–ด ์ƒ์„ฑ ์‹œ์ž‘...")
422
 
423
- # ์นดํ…Œ๊ณ ๋ฆฌ์— ํ•ด๋‹นํ•˜๋Š” ์•„์ด๋””์–ด ์ƒ์„ฑ
424
  results = generate_transformations(text1, text2, text3, selected_category)
425
 
426
  progress(0.8, desc="๊ฒฐ๊ณผ ํฌ๋งทํŒ… ์ค‘...")
@@ -507,7 +492,8 @@ with gr.Blocks(
507
  text_input1 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 1 (ํ•„์ˆ˜)", placeholder="์˜ˆ: ์Šค๋งˆํŠธํฐ")
508
  text_input2 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 2 (์„ ํƒ)", placeholder="์˜ˆ: ์ธ๊ณต์ง€๋Šฅ")
509
  text_input3 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 3 (์„ ํƒ)", placeholder="์˜ˆ: ํ—ฌ์Šค์ผ€์–ด")
510
- category_dropdown = gr.Dropdown(
 
511
  label="์นดํ…Œ๊ณ ๋ฆฌ ์„ ํƒ",
512
  choices=list(physical_transformation_categories.keys()),
513
  value=list(physical_transformation_categories.keys())[0],
@@ -534,34 +520,31 @@ with gr.Blocks(
534
  idea_output = gr.Markdown(label="์•„์ด๋””์–ด ๊ฒฐ๊ณผ")
535
  generated_image = gr.Image(label="์ƒ์„ฑ๋œ ๋””์ž์ธ ์ด๋ฏธ์ง€", type="pil")
536
 
537
- # ์˜ˆ์ œ
538
  gr.Examples(
539
  examples=[
540
- ["์Šค๋งˆํŠธํฐ", "", "", list(physical_transformation_categories.keys())[0]],
541
- ["์ž๋™์ฐจ", "", "", list(physical_transformation_categories.keys())[0]],
542
- ["์ž๋™์ฐจ", "์ธ๊ณต์ง€๋Šฅ", "", list(physical_transformation_categories.keys())[0]],
543
- ["๋“œ๋ก ", "์ธ๊ณต์ง€๋Šฅ", "", list(physical_transformation_categories.keys())[0]],
544
- ["์šด๋™ํ™”", "์›จ์–ด๋Ÿฌ๋ธ”", "๊ฑด๊ฐ•", list(physical_transformation_categories.keys())[0]],
545
  ],
546
- inputs=[text_input1, text_input2, text_input3, category_dropdown],
547
  )
548
 
549
- # ์ฒ˜๋ฆฌ์ค‘ ์•„์ด์ฝ˜ ๋ณด์ด๊ธฐ
550
  def show_processing_indicator():
551
  return gr.update(visible=True)
552
 
553
- # ์ฒ˜๋ฆฌ์ค‘ ์•„์ด์ฝ˜ ์ˆจ๊ธฐ๊ธฐ
554
  def hide_processing_indicator():
555
  return gr.update(visible=False)
556
 
557
- # ๋ฒ„ํŠผ ํด๋ฆญ ์‹œ ์ฒ˜๋ฆฌ ๋กœ์ง
558
  submit_button.click(
559
  fn=show_processing_indicator,
560
  inputs=None,
561
  outputs=processing_indicator
562
  ).then(
563
  fn=process_all,
564
- inputs=[text_input1, text_input2, text_input3, category_dropdown],
565
  outputs=[idea_output, generated_image]
566
  ).then(
567
  fn=hide_processing_indicator,
@@ -569,6 +552,5 @@ with gr.Blocks(
569
  outputs=processing_indicator
570
  )
571
 
572
- # ๋ฉ”์ธ ์‹คํ–‰
573
  if __name__ == "__main__":
574
  demo.launch(debug=True)
 
13
  ##############################################################################
14
  # 1) ZeroGPU ํ™˜๊ฒฝ ์ฒ˜๋ฆฌ + device, dtype ์„ค์ •
15
  ##############################################################################
 
 
 
 
16
  try:
17
  import zerogpu
18
  zerogpu.init()
19
  print("ZeroGPU initialized successfully")
20
  device = "cuda" if torch.cuda.is_available() else "cpu"
21
  except ImportError:
 
22
  print("ZeroGPU package not installed, continuing without it")
23
  if os.getenv("ZERO_GPU"):
24
  print("ZeroGPU environment variable is set but zerogpu package is not installed.")
25
  device = "cuda" if torch.cuda.is_available() else "cpu"
26
  except Exception as e:
 
27
  print(f"Error initializing ZeroGPU: {e}")
28
  print("Continuing without ZeroGPU")
29
  device = "cuda" if torch.cuda.is_available() else "cpu"
30
 
 
31
  dtype = torch.bfloat16 if device == "cuda" else torch.float32
32
 
33
  print(f"Using device: {device}, dtype: {dtype}")
 
50
  print("Models loaded successfully")
51
  except Exception as e:
52
  print(f"Error loading models: {e}")
 
53
  def dummy_translator(text):
54
  return [{'translation_text': text}]
55
 
 
82
  # ์ด๋ฏธ์ง€ ์ƒ์„ฑ ํ•จ์ˆ˜
83
  ##############################################################################
84
  def generate_design_image(prompt, seed=42, randomize_seed=True, width=1024, height=1024, num_inference_steps=4):
 
 
 
 
85
  original_prompt = prompt
86
  translated = False
87
 
 
88
  if contains_korean(prompt):
89
  translation = translator(prompt)
90
  prompt = translation[0]['translation_text']
91
  translated = True
92
 
 
93
  if randomize_seed:
94
  seed = random.randint(0, MAX_SEED)
95
 
 
406
  time.sleep(0.3)
407
  progress(0.1, desc="์ฐฝ์˜์ ์ธ ์•„์ด๋””์–ด ์ƒ์„ฑ ์‹œ์ž‘...")
408
 
 
409
  results = generate_transformations(text1, text2, text3, selected_category)
410
 
411
  progress(0.8, desc="๊ฒฐ๊ณผ ํฌ๋งทํŒ… ์ค‘...")
 
492
  text_input1 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 1 (ํ•„์ˆ˜)", placeholder="์˜ˆ: ์Šค๋งˆํŠธํฐ")
493
  text_input2 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 2 (์„ ํƒ)", placeholder="์˜ˆ: ์ธ๊ณต์ง€๋Šฅ")
494
  text_input3 = gr.Textbox(label="ํ‚ค์›Œ๋“œ 3 (์„ ํƒ)", placeholder="์˜ˆ: ํ—ฌ์Šค์ผ€์–ด")
495
+ # ๋“œ๋กญ๋‹ค์šด ๋Œ€์‹  ๋ผ๋””์˜ค ๋ฒ„ํŠผ์œผ๋กœ ์นดํ…Œ๊ณ ๋ฆฌ ์„ ํƒ (๋ชจ๋“  ์˜ต์…˜์ด ํŽผ์ณ์ง)
496
+ category_radio = gr.Radio(
497
  label="์นดํ…Œ๊ณ ๋ฆฌ ์„ ํƒ",
498
  choices=list(physical_transformation_categories.keys()),
499
  value=list(physical_transformation_categories.keys())[0],
 
520
  idea_output = gr.Markdown(label="์•„์ด๋””์–ด ๊ฒฐ๊ณผ")
521
  generated_image = gr.Image(label="์ƒ์„ฑ๋œ ๋””์ž์ธ ์ด๋ฏธ์ง€", type="pil")
522
 
523
+ # ๋‹ค์–‘ํ•œ ์นดํ…Œ๊ณ ๋ฆฌ๊ฐ€ ํฌํ•จ๋œ ์˜ˆ์ œ (๋ผ๋””์˜ค ๋ฒ„ํŠผ ์ž…๋ ฅ)
524
  gr.Examples(
525
  examples=[
526
+ ["์Šค๋งˆํŠธํฐ", "", "", "๊ณต๊ฐ„ ์ด๋™"],
527
+ ["์ž๋™์ฐจ", "", "", "ํฌ๊ธฐ์™€ ํ˜•ํƒœ ๋ณ€ํ™”"],
528
+ ["์ž๋™์ฐจ", "์ธ๊ณต์ง€๋Šฅ", "", "ํ‘œ๋ฉด ๋ฐ ์™ธ๊ด€ ๋ณ€ํ™”"],
529
+ ["๋“œ๋ก ", "์ธ๊ณต์ง€๋Šฅ", "", "๋ฌผ์งˆ์˜ ์ƒํƒœ ๋ณ€ํ™”"],
530
+ ["์šด๋™ํ™”", "์›จ์–ด๋Ÿฌ๋ธ”", "๊ฑด๊ฐ•", "์ „๊ธฐ ๋ฐ ์ž๊ธฐ ๋ณ€ํ™”"],
531
  ],
532
+ inputs=[text_input1, text_input2, text_input3, category_radio],
533
  )
534
 
 
535
  def show_processing_indicator():
536
  return gr.update(visible=True)
537
 
 
538
  def hide_processing_indicator():
539
  return gr.update(visible=False)
540
 
 
541
  submit_button.click(
542
  fn=show_processing_indicator,
543
  inputs=None,
544
  outputs=processing_indicator
545
  ).then(
546
  fn=process_all,
547
+ inputs=[text_input1, text_input2, text_input3, category_radio],
548
  outputs=[idea_output, generated_image]
549
  ).then(
550
  fn=hide_processing_indicator,
 
552
  outputs=processing_indicator
553
  )
554
 
 
555
  if __name__ == "__main__":
556
  demo.launch(debug=True)