import spaces
import argparse
import os
import time
from os import path
import shutil
from datetime import datetime
from safetensors.torch import load_file
from huggingface_hub import hf_hub_download
import gradio as gr
import torch
from diffusers import FluxPipeline
from PIL import Image
from transformers import pipeline
import base64

translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")

# Hugging Face token setup
HF_TOKEN = os.getenv("HF_TOKEN")
if HF_TOKEN is None:
    raise ValueError("HF_TOKEN environment variable is not set")

# Setup and initialization code
cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
PERSISTENT_DIR = os.environ.get("PERSISTENT_DIR", ".")
gallery_path = path.join(PERSISTENT_DIR, "gallery")

os.environ["TRANSFORMERS_CACHE"] = cache_path
os.environ["HF_HUB_CACHE"] = cache_path
os.environ["HF_HOME"] = cache_path

torch.backends.cuda.matmul.allow_tf32 = True

# Create gallery directory if it doesn't exist
if not path.exists(gallery_path):
    os.makedirs(gallery_path, exist_ok=True)

# Sample images and their prompts
SAMPLE_IMAGES = {
    "3d2.webp": "the most famous hero according to Yuri Milner",
    "3d3.webp": "purple nest",
    "3d4.webp": "Timothy's sabbath",
    "3d5.webp": "A schoolboy friend of Julián Carax, fun-loving and loyal",
    "3d6.webp": "Friend of Daniel and his father",
    "3d7.webp": "WHERE ships of purple gently toss On seas of daffodil",
    "3d8.webp": "Beat the drums of tragedy for me, And let the white violins whir thin and slow",
    "3d9.webp": "And let the choir sing a stormy song To drown the rattle of my dying breath.",
    "3d10.webp": "Beat the drums of tragedy and death",
    "3d11.webp": "Beat the drums of tragedy for me.",
    "3d12.webp": "Touching the infinite, else far and untrod, With oracles divine that speak of God.",
    "3d13.webp": "Night, standing on her starry pulpit, free, Utters them in the dread, the silver roll Of spheres, woods, winds and waves, alternately",
    "3d14.webp": "On sermons deep, fit time to feast the soul.",
    "3d15.webp": "The bee is cradled in the bud; and far, Cold glittering lights, the azure curtain, throng— Planet on beaming planet, star on star.",
    "3d16.webp": "The lark's sweet pipe has ceased its latest song",
    "3d17.webp": "the snake was a roaming dog",
    "3d18.webp": "Antonio Battistella portraying Father of Giulia",
    "3d19.webp": "So straight to her father the brisk young lady went, And said, grant me one favour, do give your consent",
    "3d20.webp": "Before that we are marry’d let me your father see, All fear is, now miscarry’d, my heart is full of glee",
    "3d21.webp": "My heart you now have gained, you are all I prize, So make yourself contented, pray be satisfied.",
    "3d22.webp": "O pray what is the favour that of me you crave? If it lies in my power you the same shall have",
    "3d23.webp": "Could I but see your father, and my mind reveal, I have both gold and silver, and houses at my will",
    "3d1.webp": "the most famous hero according to Zhou Qi",
}


class timer:
    def __init__(self, method_name="timed process"):
        self.method = method_name

    def __enter__(self):
        self.start = time.time()
        print(f"{self.method} starts")

    def __exit__(self, exc_type, exc_val, exc_tb):
        end = time.time()
        print(f"{self.method} took {str(round(end - self.start, 2))}s")


# Model initialization
if not path.exists(cache_path):
    os.makedirs(cache_path, exist_ok=True)

pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    torch_dtype=torch.bfloat16,
    token=HF_TOKEN,  # `use_auth_token` is deprecated; `token` avoids the warning
)

# Load the Hyper-SD LoRA
pipe.load_lora_weights(
    hf_hub_download(
        "ByteDance/Hyper-SD",
        "Hyper-FLUX.1-dev-8steps-lora.safetensors",
        token=HF_TOKEN,
    )
)
pipe.fuse_lora(lora_scale=0.125)
pipe.to(device="cuda", dtype=torch.bfloat16)


def save_image(image):
    """Save the generated image and return the path"""
    try:
        if not os.path.exists(gallery_path):
            try:
                os.makedirs(gallery_path, exist_ok=True)
            except Exception as e:
                print(f"Failed to create gallery directory: {str(e)}")
                return None

        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        random_suffix = os.urandom(4).hex()
        filename = f"generated_{timestamp}_{random_suffix}.png"
        filepath = os.path.join(gallery_path, filename)

        try:
            if isinstance(image, Image.Image):
                image.save(filepath, "PNG", quality=100)
            else:
                image = Image.fromarray(image)
                image.save(filepath, "PNG", quality=100)
            return filepath
        except Exception as e:
            print(f"Failed to save image: {str(e)}")
            return None
    except Exception as e:
        print(f"Error in save_image: {str(e)}")
        return None


def get_random_seed():
    return torch.randint(0, 1000000, (1,)).item()


@spaces.GPU
def process_and_save_image(height=1024, width=1024, steps=8, scales=3.5, prompt="", seed=None):
    global pipe

    if seed is None:
        seed = get_random_seed()

    # Korean detection: translate Korean prompts to English before generation
    def contains_korean(text):
        return any(ord('가') <= ord(c) <= ord('힣') for c in text)

    # Prompt preprocessing
    if contains_korean(prompt):
        translated = translator(prompt)[0]['translation_text']
        prompt = translated

    formatted_prompt = f"wbgmsst, 3D, {prompt}, white background"

    with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16), timer("inference"):
        try:
            generated_image = pipe(
                prompt=[formatted_prompt],
                generator=torch.Generator().manual_seed(int(seed)),
                num_inference_steps=int(steps),
                guidance_scale=float(scales),
                height=int(height),
                width=int(width),
                max_sequence_length=256
            ).images[0]

            saved_path = save_image(generated_image)
            if saved_path is None:
                print("Warning: Failed to save generated image")

            return generated_image
        except Exception as e:
            print(f"Error in image generation: {str(e)}")
            return None


def update_random_seed():
    """Update the seed with a new random value when the button is clicked"""
    return gr.update(value=get_random_seed())


# Gradio interface
with gr.Blocks(
    theme=gr.themes.Soft(),
    css="""
    .container {
        background: linear-gradient(to bottom right, #1a1a1a, #4a4a4a);
        border-radius: 20px;
        padding: 20px;
    }
    .generate-btn {
        background: linear-gradient(45deg, #2196F3, #00BCD4);
        border: none;
        color: white;
        font-weight: bold;
        border-radius: 10px;
    }
    .output-image {
        border-radius: 15px;
        box-shadow: 0 8px 16px rgba(0,0,0,0.2);
    }
    .fixed-width {
        max-width: 1024px;
        margin: auto;
    }
    .gallery-container {
        margin-top: 40px;
        padding: 20px;
        background: #f5f5f5;
        border-radius: 15px;
        width: 100%;
        margin: 0 auto;
    }
    .gallery-title {
        text-align: center;
        margin-bottom: 20px;
        color: #333;
        font-size: 1.5rem;
    }
    """
) as demo:
    # Header: only the title and tagline text are original; the wrapping tags
    # and inline styling below are a minimal reconstruction.
    gr.HTML(
        """
        <div style="text-align: center;">
            <h1>3D Style Image Generator v2.0</h1>
            <p>Create amazing 3D-style images with AI. https://discord.gg/openfreeai</p>
        </div>
        """
    )
""" ) with gr.Row(elem_classes="container"): with gr.Column(scale=3): prompt = gr.Textbox( label="Image Description", placeholder="Describe the 3D image you want to create...", lines=3 ) with gr.Accordion("Advanced Settings", open=False): with gr.Row(): height = gr.Slider( label="Height", minimum=256, maximum=1152, step=64, value=1024 ) width = gr.Slider( label="Width", minimum=256, maximum=1152, step=64, value=1024 ) with gr.Row(): steps = gr.Slider( label="Inference Steps", minimum=6, maximum=25, step=1, value=8 ) scales = gr.Slider( label="Guidance Scale", minimum=0.0, maximum=5.0, step=0.1, value=3.5 ) seed = gr.Number( label="Seed (random by default, set for reproducibility)", value=get_random_seed(), precision=0 ) randomize_seed = gr.Button("🎲 Randomize Seed", elem_classes=["generate-btn"]) generate_btn = gr.Button( "✨ Generate Image", elem_classes=["generate-btn"] ) with gr.Column(scale=4, elem_classes=["fixed-width"]): output = gr.Image( label="Generated Image", elem_id="output-image", elem_classes=["output-image", "fixed-width"], value="3d.webp" ) # gallery-container 부분을 Group으로 감싸 화면 전체에 확장 with gr.Group(elem_classes="gallery-container"): gr.HTML("") gallery_html = """
""" for img_file, prompt_text in SAMPLE_IMAGES.items(): img_path = os.path.abspath(img_file) if os.path.exists(img_path): try: with open(img_path, "rb") as img: img_data = base64.b64encode(img.read()).decode() gallery_html += f"""

Prompt: {prompt_text}

""" except Exception as e: print(f"Error loading image {img_file}: {str(e)}") gallery_html += "
" gr.HTML(gallery_html) # 이벤트 핸들러 generate_btn.click( fn=process_and_save_image, inputs=[height, width, steps, scales, prompt, seed], outputs=output ) randomize_seed.click( fn=update_random_seed, inputs=None, outputs=seed ) if __name__ == "__main__": demo.launch(allowed_paths=[PERSISTENT_DIR])