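# Gradio app: text-to-image generation with black-forest-labs/FLUX.1-schnell via diffusers.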
import gradio as gr
import numpy as np
import random
import spaces
import torch
from diffusers import DiffusionPipeline
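# Load the pipeline in bfloat16 and move it to the GPU when one is available, otherwise fall back to CPU.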
dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=dtype).to(device)
MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 2048
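# On ZeroGPU Spaces, @spaces.GPU allocates a GPU for each call to this function (here for up to 120 s).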
@spaces.GPU(duration=120)
def infer(prompt, seed=765449273, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, progress=gr.Progress(track_tqdm=True)):
    # Optionally draw a fresh random seed so repeated runs produce different images.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)
    # FLUX.1-schnell is guidance-distilled, so guidance_scale stays at 0.0.
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
        generator=generator,
        guidance_scale=0.0,
    ).images[0]
    return image, seed
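# Example prompts surfaced through gr.Examples below.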
examples = [
    "breathtaking beautiful young woman, light-blue eyes, long bronze hair, fair complexion, (freckles:0.5), wearing sexy lace lingerie, sitting on the floor, leaning on her bed, window in the background with sheer white curtains, staring seductively at the viewer, sun shining through the window. award-winning, professional, highly detailed",
]
css="""
#col-container {
background:#181717;
padding: 10px;
color:#000000:
font-weight:600;
margin: 10 auto;
border: 1px #FFFFFF;
border-radius:15px;
max-width: 100%;
min-width: 600px;
border-radius: 15px;
}
"""
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown("""OUR FLUX APP""")
        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=10,
                placeholder="Enter your prompt",
                container=False,
            )
            run_button = gr.Button("Run", scale=0)
        result = gr.Image(label="Result", show_label=False)
        with gr.Accordion("Advanced Settings", open=True):
            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )
            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
            with gr.Row():
                width = gr.Slider(
                    label="Width",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=704,
                )
                height = gr.Slider(
                    label="Height",
                    minimum=256,
                    maximum=MAX_IMAGE_SIZE,
                    step=32,
                    value=1024,
                )
            with gr.Row():
                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=1,
                    maximum=30,
                    step=1,
                    value=10,
                )
        gr.Examples(
            examples=examples,
            fn=infer,
            inputs=[prompt],
            outputs=[result, seed],
            cache_examples="lazy",
        )
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        inputs=[prompt, seed, randomize_seed, width, height, num_inference_steps],
        outputs=[result, seed],
    )
demo.launch()