Update app.py
app.py CHANGED
@@ -35,6 +35,21 @@ pipe = AuraFlowPipeline.from_pretrained(
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
+@spaces.GPU()
+def infer_example(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, model_version="0.2", comparison_mode=False, progress=gr.Progress(track_tqdm=True)):
+    seed = random.randint(0, MAX_SEED)
+    generator = torch.Generator().manual_seed(seed)
+    image = pipe(
+        prompt = prompt,
+        negative_prompt = negative_prompt,
+        width=width,
+        height=height,
+        guidance_scale = guidance_scale,
+        num_inference_steps = num_inference_steps,
+        generator = generator
+    ).images[0]
+    return image, seed
+
 @spaces.GPU(duration=95)
 def infer(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, model_version="0.2", comparison_mode=False, progress=gr.Progress(track_tqdm=True)):
 
@@ -184,7 +199,7 @@ with gr.Blocks(css=css) as demo:
 
     gr.Examples(
         examples = examples,
-        fn =
+        fn = infer_example,
         inputs = [prompt],
         outputs = [result, result_compare, seed],
         cache_examples="lazy"
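In short: the commit adds a second entry point, infer_example, decorated with the default @spaces.GPU() rather than @spaces.GPU(duration=95), and passes it as fn to gr.Examples, so that lazily cached example prompts run in a short ZeroGPU slot while interactive generations keep the longer 95-second slot. Below is a minimal, self-contained sketch of that pattern, assuming a simplified UI and the fal/AuraFlow-v0.2 checkpoint; the full app.py wires more controls and a comparison output, so treat this as an illustration, not the Space's actual code.

# Minimal sketch of the ZeroGPU pattern this commit introduces
# (assumed checkpoint id and simplified function signatures/UI).
import random

import gradio as gr
import numpy as np
import spaces
import torch
from diffusers import AuraFlowPipeline

MAX_SEED = np.iinfo(np.int32).max

pipe = AuraFlowPipeline.from_pretrained(
    "fal/AuraFlow-v0.2",  # assumption: the AuraFlow v0.2 checkpoint
    torch_dtype=torch.float16,
).to("cuda")

@spaces.GPU()  # default short GPU slot: enough to render one cached example
def infer_example(prompt):
    seed = random.randint(0, MAX_SEED)
    generator = torch.Generator().manual_seed(seed)
    image = pipe(prompt=prompt, generator=generator).images[0]
    return image, seed

@spaces.GPU(duration=95)  # longer slot for interactive (e.g. comparison) runs
def infer(prompt, seed=42):
    generator = torch.Generator().manual_seed(seed)
    image = pipe(prompt=prompt, generator=generator).images[0]
    return image, seed

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Image(label="Result")
    seed_out = gr.Number(label="Seed")
    run = gr.Button("Run")
    run.click(infer, inputs=[prompt], outputs=[result, seed_out])
    gr.Examples(
        examples=["A photorealistic fox in a snowy forest"],
        fn=infer_example,       # examples use the short-slot function
        inputs=[prompt],
        outputs=[result, seed_out],
        cache_examples="lazy",  # generated on first click, then served from cache
    )

demo.launch()

With cache_examples="lazy", Gradio generates each example's output the first time a user clicks it and serves the cached result afterwards, which is why the example path can get by with the shorter default GPU reservation.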