Update app.py
app.py CHANGED
@@ -11,9 +11,11 @@ import os
 dtype = torch.bfloat16
 device = "cuda" if torch.cuda.is_available() else "cpu"

+# Set your Hugging Face API token
 huggingface_token = os.getenv("HUGGINGFACE_TOKEN")

-
+# Load the diffusion pipeline with the Hugging Face API token
+pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=dtype, token=huggingface_token).to(device)

 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 2048
@@ -97,7 +99,6 @@ body {

 with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="gray")) as demo:

-
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""# FLUX.1 [dev]
 12B param rectified flow transformer guidance-distilled from FLUX.1 [pro]
@@ -110,7 +111,6 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="
         </a>
         """)

-
         with gr.Row():
             prompt = gr.Text(
                 label="Prompt",
@@ -189,7 +189,6 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="
             cache_examples="lazy"
         )

-
     gr.on(
         triggers=[run_button.click, prompt.submit],
         fn=infer,
@@ -203,5 +202,4 @@ with gr.Blocks(css=css, theme=gr.themes.Soft(primary_hue="blue", secondary_hue="
         outputs=None
     )

-
 demo.launch()
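For reference, a minimal sketch of how the pipeline loaded in this commit could be exercised outside the Gradio UI. Only the loading pattern mirrors the diff; the prompt, seed, image size, step count, and guidance values below are illustrative assumptions, not values taken from the Space.

import os
import torch
from diffusers import DiffusionPipeline

dtype = torch.bfloat16
device = "cuda" if torch.cuda.is_available() else "cpu"

# Same loading pattern as the commit: read the gated-model token from the
# environment and pass it to from_pretrained.
huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=dtype, token=huggingface_token
).to(device)

# Illustrative single generation; these parameter values are assumptions.
generator = torch.Generator(device=device).manual_seed(42)
image = pipe(
    prompt="a tiny astronaut hatching from an egg on the moon",
    width=1024,
    height=1024,
    num_inference_steps=28,
    guidance_scale=3.5,
    generator=generator,
).images[0]
image.save("flux_dev_sample.png")

In the Space itself these values come from the Gradio controls, which gr.on wires into infer on run_button.click and prompt.submit.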