Update app.py
app.py
CHANGED
@@ -107,14 +107,14 @@ examples = [
     [
         "./asset/0.jpg",
         None,
-        "
+        "3D model, cute monster, high quality",
         1.0,
         0.0,
     ],
     [
         "./asset/2.jpg",
         "./asset/house.jpg",
-        "
+        "3d model, house, kawai, cute, sci-fi, solarpunk, high quality",
         1.0,
         0.6,
     ],
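The two rows edited above belong to the demo's `examples` table: each row pre-fills a style image, an optional source image, the prompt, and two scale values. As a minimal sketch (not taken from this commit), such a table is usually attached to the UI via `gr.Examples`; the widget names and the mapping of the last two columns onto the scale sliders are assumptions here.

```python
# Hypothetical sketch: attaching an examples table like the one edited above.
# Which widgets the five columns bind to in the real app.py is assumed.
import gradio as gr

examples = [
    ["./asset/0.jpg", None, "3D model, cute monster, high quality", 1.0, 0.0],
    ["./asset/2.jpg", "./asset/house.jpg",
     "3d model, house, kawai, cute, sci-fi, solarpunk, high quality", 1.0, 0.6],
]

with gr.Blocks() as demo:
    image_pil = gr.Image(label="Style Image", type="pil")
    src_image_pil = gr.Image(label="Source Image (optional)", type="pil")
    prompt = gr.Textbox(label="Prompt")
    scale = gr.Slider(0, 2.0, value=1.0, label="Scale")
    control_scale = gr.Slider(0, 1.0, value=0.5, label="ControlNet Scale")  # assumed name

    # Clicking a row pre-fills the bound widgets with that row's values.
    gr.Examples(
        examples=examples,
        inputs=[image_pil, src_image_pil, prompt, scale, control_scale],
    )

demo.launch()
```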
@@ -232,30 +232,17 @@ def pil_to_cv2(image_pil):
 
 # Description
 title = r"""
-<h1 align="center">
+<h1 align="center">I2I mit SDXL-Lightning & IP-Adapter</h1>
 """
 
 description = r"""
-<b>
-<b>
+<b>ARM <3 GoldExtra Testversion<br>
+<b>Wir schauen uns gut funktionierende Prompts. Bitte diese notieren und an Hidéo weiterleiten!</b><br>
 """
 
 article = r"""
----
-📝 **Citation**
 <br>
-
-```bibtex
-@article{wang2024instantstyle,
-title={InstantStyle: Free Lunch towards Style-Preserving in Text-to-Image Generation},
-author={Wang, Haofan and Wang, Qixun and Bai, Xu and Qin, Zekui and Chen, Anthony},
-journal={arXiv preprint arXiv:2404.02733},
-year={2024}
-}
-```
-📧 **Contact**
-<br>
-If you have any questions, please feel free to open an issue or directly reach us out at <b>[email protected]</b>.
+Bei Fragen: <a href="mailto:[email protected]">Mail an Hidéo</a>
 """
 
 block = gr.Blocks()
@@ -273,14 +260,14 @@ with block:
         with gr.Column():
             prompt = gr.Textbox(
                 label="Prompt",
-                value="
+                value="mewmewmew, uWu, space kitten, unicorn",
             )
 
             scale = gr.Slider(
                 minimum=0, maximum=2.0, step=0.01, value=1.0, label="Scale"
             )
 
-            with gr.Accordion(open=False, label="
+            with gr.Accordion(open=False, label="Details (optional)"):
                 target = gr.Radio(
                     [
                         "Load only style blocks",
@@ -288,7 +275,7 @@ with block:
                         "Load original IP-Adapter",
                     ],
                     value="Load only style blocks",
-                    label="Style mode",
+                    label="Style mode (optional, sb works best!)",
                 )
             with gr.Column():
                 src_image_pil = gr.Image(
@@ -299,23 +286,23 @@ with block:
                     maximum=1.0,
                     step=0.01,
                     value=0.5,
-                    label="
+                    label="ControlNet Scale (test this!)",
                 )
 
                 n_prompt = gr.Textbox(
-                    label="
+                    label="Negative Prompt // n_prompt",
                     value="text, watermark, lowres, low quality, worst quality, deformed, glitch, low contrast, noisy, saturation, blurry",
                 )
 
                 neg_content_prompt = gr.Textbox(
-                    label="
+                    label="Negative Content Prompt (Ignore this!)", value=""
                 )
                 neg_content_scale = gr.Slider(
                     minimum=0,
                     maximum=1.0,
                     step=0.01,
                     value=0.5,
-                    label="
+                    label="NCS (Ignore this!) // neg_content_scale",
                 )
 
                 guidance_scale = gr.Slider(
@@ -323,27 +310,27 @@ with block:
                     maximum=10.0,
                     step=0.01,
                     value=0.0,
-                    label="
+                    label="Guidance Scale (test this!)",
                 )
                 num_inference_steps = gr.Slider(
                     minimum=2,
                     maximum=50.0,
                     step=1.0,
                     value=2,
-                    label="
+                    label="Inference Steps (optional but test with 2+)",
                 )
                 seed = gr.Slider(
                     minimum=-1,
                     maximum=MAX_SEED,
                     value=-1,
                     step=1,
-                    label="Seed Value",
+                    label="Seed Value (Seed-Proof) // -1 == random",
                 )
 
-                generate_button = gr.Button("
+                generate_button = gr.Button("Simsalabim")
 
             with gr.Column():
-                generated_image = gr.Image(label="
+                generated_image = gr.Image(label="Magix uWu")
 
         inputs = [
             image_pil,