Update app.py
app.py CHANGED
@@ -7,7 +7,7 @@ from diffusers import DiffusionPipeline
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_repo_id = "
+model_repo_id = "IDK-ab0ut/Yiffymix_V52-XL"  # Replace with the model you would like to use
 
 if torch.cuda.is_available():
     torch_dtype = torch.float16
@@ -52,7 +52,7 @@ def infer(
 
 
 examples = [
-    "
+    "by aipeco18, by pony-berserker, by claweddrip, solo, female, fox, orange fur, white fur, two-toned fur, white countershading, brown eyes, smile, looking at viewer, standing, makeup room, indoors, masterpiece, best quality, high quality, hi res, absurd res",
     "An astronaut riding a green horse",
     "A delicious ceviche cheesecake slice",
 ]
@@ -73,7 +73,7 @@ with gr.Blocks(css=css) as demo:
                 label="Prompt",
                 show_label=False,
                 max_lines=1,
-                placeholder="
+                placeholder="Describe your desired fursona.",
                 container=False,
             )
 
@@ -85,7 +85,7 @@ with gr.Blocks(css=css) as demo:
             negative_prompt = gr.Text(
                 label="Negative prompt",
                 max_lines=1,
-                placeholder="
+                placeholder="Tell AI what they shouldn't generate.",
                 visible=False,
             )
 
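For context, the edited values plug into the standard Gradio text-to-image template that wraps diffusers' DiffusionPipeline: the new model_repo_id is passed to from_pretrained, and the new example prompt and placeholders feed the Gradio text widgets. The sketch below shows that wiring under the assumption that the rest of app.py follows the stock template; everything outside the changed lines (the simplified infer signature, the Button/Image layout) is assumed for illustration, not taken from this commit.

# Minimal sketch of the surrounding template (assumed), showing where the
# values changed in this commit are consumed.
import torch
import gradio as gr
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
model_repo_id = "IDK-ab0ut/Yiffymix_V52-XL"  # value introduced by this commit
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Download the checkpoint once at startup and move it to the available device.
pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe = pipe.to(device)

def infer(prompt, negative_prompt):
    # Generate one image; the real template also exposes seed, size, and
    # guidance-scale controls that are omitted here.
    return pipe(prompt=prompt, negative_prompt=negative_prompt).images[0]

with gr.Blocks() as demo:
    prompt = gr.Text(
        label="Prompt",
        show_label=False,
        max_lines=1,
        placeholder="Describe your desired fursona.",  # new placeholder from this commit
        container=False,
    )
    negative_prompt = gr.Text(
        label="Negative prompt",
        max_lines=1,
        placeholder="Tell AI what they shouldn't generate.",  # new placeholder from this commit
        visible=False,
    )
    result = gr.Image(label="Result", show_label=False)
    run_button = gr.Button("Run")
    run_button.click(fn=infer, inputs=[prompt, negative_prompt], outputs=[result])

demo.launch()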