Krebzonide committed · Commit 6fed0f7 · Parent(s): 63aa355

Update app.py

app.py CHANGED
@@ -6,22 +6,16 @@ import random
 import gradio as gr
 import gc
 
-
-
-
-#
+model_id = os.getenv("Model")
+
+#stabilityai/stable-diffusion-xl-base-1.0 0 - base model
+#Colossus_Project_XL 1 - better people
+#Sevenof9_v3_sdxl 2 - nude women
+
 model_url_list = ["stabilityai/stable-diffusion-xl-base-1.0/blob/main/sd_xl_base_1.0.safetensors",
                   "Krebzonide/Colossus_Project_XL/blob/main/colossusProjectXLSFW_v202BakedVAE.safetensors",
                   "Krebzonide/Sevenof9_v3_sdxl/blob/main/nsfwSevenof9V3_nsfwSevenof9V3.safetensors"]
 
-vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
-pipe = None
-
-#pipe = StableDiffusionXLPipeline.from_pretrained(
-#    model_base, vae=vae, torch_dtype=torch.float16, variant="fp16", use_safetensors=True
-#)
-
-
 css = """
 .btn-green {
     background-image: linear-gradient(to bottom right, #6dd178, #00a613) !important;
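Note that os.getenv("Model") returns the variable's value as a string, or None when it is unset, while model_url_list is indexed by integer position (0, 1 and 2, per the comments added above). A minimal sketch of making that lookup explicit; the helper name and the fallback to index 0 are assumptions, not part of the commit:

import os

# Hypothetical helper, not in the commit: turn the "Model" environment
# variable (a string such as "1", or None when unset) into an integer
# index into model_url_list.
def resolve_model_index(default=0):
    raw = os.getenv("Model")
    if raw is None:
        return default   # assumed fallback: index 0, the SDXL base model
    return int(raw)      # raises ValueError if the variable is not numeric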
@@ -49,12 +43,10 @@ def generate(prompt, neg_prompt, samp_steps, guide_scale, batch_size, seed, heig
     ).images
     return [(img, f"Image {i+1}") for i, img in enumerate(images)]
 
-def set_base_model(base_model_id, progress=gr.Progress(track_tqdm=True)):
-
-    global model_list
+def set_base_model(base_model_id):
+    vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
     global model_url_list
-
-    model_url = "https://huggingface.co/" + model_url_list[new_model]
+    model_url = "https://huggingface.co/" + model_url_list[base_model_id]
     pipe = StableDiffusionXLPipeline.from_single_file(
         model_url,
         torch_dtype = torch.float16,
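For context, from_single_file builds a pipeline from one checkpoint file rather than a full diffusers repo layout. A minimal sketch of the pattern the new set_base_model follows; the function name here is hypothetical, and since the lines between the dtype argument and the auth token are elided in this hunk, passing the VAE as vae=vae is an assumption rather than the app's confirmed wiring:

import torch
from diffusers import AutoencoderKL, StableDiffusionXLPipeline

def load_sdxl_checkpoint(checkpoint_url):
    # Same fp16-safe VAE the commit loads inside set_base_model.
    vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix",
                                        torch_dtype=torch.float16)
    # Build the pipeline straight from one .safetensors checkpoint.
    pipe = StableDiffusionXLPipeline.from_single_file(
        checkpoint_url,
        vae=vae,                    # assumption: component override, not shown in the hunk
        torch_dtype=torch.float16,
    )
    return pipe.to("cuda")          # requires a CUDA device at runtime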
@@ -64,11 +56,8 @@ def set_base_model(base_model_id, progress=gr.Progress(track_tqdm=True)):
         use_auth_token="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"
     )
     pipe.to("cuda")
-    intro.close()
-    demo.launch(debug=True)
     return pipe
 
-
 with gr.Blocks(css=css) as demo:
     with gr.Column():
         prompt = gr.Textbox(label="Prompt")
@@ -85,11 +74,5 @@ with gr.Blocks(css=css) as demo:
         gallery = gr.Gallery(label="Generated images", height=800)
     submit_btn.click(generate, [prompt, negative_prompt, samp_steps, guide_scale, batch_size, seed, height, width], [gallery], queue=True)
 
-
-
-    with gr.Row():
-        model_id = gr.Dropdown(model_list, label="model", value="stabilityai/stable-diffusion-xl-base-1.0")
-        change_model_btn = gr.Button("Update Model", elem_classes="btn-green")
-        change_model_btn.click(set_base_model, [model_id])
-
-intro.launch(debug=True)
+pipe = set_base_model(model_id)
+demo.launch(debug=True)
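The removed block wired model switching to a dropdown and an "Update Model" button; after this commit the checkpoint is fixed once at startup, read from the Model environment variable before demo.launch() is reached. A hedged sketch of selecting the checkpoint for a local run; setting the variable this way is illustrative only, and on a Hugging Face Space it would instead be configured as a repository variable:

import os
import subprocess

# Illustrative only: launch app.py with "Model" set to one of the indices
# documented in the comments added at the top of the file (0, 1 or 2).
env = dict(os.environ, Model="1")            # 1 -> Colossus_Project_XL
subprocess.run(["python", "app.py"], env=env, check=True)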