Upload 10 files

- LAUNCHER - CASCADE.bat +9 -0
- README.md +18 -1
- app.py +273 -0
- previewer/__pycache__/modules.cpython-310.pyc +0 -0
- previewer/modules.py +45 -0
- previewer/previewer_v1_100k.pt +3 -0
- previewer/text2img_wurstchen_b_v1_previewer_100k.pt +3 -0
- requirements.txt +4 -0
- style.css +24 -0
- user_history.py +423 -0
LAUNCHER - CASCADE.bat
ADDED
@@ -0,0 +1,9 @@
+@echo off
+CALL env\Scripts\activate
+python app.py --inbrowser
+
+REM List of possible arguments
+
+REM --inbrowser    Automatically open the URL in the browser; if --share is used, the public URL is opened instead
+REM --server_port  Choose a specific server port, default=7860 (example: --server_port 420 makes the local URL http://127.0.0.1:420)
+REM --share        Creates a public URL
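These flags map one-to-one onto the argparse options defined in app.py, so the launcher can be edited to combine them. A hypothetical variant (assuming the env virtualenv the script activates already exists):

@echo off
CALL env\Scripts\activate
REM Serve on port 8080 and expose a public Gradio link as well
python app.py --inbrowser --share --server_port 8080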
README.md
CHANGED
@@ -1,3 +1,20 @@
+--inbrowser     Automatically open the URL in the browser; if --share is used, the public URL is opened instead
+
+--server_port   Choose a specific server port, default=7860 (example: --server_port 420 makes the local URL http://127.0.0.1:420)
+
+--share         Creates a public URL
+
 ---
-
+title: Stable Cascade
+emoji: 👁
+colorFrom: blue
+colorTo: purple
+sdk: gradio
+sdk_version: 4.18.0
+app_file: app.py
+pinned: false
+license: mit
+hf_oauth: true
 ---
+
+Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py
ADDED
@@ -0,0 +1,273 @@
+import os
+import random
+import gradio as gr
+import numpy as np
+import PIL.Image
+import torch
+import argparse
+from typing import List
+from diffusers.utils import numpy_to_pil
+from diffusers import StableCascadeDecoderPipeline, StableCascadePriorPipeline
+from diffusers.pipelines.wuerstchen import DEFAULT_STAGE_C_TIMESTEPS
+from previewer.modules import Previewer
+
+os.environ['TOKENIZERS_PARALLELISM'] = 'false'
+
+DESCRIPTION = "# Stable Cascade"
+DESCRIPTION += "\n<p style=\"text-align: center\">Unofficial Stable Cascade demo by <a href='https://www.youtube.com/@nerual_dreming/' target='_blank'>Nerual Dreming</a>, based on <a href='https://huggingface.co/stabilityai/stable-cascade' target='_blank'>Stable Cascade</a>, a new high-resolution text-to-image model from Stability AI built on the Würstchen architecture (<a href='https://huggingface.co/stabilityai/stable-cascade/blob/main/LICENSE' target='_blank'>for non-commercial and scientific use only</a>)</p>"
+
+MAX_SEED = np.iinfo(np.int32).max
+CACHE_EXAMPLES = False
+MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1536"))
+USE_TORCH_COMPILE = False
+ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
+PREVIEW_IMAGES = True
+
+parser = argparse.ArgumentParser(description='Gradio App Control')
+parser.add_argument('--share', action='store_true', help='Create a public shareable URL')
+parser.add_argument('--inbrowser', action='store_true', help='Automatically launch the application in a browser')
+parser.add_argument('--server_port', type=int, default=7860, help='Server port')
+args = parser.parse_args()
+
+dtype = torch.bfloat16
+if torch.cuda.is_available():
+    device = "cuda"
+elif torch.backends.mps.is_available():
+    device = "mps"
+    dtype = torch.float32
+else:
+    device = "cpu"
+print(f"device={device}")
+if device != "cpu":
+    prior_pipeline = StableCascadePriorPipeline.from_pretrained("stabilityai/stable-cascade-prior", torch_dtype=dtype)#.to(device)
+    decoder_pipeline = StableCascadeDecoderPipeline.from_pretrained("stabilityai/stable-cascade", torch_dtype=dtype)#.to(device)
+
+    if ENABLE_CPU_OFFLOAD:
+        prior_pipeline.enable_model_cpu_offload()
+        decoder_pipeline.enable_model_cpu_offload()
+    else:
+        prior_pipeline.to(device)
+        decoder_pipeline.to(device)
+
+    if USE_TORCH_COMPILE:
+        prior_pipeline.prior = torch.compile(prior_pipeline.prior, mode="reduce-overhead", fullgraph=True)
+        decoder_pipeline.decoder = torch.compile(decoder_pipeline.decoder, mode="max-autotune", fullgraph=True)
+
+    if PREVIEW_IMAGES:
+        previewer = Previewer()
+        previewer_state_dict = torch.load("previewer/previewer_v1_100k.pt", map_location=torch.device('cpu'))["state_dict"]
+        previewer.load_state_dict(previewer_state_dict)
+        def callback_prior(i, t, latents):
+            output = previewer(latents)
+            output = numpy_to_pil(output.clamp(0, 1).permute(0, 2, 3, 1).float().cpu().numpy())
+            return output
+        callback_steps = 1
+    else:
+        previewer = None
+        callback_prior = None
+        callback_steps = None
+else:
+    prior_pipeline = None
+    decoder_pipeline = None
+
+
+def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
+    if randomize_seed:
+        seed = random.randint(0, MAX_SEED)
+    return seed
+
+def generate(
+    prompt: str,
+    negative_prompt: str = "",
+    seed: int = 0,
+    width: int = 1024,
+    height: int = 1024,
+    prior_num_inference_steps: int = 30,
+    # prior_timesteps: List[float] = None,
+    prior_guidance_scale: float = 4.0,
+    decoder_num_inference_steps: int = 12,
+    # decoder_timesteps: List[float] = None,
+    decoder_guidance_scale: float = 0.0,
+    num_images_per_prompt: int = 2,
+    # profile: gr.OAuthProfile | None = None,
+) -> PIL.Image.Image:
+    previewer.eval().requires_grad_(False).to(device).to(dtype)
+    prior_pipeline.to(device)
+    decoder_pipeline.to(device)
+
+    generator = torch.Generator().manual_seed(seed)
+    prior_output = prior_pipeline(
+        prompt=prompt,
+        height=height,
+        width=width,
+        num_inference_steps=prior_num_inference_steps,
+        timesteps=DEFAULT_STAGE_C_TIMESTEPS,
+        negative_prompt=negative_prompt,
+        guidance_scale=prior_guidance_scale,
+        num_images_per_prompt=num_images_per_prompt,
+        generator=generator,
+        callback=callback_prior,
+        callback_steps=callback_steps
+    )
+
+    if PREVIEW_IMAGES:
+        for _ in range(len(DEFAULT_STAGE_C_TIMESTEPS)):
+            r = next(prior_output)
+            if isinstance(r, list):
+                yield r[0]
+        prior_output = r
+
+    decoder_output = decoder_pipeline(
+        image_embeddings=prior_output.image_embeddings,
+        prompt=prompt,
+        num_inference_steps=decoder_num_inference_steps,
+        # timesteps=decoder_timesteps,
+        guidance_scale=decoder_guidance_scale,
+        negative_prompt=negative_prompt,
+        generator=generator,
+        output_type="pil",
+    ).images
+
+    yield decoder_output[0]
+
+
+examples = [
+    "An astronaut riding a green horse",
+    "A mecha robot in a favela by Tarsila do Amaral",
+    "The spirit of a Tamagotchi wandering in the city of Los Angeles",
+    "A delicious feijoada ramen dish"
+]
+
+with gr.Blocks() as demo:
+    gr.Markdown(DESCRIPTION)
+    gr.DuplicateButton(
+        value="Duplicate Space for private use",
+        elem_id="duplicate-button",
+        visible=os.getenv("SHOW_DUPLICATE_BUTTON") == "1",
+    )
+    with gr.Group():
+        with gr.Row():
+            prompt = gr.Text(
+                label="Prompt",
+                show_label=False,
+                max_lines=1,
+                placeholder="Enter your prompt",
+                container=False,
+            )
+            run_button = gr.Button("Generate", scale=0)
+        result = gr.Image(label="Result", show_label=False)
+    with gr.Accordion("Advanced options", open=False):
+        negative_prompt = gr.Text(
+            label="Negative prompt",
+            max_lines=1,
+            placeholder="Enter a negative prompt",
+        )
+
+        seed = gr.Slider(
+            label="Seed",
+            minimum=0,
+            maximum=MAX_SEED,
+            step=1,
+            value=0,
+        )
+        randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+        with gr.Row():
+            width = gr.Slider(
+                label="Width",
+                minimum=1024,
+                maximum=MAX_IMAGE_SIZE,
+                step=512,
+                value=1024,
+            )
+            height = gr.Slider(
+                label="Height",
+                minimum=1024,
+                maximum=MAX_IMAGE_SIZE,
+                step=512,
+                value=1024,
+            )
+            num_images_per_prompt = gr.Slider(
+                label="Number of images",
+                minimum=1,
+                maximum=2,
+                step=1,
+                value=1,
+            )
+        with gr.Row():
+            prior_guidance_scale = gr.Slider(
+                label="Prior Guidance Scale",
+                minimum=0,
+                maximum=20,
+                step=0.1,
+                value=4.0,
+            )
+            prior_num_inference_steps = gr.Slider(
+                label="Prior Inference Steps",
+                minimum=10,
+                maximum=30,
+                step=1,
+                value=20,
+            )
+
+            decoder_guidance_scale = gr.Slider(
+                label="Decoder Guidance Scale",
+                minimum=0,
+                maximum=0,
+                step=0.1,
+                value=0.0,
+            )
+            decoder_num_inference_steps = gr.Slider(
+                label="Decoder Inference Steps",
+                minimum=4,
+                maximum=12,
+                step=1,
+                value=10,
+            )
+
+    gr.Examples(
+        examples=examples,
+        inputs=prompt,
+        outputs=result,
+        fn=generate,
+        cache_examples=CACHE_EXAMPLES,
+    )
+
+    inputs = [
+        prompt,
+        negative_prompt,
+        seed,
+        width,
+        height,
+        prior_num_inference_steps,
+        # prior_timesteps,
+        prior_guidance_scale,
+        decoder_num_inference_steps,
+        # decoder_timesteps,
+        decoder_guidance_scale,
+        num_images_per_prompt,
+    ]
+    gr.on(
+        triggers=[prompt.submit, negative_prompt.submit, run_button.click],
+        fn=randomize_seed_fn,
+        inputs=[seed, randomize_seed],
+        outputs=seed,
+        queue=False,
+        api_name=False,
+    ).then(
+        fn=generate,
+        inputs=inputs,
+        outputs=result,
+        api_name="run",
+    )
+
+with gr.Blocks(css="style.css") as demo_with_history:
+    with gr.Tab("App"):
+        demo.render()
+
+if __name__ == "__main__":
+    launch_args = {
+        'inbrowser': args.inbrowser,
+        'share': args.share,
+        'server_port': args.server_port,
+    }
+    demo_with_history.launch(**launch_args)
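Stripped of the Gradio wiring and the preview callback, generate() is a plain two-stage Stable Cascade run: the prior (Stage C) turns the prompt into image embeddings, and the decoder (Stages B/A) turns those embeddings into pixels. A minimal sketch of that flow, assuming a CUDA device and a diffusers build that ships both pipelines (such as the fork pinned in requirements.txt):

import torch
from diffusers import StableCascadeDecoderPipeline, StableCascadePriorPipeline

prompt = "An astronaut riding a green horse"

# Stage C: text prompt -> image embeddings
prior = StableCascadePriorPipeline.from_pretrained(
    "stabilityai/stable-cascade-prior", torch_dtype=torch.bfloat16
).to("cuda")
prior_output = prior(
    prompt=prompt, height=1024, width=1024,
    num_inference_steps=20, guidance_scale=4.0,
    generator=torch.Generator().manual_seed(0),
)

# Stages B/A: image embeddings -> final image
decoder = StableCascadeDecoderPipeline.from_pretrained(
    "stabilityai/stable-cascade", torch_dtype=torch.bfloat16
).to("cuda")
image = decoder(
    image_embeddings=prior_output.image_embeddings,
    prompt=prompt, num_inference_steps=10,
    guidance_scale=0.0, output_type="pil",
).images[0]
image.save("astronaut.png")

With the pinned fork and a callback set, the prior call instead yields intermediate latents; that is what the for _ in range(len(DEFAULT_STAGE_C_TIMESTEPS)) loop in generate() consumes to stream previews to the UI.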
previewer/__pycache__/modules.cpython-310.pyc
ADDED
Binary file (1.22 kB)
previewer/modules.py
ADDED
@@ -0,0 +1,45 @@
+from torch import nn
+
+
+# Fast Decoder for Stage C latents. E.g. 16 x 24 x 24 -> 3 x 192 x 192
+class Previewer(nn.Module):
+    def __init__(self, c_in=16, c_hidden=512, c_out=3):
+        super().__init__()
+        self.blocks = nn.Sequential(
+            nn.Conv2d(c_in, c_hidden, kernel_size=1),  # 16 channels to 512 channels
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden),
+
+            nn.Conv2d(c_hidden, c_hidden, kernel_size=3, padding=1),
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden),
+
+            nn.ConvTranspose2d(c_hidden, c_hidden // 2, kernel_size=2, stride=2),  # 16 -> 32
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 2),
+
+            nn.Conv2d(c_hidden // 2, c_hidden // 2, kernel_size=3, padding=1),
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 2),
+
+            nn.ConvTranspose2d(c_hidden // 2, c_hidden // 4, kernel_size=2, stride=2),  # 32 -> 64
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 4),
+
+            nn.Conv2d(c_hidden // 4, c_hidden // 4, kernel_size=3, padding=1),
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 4),
+
+            nn.ConvTranspose2d(c_hidden // 4, c_hidden // 4, kernel_size=2, stride=2),  # 64 -> 128
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 4),
+
+            nn.Conv2d(c_hidden // 4, c_hidden // 4, kernel_size=3, padding=1),
+            nn.GELU(),
+            nn.BatchNorm2d(c_hidden // 4),
+
+            nn.Conv2d(c_hidden // 4, c_out, kernel_size=1),
+        )
+
+    def forward(self, x):
+        return self.blocks(x)
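Each of the three ConvTranspose2d layers doubles the spatial resolution, so the module upsamples 8x overall, which is exactly the 16 x 24 x 24 -> 3 x 192 x 192 mapping the header comment promises. A quick shape sanity check (illustrative only: random weights here, whereas app.py loads previewer_v1_100k.pt):

import torch
from previewer.modules import Previewer

previewer = Previewer().eval()
with torch.no_grad():
    rgb = previewer(torch.randn(1, 16, 24, 24))  # one batch of Stage C latents
print(rgb.shape)  # torch.Size([1, 3, 192, 192])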
previewer/previewer_v1_100k.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14a141d7156cf41bd32d6b68e2fc4d2cedb02db1697f862d52458670eb788958
+size 47820715
previewer/text2img_wurstchen_b_v1_previewer_100k.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76e82483253b24430b20e3e0c98ec2f9aeb45f0b487f7b330bac044b5de0d6f7
+size 45244773
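Both .pt entries are Git LFS pointer files rather than the checkpoints themselves: each records only the LFS spec version, the sha256 oid of the blob, and its size in bytes (roughly 48 MB and 45 MB here). If the repository is cloned without LFS resolving the pointers, fetching the real weights is typically:

git lfs install
git lfs pull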
requirements.txt
ADDED
@@ -0,0 +1,4 @@
+git+https://github.com/kashif/diffusers.git@diffusers-yield-callback
+accelerate
+safetensors
+transformers
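Note that the first requirement pins kashif's diffusers-yield-callback branch rather than a PyPI release; the step-by-step previews in app.py depend on the prior pipeline yielding intermediate latents to its callback, which stock diffusers releases of the time did not do. torch and gradio are absent from the list, presumably provided by the Space image (sdk_version: 4.18.0) or by the local env virtualenv. Installing into that environment would typically be:

python -m pip install -r requirements.txt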
style.css
ADDED
@@ -0,0 +1,24 @@
+h1 {
+  text-align: center;
+  justify-content: center;
+}
+[role="tabpanel"]{border: 0}
+#duplicate-button {
+  margin: auto;
+  color: #fff;
+  background: #1565c0;
+  border-radius: 100vh;
+}
+
+.gradio-container {
+  max-width: 690px !important;
+}
+
+#share-btn-container{padding-left: 0.5rem !important; padding-right: 0.5rem !important; background-color: #000000; justify-content: center; align-items: center; border-radius: 9999px !important; max-width: 13rem; margin-left: auto;margin-top: 0.35em;}
+div#share-btn-container > div {flex-direction: row;background: black;align-items: center}
+#share-btn-container:hover {background-color: #060606}
+#share-btn {all: initial; color: #ffffff;font-weight: 600; cursor:pointer; font-family: 'IBM Plex Sans', sans-serif; margin-left: 0.5rem !important; padding-top: 0.5rem !important; padding-bottom: 0.5rem !important;right:0;font-size: 15px;}
+#share-btn * {all: unset}
+#share-btn-container div:nth-child(-n+2){width: auto !important;min-height: 0px !important;}
+#share-btn-container .wrap {display: none !important}
+#share-btn-container.hidden {display: none!important}
user_history.py
ADDED
@@ -0,0 +1,423 @@
+"""
+User History is a plugin that you can add to your Spaces to cache generated images for your users.
+
+Key features:
+- 🤗 Sign in with Hugging Face
+- Save generated images with their metadata: prompts, timestamp, hyper-parameters, etc.
+- Export your history as zip.
+- Delete your history to respect privacy.
+- Compatible with Persistent Storage for long-term storage.
+- Admin panel to check configuration and disk usage.
+
+Useful links:
+- Demo: https://huggingface.co/spaces/Wauplin/gradio-user-history
+- README: https://huggingface.co/spaces/Wauplin/gradio-user-history/blob/main/README.md
+- Source file: https://huggingface.co/spaces/Wauplin/gradio-user-history/blob/main/user_history.py
+- Discussions: https://huggingface.co/spaces/Wauplin/gradio-user-history/discussions
+"""
+import json
+import os
+import shutil
+import warnings
+from datetime import datetime
+from functools import cache
+from pathlib import Path
+from typing import Callable, Dict, List, Tuple
+from uuid import uuid4
+
+import gradio as gr
+import numpy as np
+import requests
+from filelock import FileLock
+from PIL.Image import Image, fromarray  # fromarray is a module-level function (the Image class has no .fromarray)
+
+
+def setup(folder_path: str | Path | None = None) -> None:
+    user_history = _UserHistory()
+    user_history.folder_path = _resolve_folder_path(folder_path)
+    user_history.initialized = True
+
+
+def render() -> None:
+    user_history = _UserHistory()
+
+    # initialize with default config
+    if not user_history.initialized:
+        print("Initializing user history with default config. Use `user_history.setup(...)` to customize folder_path.")
+        setup()
+
+    # Render user history tab
+    gr.Markdown(
+        "## Your past generations\n\nLog in to keep a gallery of your previous generations. Your history will be saved"
+        " and available on your next visit. Make sure to export your images from time to time as this gallery may be"
+        " deleted in the future."
+    )
+
+    if os.getenv("SYSTEM") == "spaces" and not os.path.exists("/data"):
+        gr.Markdown(
+            "**⚠️ Persistent storage is disabled, meaning your history will be lost if the Space gets restarted."
+            " Only the Space owner can setup a Persistent Storage. If you are not the Space owner, consider"
+            " duplicating this Space to set your own storage.⚠️**"
+        )
+
+    with gr.Row():
+        gr.LoginButton(min_width=250)
+        #gr.LogoutButton(min_width=250)
+        refresh_button = gr.Button(
+            "Refresh",
+            icon="https://huggingface.co/spaces/Wauplin/gradio-user-history/resolve/main/assets/icon_refresh.png",
+        )
+        export_button = gr.Button(
+            "Export",
+            icon="https://huggingface.co/spaces/Wauplin/gradio-user-history/resolve/main/assets/icon_download.png",
+        )
+        delete_button = gr.Button(
+            "Delete history",
+            icon="https://huggingface.co/spaces/Wauplin/gradio-user-history/resolve/main/assets/icon_delete.png",
+        )
+
+    # "Export zip" row (hidden by default)
+    with gr.Row():
+        export_file = gr.File(file_count="single", file_types=[".zip"], label="Exported history", visible=False)
+
+    # "Config deletion" row (hidden by default)
+    with gr.Row():
+        confirm_button = gr.Button("Confirm delete all history", variant="stop", visible=False)
+        cancel_button = gr.Button("Cancel", visible=False)
+
+    # Gallery
+    gallery = gr.Gallery(
+        label="Past images",
+        show_label=True,
+        elem_id="gallery",
+        object_fit="contain",
+        columns=5,
+        height=600,
+        preview=False,
+        show_share_button=False,
+        show_download_button=False,
+    )
+    gr.Markdown(
+        "User history is powered by"
+        " [Wauplin/gradio-user-history](https://huggingface.co/spaces/Wauplin/gradio-user-history). Integrate it into"
+        " your own Space in just a few lines of code!"
+    )
+    gallery.attach_load_event(_fetch_user_history, every=None)
+
+    # Interactions
+    refresh_button.click(fn=_fetch_user_history, inputs=[], outputs=[gallery], queue=False)
+    export_button.click(fn=_export_user_history, inputs=[], outputs=[export_file], queue=False)
+
+    # Taken from https://github.com/gradio-app/gradio/issues/3324#issuecomment-1446382045
+    delete_button.click(
+        lambda: [gr.update(visible=True), gr.update(visible=True)],
+        outputs=[confirm_button, cancel_button],
+        queue=False,
+    )
+    cancel_button.click(
+        lambda: [gr.update(visible=False), gr.update(visible=False)],
+        outputs=[confirm_button, cancel_button],
+        queue=False,
+    )
+    confirm_button.click(_delete_user_history).then(
+        lambda: [gr.update(visible=False), gr.update(visible=False)],
+        outputs=[confirm_button, cancel_button],
+        queue=False,
+    )
+
+    # Admin section (only shown locally or when logged in as Space owner)
+    _admin_section()
+
+
+def save_image(
+    profile: gr.OAuthProfile | None,
+    image: Image | np.ndarray | str | Path,
+    label: str | None = None,
+    metadata: Dict | None = None,
+):
+    # Ignore images from logged out users
+    if profile is None:
+        return
+    username = profile["preferred_username"]
+
+    # Ignore images if user history not used
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        warnings.warn(
+            "User history is not set in Gradio demo. Saving image is ignored. You must use `user_history.render(...)`"
+            " first."
+        )
+        return
+
+    # Copy image to storage
+    image_path = _copy_image(image, dst_folder=user_history._user_images_path(username))
+
+    # Save new image + metadata
+    if metadata is None:
+        metadata = {}
+    if "datetime" not in metadata:
+        metadata["datetime"] = str(datetime.now())
+    data = {"path": str(image_path), "label": label, "metadata": metadata}
+    with user_history._user_lock(username):
+        with user_history._user_jsonl_path(username).open("a") as f:
+            f.write(json.dumps(data) + "\n")
+
+
+#############
+# Internals #
+#############
+
+
+class _UserHistory(object):
+    _instance = None
+    initialized: bool = False
+    folder_path: Path
+
+    def __new__(cls):
+        # Using singleton pattern => we don't want to expose an object (more complex to use) but still want to keep
+        # state between `render` and `save_image` calls.
+        if cls._instance is None:
+            cls._instance = super(_UserHistory, cls).__new__(cls)
+        return cls._instance
+
+    def _user_path(self, username: str) -> Path:
+        path = self.folder_path / username
+        path.mkdir(parents=True, exist_ok=True)
+        return path
+
+    def _user_lock(self, username: str) -> FileLock:
+        """Ensure history is not corrupted if concurrent calls."""
+        return FileLock(self.folder_path / f"{username}.lock")  # lock outside of folder => better when exporting ZIP
+
+    def _user_jsonl_path(self, username: str) -> Path:
+        return self._user_path(username) / "history.jsonl"
+
+    def _user_images_path(self, username: str) -> Path:
+        path = self._user_path(username) / "images"
+        path.mkdir(parents=True, exist_ok=True)
+        return path
+
+
+def _fetch_user_history(profile: gr.OAuthProfile | None) -> List[Tuple[str, str]]:
+    """Return saved history for that user, if it exists."""
+    # Cannot load history for logged out users
+    if profile is None:
+        return []
+    username = profile["preferred_username"]
+
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        warnings.warn("User history is not set in Gradio demo. You must use `user_history.render(...)` first.")
+        return []
+
+    with user_history._user_lock(username):
+        # No file => no history saved yet
+        jsonl_path = user_history._user_jsonl_path(username)
+        if not jsonl_path.is_file():
+            return []
+
+        # Read history
+        images = []
+        for line in jsonl_path.read_text().splitlines():
+            data = json.loads(line)
+            images.append((data["path"], data["label"] or ""))
+        return list(reversed(images))
+
+
+def _export_user_history(profile: gr.OAuthProfile | None) -> Dict | None:
+    """Zip all history for that user, if it exists and return it as a downloadable file."""
+    # Cannot load history for logged out users
+    if profile is None:
+        return None
+    username = profile["preferred_username"]
+
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        warnings.warn("User history is not set in Gradio demo. You must use `user_history.render(...)` first.")
+        return None
+
+    # Zip history
+    with user_history._user_lock(username):
+        path = shutil.make_archive(
+            str(_archives_path() / f"history_{username}"), "zip", user_history._user_path(username)
+        )
+
+    return gr.update(visible=True, value=path)
+
+
+def _delete_user_history(profile: gr.OAuthProfile | None) -> None:
+    """Delete all history for that user."""
+    # Cannot load history for logged out users
+    if profile is None:
+        return
+    username = profile["preferred_username"]
+
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        warnings.warn("User history is not set in Gradio demo. You must use `user_history.render(...)` first.")
+        return
+
+    with user_history._user_lock(username):
+        shutil.rmtree(user_history._user_path(username))
+
+
+####################
+# Internal helpers #
+####################
+
+
+def _copy_image(image: Image | np.ndarray | str | Path, dst_folder: Path) -> Path:
+    """Copy image to the images folder."""
+    # Already a path => copy it
+    if isinstance(image, str):
+        image = Path(image)
+    if isinstance(image, Path):
+        dst = dst_folder / f"{uuid4().hex}_{Path(image).name}"  # keep file ext
+        shutil.copyfile(image, dst)
+        return dst
+
+    # Still a Python object => serialize it
+    if isinstance(image, np.ndarray):
+        image = fromarray(image)
+    if isinstance(image, Image):
+        dst = dst_folder / f"{uuid4().hex}.png"
+        image.save(dst)
+        return dst
+
+    raise ValueError(f"Unsupported image type: {type(image)}")
+
+
+def _resolve_folder_path(folder_path: str | Path | None) -> Path:
+    if folder_path is not None:
+        return Path(folder_path).expanduser().resolve()
+
+    if os.getenv("SYSTEM") == "spaces" and os.path.exists("/data"):  # Persistent storage is enabled!
+        return Path("/data") / "_user_history"
+
+    # Not in a Space or Persistent storage not enabled => local folder
+    return Path(__file__).parent / "_user_history"
+
+
+def _archives_path() -> Path:
+    # Doesn't have to be on persistent storage as it's only used for download
+    path = Path(__file__).parent / "_user_history_exports"
+    path.mkdir(parents=True, exist_ok=True)
+    return path
+
+
+#################
+# Admin section #
+#################
+
+
+def _admin_section() -> None:
+    title = gr.Markdown()
+    title.attach_load_event(_display_if_admin(), every=None)
+
+
+def _display_if_admin() -> Callable:
+    def _inner(profile: gr.OAuthProfile | None) -> str:
+        if profile is None:
+            return ""
+        if profile["preferred_username"] in _fetch_admins():
+            return _admin_content()
+        return ""
+
+    return _inner
+
+
+def _admin_content() -> str:
+    return f"""
+## Admin section
+
+Running on **{os.getenv("SYSTEM", "local")}** (id: {os.getenv("SPACE_ID")}). {_get_msg_is_persistent_storage_enabled()}
+
+Admins: {', '.join(_fetch_admins())}
+
+{_get_nb_users()} user(s), {_get_nb_images()} image(s)
+
+### Configuration
+
+History folder: *{_UserHistory().folder_path}*
+
+Exports folder: *{_archives_path()}*
+
+### Disk usage
+
+{_disk_space_warning_message()}
+"""
+
+
+def _get_nb_users() -> int:
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        return 0
+    if user_history.folder_path is not None and user_history.folder_path.exists():
+        return len([path for path in user_history.folder_path.iterdir() if path.is_dir()])
+    return 0
+
+
+def _get_nb_images() -> int:
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        return 0
+    if user_history.folder_path is not None and user_history.folder_path.exists():
+        return len([path for path in user_history.folder_path.glob("*/images/*")])
+    return 0
+
+
+def _get_msg_is_persistent_storage_enabled() -> str:
+    if os.getenv("SYSTEM") == "spaces":
+        if os.path.exists("/data"):
+            return "Persistent storage is enabled."
+        else:
+            return (
+                "Persistent storage is not enabled. This means that user histories will be deleted when the Space is"
+                " restarted. Consider adding a Persistent Storage in your Space settings."
+            )
+    return ""
+
+
+def _disk_space_warning_message() -> str:
+    user_history = _UserHistory()
+    if not user_history.initialized:
+        return ""
+
+    message = ""
+    if user_history.folder_path is not None:
+        total, used, _ = _get_disk_usage(user_history.folder_path)
+        message += f"History folder: **{used / 1e9 :.0f}/{total / 1e9 :.0f}GB** used ({100*used/total :.0f}%)."
+
+    total, used, _ = _get_disk_usage(_archives_path())
+    message += f"\n\nExports folder: **{used / 1e9 :.0f}/{total / 1e9 :.0f}GB** used ({100*used/total :.0f}%)."
+
+    return f"{message.strip()}"
+
+
+def _get_disk_usage(path: Path) -> Tuple[int, int, int]:
+    for path in [path] + list(path.parents):  # first check target_dir, then each parents one by one
+        try:
+            return shutil.disk_usage(path)
+        except OSError:  # if doesn't exist or can't read => fail silently and try parent one
+            pass
+    return 0, 0, 0
+
+
+@cache
+def _fetch_admins() -> List[str]:
+    # Running locally => fake user is admin
+    if os.getenv("SYSTEM") != "spaces":
+        return ["FakeGradioUser"]
+
+    # Running in Space but no space_id => ???
+    space_id = os.getenv("SPACE_ID")
+    if space_id is None:
+        return ["Unknown"]
+
+    # Running in Space => try to fetch organization members
+    # Otherwise, it's not an organization => namespace is the user
+    namespace = space_id.split("/")[0]
+    response = requests.get(f"https://huggingface.co/api/organizations/{namespace}/members")
+    if response.status_code == 200:
+        return sorted((member["user"] for member in response.json()), key=lambda x: x.lower())
+    return [namespace]
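user_history.py is self-contained, but nothing in app.py calls it yet: the history tab and the profile parameter of generate() are commented out. A minimal sketch of the intended wiring, using only the functions this file exports (setup, render, save_image) around a hypothetical stand-in generator; the hf_oauth: true flag in README.md is what lets Gradio inject gr.OAuthProfile into type-hinted parameters on Spaces:

import gradio as gr
import numpy as np
import user_history

user_history.setup()  # optional; defaults to ./_user_history (or /data on Spaces)

def generate(prompt: str, profile: gr.OAuthProfile | None = None):
    # Stand-in for the real Stable Cascade call: a random RGB image.
    image = np.random.randint(0, 256, (256, 256, 3), dtype=np.uint8)
    # Persist the result with its metadata (silently skipped when logged out).
    user_history.save_image(profile=profile, image=image, label=prompt,
                            metadata={"prompt": prompt})
    return image

with gr.Blocks(css="style.css") as demo:
    with gr.Tab("App"):
        prompt = gr.Text(label="Prompt")
        result = gr.Image(label="Result")
        prompt.submit(fn=generate, inputs=prompt, outputs=result)
    with gr.Tab("Past generations"):
        user_history.render()

demo.launch()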