eienmojiki committed on
Commit
6bd2d1a
·
verified ·
1 Parent(s): 5de4c7b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +123 -1
app.py CHANGED
@@ -17,6 +17,15 @@ MIN_IMAGE_SIZE = int(os.getenv("MIN_IMAGE_SIZE", "512"))
17
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2048"))
18
  MAX_SEED = np.iinfo(np.int32).max
19
 
 
 
 
 
 
 
 
 
 
20
  torch.backends.cudnn.deterministic = True
21
  torch.backends.cudnn.benchmark = False
22
 
@@ -101,7 +110,7 @@ def generate(
101
  output_type="pil",
102
  ).images[0]
103
 
104
- return img
105
 
106
  except Exception as e:
107
  print(f"An error occurred: {e}")
@@ -112,3 +121,116 @@ if torch.cuda.is_available():
112
  else:
113
  pipe = None
114
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "2048"))
18
  MAX_SEED = np.iinfo(np.int32).max
19
 
20
+ sampler_list = [
21
+ "DPM++ 2M Karras",
22
+ "DPM++ SDE Karras",
23
+ "DPM++ 2M SDE Karras",
24
+ "Euler",
25
+ "Euler a",
26
+ "DDIM",
27
+ ]
28
+
29
  torch.backends.cudnn.deterministic = True
30
  torch.backends.cudnn.benchmark = False
31
 
 
110
  output_type="pil",
111
  ).images[0]
112
 
113
+ return img, seed
114
 
115
  except Exception as e:
116
  print(f"An error occurred: {e}")
 
121
  else:
122
  pipe = None
123
 
124
+ with gr.Blocks(
125
+ theme=gr.themes.Soft()
126
+ ) as demo:
127
+ gr.Markdown("Starry XL 5.2 Demo")
128
+
129
+ with gr.Group():
130
+ prompt = gr.Text(
131
+ label="Prompt",
132
+ placeholder="Enter your prompt here..."
133
+ )
134
+
135
+ negative_prompt = gr.Text(
136
+ label="Negative Prompt",
137
+ placeholder="(Optional) Enter your negative prompt here..."
138
+ )
139
+
140
+ with gr.Row():
141
+ width = gr.Slider(
142
+ label="Width",
143
+ minimum=256,
144
+ maximum=MAX_IMAGE_SIZE,
145
+ step=32,
146
+ value=1024,
147
+ )
148
+ height = gr.Slider(
149
+ label="Height",
150
+ minimum=256,
151
+ maximum=MAX_IMAGE_SIZE,
152
+ step=32,
153
+ value=1024,
154
+ )
155
+
156
+ sampler = gr.Dropdown(
157
+ label="Sampler",
158
+ choices=sampler_list,
159
+ interactive=True,
160
+ value="Euler a",
161
+ )
162
+
163
+ seed = gr.Slider(
164
+ label="Seed",
165
+ minimum=0,
166
+ maximum=MAX_SEED,
167
+ step=1,
168
+ value=0,
169
+ )
170
+
171
+ randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
172
+
173
+ with gr.Row():
174
+ guidance_scale = gr.Slider(
175
+ label="Guidance scale",
176
+ minimum=1,
177
+ maximum=20,
178
+ step=0.1,
179
+ value=5.0,
180
+ )
181
+ num_inference_steps = gr.Slider(
182
+ label="Steps",
183
+ minimum=10,
184
+ maximum=100,
185
+ step=1,
186
+ value=25,
187
+ )
188
+
189
+ clip_skip = gr.Slider(
190
+ label="Clip Skip",
191
+ minimum=1,
192
+ maximum=2,
193
+ step=1,
194
+ value=1
195
+ )
196
+
197
+ run_button = gr.Button("Run")
198
+
199
+ result = gr.Image(
200
+ label="Result",
201
+ show_label=False
202
+ )
203
+
204
+ with gr.Group():
205
+ used_seed = gr.Number(label="Used Seed", interactive=False)
206
+
207
+ gr.on(
208
+ triggers=[
209
+ prompt.submit,
210
+ negative_prompt.submit,
211
+ run_button.click,
212
+ ],
213
+ fn=randomize_seed_fn,
214
+ inputs=[seed, randomize_seed],
215
+ outputs=seed,
216
+ queue=False,
217
+ api_name=False,
218
+ ).then(
219
+ fn=generate,
220
+ inputs=[
221
+ prompt,
222
+ negative_prompt,
223
+ seed,
224
+ width,
225
+ height,
226
+ guidance_scale,
227
+ num_inference_steps,
228
+ sampler,
229
+ clip_skip
230
+ ],
231
+ outputs=[result, used_seed],
232
+ api_name="run"
233
+ )
234
+
235
+ if __name__ == "__main__":
236
+ demo.queue(max_size=20).launch(show_error=True)