fantaxy committed (verified)
Commit d47ea1e · 1 Parent(s): 82172b4

Update app.py

Files changed (1): app.py +43 -22
app.py CHANGED
@@ -30,26 +30,35 @@ pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=
 MAX_SEED = np.iinfo(np.int32).max
 
 @spaces.GPU
-def infer(prompt, seed=1, randomize_seed=False, num_inference_steps=28):
-    print('entered the function')
+def infer(prompt, seed=1, randomize_seed=False, num_inference_steps=28, progress=gr.Progress()):
+    progress(0, desc="Starting...")
     prompt_template = f"A side by side 4 frame image showing high quality consecutive stills from a looped gif animation moving from left to right. The scene has motion. The stills are of {prompt}"
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
-
+
+    progress(0.2, desc="Generating animation...")
     generator = torch.Generator().manual_seed(seed)
 
+    def callback(step, timestep, latents):
+        progress((step + 1) / num_inference_steps, desc=f"Step {step + 1}/{num_inference_steps}")
+        return True
+
     image = pipe(
         prompt=prompt_template,
         num_inference_steps=num_inference_steps,
         num_images_per_prompt=1,
         generator=generator,
         height=320,
-        width=1280
+        width=1280,
+        callback=callback,
+        callback_steps=1
     ).images[0]
 
+    progress(0.9, desc="Creating GIF...")
     gif_name = f"{uuid.uuid4().hex}-flux.gif"
     export_to_gif(split_image(image, 4), gif_name, fps=4)
 
+    progress(1.0, desc="Done!")
     return gif_name, image, seed
 
 examples = [
@@ -101,32 +110,28 @@ footer {visibility: hidden}
     background-color: #f8f9fa;
 }
 
-/* Adjust the Examples text color styles */
-.gr-examples-text {
-    color: black !important;
-}
-
-.gr-examples button {
+/* Force the Examples text color */
+.gallery-item {
     color: black !important;
 }
 
-.gr-examples span {
+.gallery-item * {
     color: black !important;
 }
 
-.gr-examples div {
+.fixed-height {
     color: black !important;
 }
 
-.gr-examples p {
+.fixed-height * {
     color: black !important;
 }
 
-.gr-examples h3 {
+.examples-table {
     color: black !important;
 }
 
-.gr-sample-text {
+.examples-table * {
     color: black !important;
 }
 
@@ -145,13 +150,18 @@ footer {visibility: hidden}
     width: auto !important;
 }
 
-/* Additional Examples-related styles */
-.example-text {
-    color: black !important;
+/* Progress bar styles */
+.progress-bar {
+    background-color: #f0f0f0;
+    border-radius: 10px;
+    padding: 3px;
 }
 
-.example-label {
-    color: black !important;
+.progress-bar-fill {
+    background: linear-gradient(45deg, #FF6B6B, #4ECDC4);
+    height: 20px;
+    border-radius: 7px;
+    transition: width 0.5s ease-out;
 }
 """
 
@@ -217,13 +227,14 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
         value=28,
     )
 
-    gr.Examples(
+    examples_section = gr.Examples(
         examples=examples,
         inputs=[prompt],
         outputs=[result, result_full, seed],
         fn=infer,
         cache_examples=True,
-        label="Click on any example to try it out"
+        label="Click on any example to try it out",
+        elem_classes=["examples-table"]
     )
 
     gr.on(
@@ -233,4 +244,14 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
         outputs=[result, result_full, seed]
     )
 
+# Theme color override
+demo.theme = gr.themes.Default().set(
+    body_text_color="black",
+    block_label_text_color="black",
+    block_title_text_color="black",
+    body_text_color_subdued="black",
+    description_text_color="black",
+    background_fill_primary="white",
+)
+
 demo.queue().launch()
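
Note on the progress hook: the callback / callback_steps keywords added above follow diffusers' older per-step callback API. Recent Flux pipelines in diffusers expose the same hook through callback_on_step_end, which receives the pipeline, the step index, the timestep, and a dict of tensor kwargs and is expected to return that dict. A minimal sketch of the same progress reporting under that assumption (make_step_callback is an illustrative helper, not part of app.py):

import gradio as gr

def make_step_callback(progress: gr.Progress, num_inference_steps: int):
    # Build a diffusers callback_on_step_end hook that forwards per-step
    # progress to Gradio's progress bar.
    def on_step_end(pipeline, step, timestep, callback_kwargs):
        progress((step + 1) / num_inference_steps,
                 desc=f"Step {step + 1}/{num_inference_steps}")
        return callback_kwargs  # the hook must return the kwargs dict
    return on_step_end

# Usage inside infer(), if the installed diffusers version rejects
# callback= / callback_steps=:
#   image = pipe(
#       prompt=prompt_template,
#       num_inference_steps=num_inference_steps,
#       height=320,
#       width=1280,
#       callback_on_step_end=make_step_callback(progress, num_inference_steps),
#   ).images[0]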
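
For context, the pipeline renders a single 1280x320 strip and export_to_gif(split_image(image, 4), gif_name, fps=4) turns it into a four-frame GIF. split_image is defined earlier in app.py and is unchanged by this commit; a hypothetical sketch of such a helper, assuming it crops the strip into equal-width frames from left to right:

from PIL import Image

def split_image(image: Image.Image, n_frames: int) -> list[Image.Image]:
    # Hypothetical reimplementation for illustration only: crop a wide strip
    # (e.g. 1280x320) into n_frames equal tiles (e.g. four 320x320 frames),
    # ordered left to right so export_to_gif plays them in sequence.
    frame_width = image.width // n_frames
    return [
        image.crop((i * frame_width, 0, (i + 1) * frame_width, image.height))
        for i in range(n_frames)
    ]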