fantaxy committed
Commit 5a38e1b · verified · 1 Parent(s): a9f15ef

Update app.py

Files changed (1)
  1. app.py +57 -16
app.py CHANGED
@@ -13,7 +13,15 @@ from subprocess import getoutput
 is_shared_ui = True if "fffiloni/MimicMotion" in os.environ['SPACE_ID'] else False
 available_property = False if is_shared_ui else True
 
-
+is_gpu_associated = torch.cuda.is_available()
+if is_gpu_associated:
+    gpu_info = getoutput('nvidia-smi')
+    if("A10G" in gpu_info):
+        which_gpu = "A10G"
+    elif("T4" in gpu_info):
+        which_gpu = "T4"
+    else:
+        which_gpu = "CPU"
 
 def stream_output(pipe):
     for line in iter(pipe.readline, ''):
@@ -240,21 +248,54 @@ div#warning-ready > .gr-prose > h2, div#warning-ready > .gr-prose > p {
     color: #030303 !important;
 }
 """
-with gr.Blocks() as demo:
-    with gr.Row():
-        ref_image_in = gr.Image()
-        ref_video_in = gr.Video()
-        num_inference_steps = gr.Slider(minimum=12, maximum=50, value=25, step=1)
-        guidance_scale = gr.Slider(minimum=0.1, maximum=10, value=2, step=0.1)
-        output_frames_per_second = gr.Slider(minimum=1, maximum=60, value=16, step=1)
-        seed = gr.Number(value=42)
-        checkpoint_version = gr.Dropdown(choices=["MimicMotion_1.pth", "MimicMotion_1-1.pth"], value="MimicMotion_1.pth")
-        submit_btn = gr.Button("실행")
-
+with gr.Blocks(css=css) as demo:
+    with gr.Column():
+        gr.Markdown("# Mimic")
+        gr.Markdown("High-quality")
+        gr.HTML("""
+        1
+        """)
+        with gr.Row():
+            with gr.Column():
+                if is_shared_ui:
+                    top_description = gr.HTML(f'''
+                    2
+                    ''', elem_id="warning-duplicate")
+                else:
+                    if(is_gpu_associated):
+                        top_description = gr.HTML(f'''
+                        2
+                        ''', elem_id="warning-ready")
+                    else:
+                        top_description = gr.HTML(f'''
+                        2
+                        ''', elem_id="warning-setgpu")
+                with gr.Row():
+                    ref_image_in = gr.Image(label="Person Image Reference", type="filepath")
+                    ref_video_in = gr.Video(label="Person Video Reference")
+                with gr.Accordion("Advanced Settings", open=False):
+                    num_inference_steps = gr.Slider(label="num inference steps", minimum=12, maximum=50, value=25, step=1, interactive=available_property)
+                    guidance_scale = gr.Slider(label="guidance scale", minimum=0.1, maximum=10, value=2, step=0.1, interactive=available_property)
+                    with gr.Row():
+                        output_frames_per_second = gr.Slider(label="fps", minimum=1, maximum=60, value=16, step=1, interactive=available_property)
+                        seed = gr.Number(label="Seed", value=42, interactive=available_property)
+                    checkpoint_version = gr.Dropdown(label="Checkpoint Version", choices=["MimicMotion_1.pth", "MimicMotion_1-1.pth"], value="MimicMotion_1.pth", interactive=available_property, filterable=False)
+                submit_btn = gr.Button("Submit", interactive=available_property)
+                gr.Examples(
+                    examples = [
+                        ["./examples/demo1.jpg", "./examples/preview_1.mp4"]
+                    ],
+                    fn = load_examples,
+                    inputs = [ref_image_in, ref_video_in],
+                    outputs = [output_video],
+                    run_on_click = True,
+                    cache_examples = False
+                )
+            output_video.render()
     submit_btn.click(
-        fn=infer,
-        inputs=[ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version],
-        outputs=[gr.Video()]
+        fn = infer,
+        inputs = [ref_image_in, ref_video_in, num_inference_steps, guidance_scale, output_frames_per_second, seed, checkpoint_version],
+        outputs = [output_video]
     )
 
-demo.launch()
+demo.launch(show_api=False, show_error=False)
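
Note: the new Blocks wiring references names the diff does not show being defined. css is the style string whose closing """ appears in the second hunk's context, while output_video, load_examples, and infer live elsewhere in app.py. A minimal sketch of what those three could look like, assuming only the signatures implied by the gr.Examples and submit_btn.click calls above (hypothetical placeholders, not the Space's actual implementation):

import gradio as gr

# Hypothetical stand-ins for objects defined elsewhere in app.py.
# output_video is created before the Blocks so the layout can place it
# later with output_video.render() (Gradio's deferred-render pattern).
output_video = gr.Video(label="Output Video")

def load_examples(ref_image, ref_video):
    # Assumed helper for gr.Examples(run_on_click=True): it receives the
    # clicked example's image/video paths and returns a value for
    # output_video. A cached preview clip stands in here as a placeholder.
    return "./examples/preview_1.mp4"

def infer(ref_image, ref_video, num_inference_steps, guidance_scale,
          output_frames_per_second, seed, checkpoint_version):
    # Assumed signature matching the inputs list in submit_btn.click();
    # the real function would run the MimicMotion pipeline with the
    # selected checkpoint and return the path of the generated video.
    return "./outputs/result.mp4"  # placeholder path

With those pieces in place, clicking an example row fills both reference inputs and routes load_examples straight to output_video, while Submit sends the full parameter set through infer into the same slot; on the shared UI the advanced controls and the button stay non-interactive because interactive=available_property evaluates to False there.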