abrar-lohia committed
Commit ffd7ac5 · 1 Parent(s): 66736eb

Update app.py

Files changed (1)
  1. app.py +1 -35
app.py CHANGED
@@ -267,31 +267,6 @@ with demo:
  <b>Text-2-Motion-GPT</b> models for human motion creation from textural descriptors.
  </div>
  ''')
- # with gr.Row():
- # with gr.Column():
- # gr.Markdown('''
- # <figure>
- # <img src="https://huggingface.co/vumichien/T2M-GPT/resolve/main/demo_slow1.gif" alt="Demo Slow", width="425", height=480/>
- # <figcaption> a man starts off in an up right position with botg arms extended out by his sides, he then brings his arms down to his body and claps his hands together. after this he wals down amd the the left where he proceeds to sit on a seat
- # </figcaption>
- # </figure>
- # ''')
- # with gr.Column():
- # gr.Markdown('''
- # <figure>
- # <img src="https://huggingface.co/vumichien/T2M-GPT/resolve/main/demo_slow2.gif" alt="Demo Slow 2", width="425", height=480/>
- # <figcaption> a person puts their hands together, leans forwards slightly then swings the arms from right to left
- # </figcaption>
- # </figure>
- # ''')
- # with gr.Column():
- # gr.Markdown('''
- # <figure>
- # <img src="https://huggingface.co/vumichien/T2M-GPT/resolve/main/demo_slow3.gif" alt="Demo Slow 3", width="425", height=480/>
- # <figcaption> a man is practicing the waltz with a partner
- # </figcaption>
- # </figure>
- # ''')
  with gr.Row():
  with gr.Column():
  gr.Markdown('''
@@ -300,21 +275,12 @@
  with gr.Column():
  with gr.Row():
  text_prompt.render()
- method = gr.Dropdown(["slow", "fast"], label="Method", value="slow")
+ method = gr.Dropdown(["fast"], label="Method", value="fast")
  with gr.Row():
  generate_btn = gr.Button("Generate")
  generate_btn.click(predict, [text_prompt, method], [video_out], api_name="generate")
  print(video_out)
  with gr.Row():
  video_out.render()
- # with gr.Row():
- # gr.Markdown('''
- # ### You can test by following examples:
- # ''')
- # examples = gr.Examples(examples=
- # [ "a person jogs in place, slowly at first, then increases speed. they then back up and squat down.",
- # "a man steps forward and does a handstand",
- # "a man rises from the ground, walks in a circle and sits back down on the ground"],
- # label="Examples", inputs=[text_prompt])

 demo.launch(debug=True)
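
For context, here is a minimal, self-contained sketch of the UI wiring as it stands after this commit. It is an approximation, not the Space's actual app.py: in the real file, text_prompt and video_out are created earlier and only render()-ed in this hunk, and predict runs the T2M-GPT pipeline; both are stubbed here as hypothetical stand-ins.

```python
import gradio as gr

# Hypothetical stand-in for the Space's predict(); the real function runs
# the T2M-GPT pipeline and returns a rendered motion video for video_out.
def predict(text, method):
    print(f"prompt={text!r}, method={method!r}")
    return None  # the real app returns a video file path

with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            text_prompt = gr.Textbox(label="Text prompt")
            # After this commit the dropdown offers only "fast";
            # the "slow" option was removed along with its demo GIFs.
            method = gr.Dropdown(["fast"], label="Method", value="fast")
            generate_btn = gr.Button("Generate")
        with gr.Column():
            video_out = gr.Video(label="Generated motion")
    generate_btn.click(predict, [text_prompt, method], [video_out],
                       api_name="generate")

demo.launch(debug=True)
```

With a single choice, the Method dropdown is effectively a constant, but keeping the component preserves the existing generate API signature while pinning the behavior to "fast".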
 