faiimea committed on
Commit
eaf5641
·
verified ·
1 Parent(s): 2ec2c10

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -10
app.py CHANGED
@@ -2,6 +2,7 @@ import time
2
  import shutil
3
  import gradio as gr
4
  import os
 
5
  import json
6
  import torch
7
  import argparse
@@ -36,7 +37,43 @@ def load_and_preprocess(path, config, mean_pose, std_pose):
36
  motion_proj = torch.from_numpy(motion_proj).float()
37
 
38
  return motion_proj, start
39
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
40
  def handle_motion_generation(npy1,npy2):
41
  path1 = './data/a.npy'
42
  path2 = './data/b.npy'
@@ -49,7 +86,7 @@ def handle_motion_generation(npy1,npy2):
49
 
50
  config = get_config(config_path)
51
  ae = get_autoencoder(config)
52
- ae.load_state_dict(torch.load(checkpoint_path, map_location=torch.device('cpu')))
53
  # ae.cuda()
54
  ae.eval()
55
  mean_pose, std_pose = get_meanpose("test", config.data)
@@ -88,28 +125,28 @@ def handle_motion_generation(npy1,npy2):
88
  # print(motion_data.shape)
89
  # 调用函数生成视频
90
  motion2video(motion_data, height, width, save_path, colors, bg_color=bg_color, transparency=False, fps=fps)
 
91
  first_frame_image = Image.open('./an-frames/0000.png')
92
- return first_frame_image
93
 
 
94
  with gr.Blocks() as demo:
95
- gr.Markdown("Upload two `.npy` files to generate motion and visualize the first frame of the output animation.")
96
 
97
  with gr.Row():
98
  file1 = gr.File(file_types=[".npy"], label="Upload first .npy file")
99
  file2 = gr.File(file_types=[".npy"], label="Upload second .npy file")
100
 
101
  with gr.Row():
102
- generate_btn = gr.Button("Generate Motion")
103
 
104
- output_image = gr.Image(label="First Frame of the Generated Animation")
105
 
106
  generate_btn.click(
107
  fn=handle_motion_generation,
108
  inputs=[file1, file2],
109
- outputs=output_image
110
  )
111
 
112
-
113
  if __name__ == "__main__":
114
- # tsp_page.launch(debug = True)
115
- demo.launch()
 
2
  import shutil
3
  import gradio as gr
4
  import os
5
+ import cv2
6
  import json
7
  import torch
8
  import argparse
 
37
  motion_proj = torch.from_numpy(motion_proj).float()
38
 
39
  return motion_proj, start
40
+
41
def image2video(image_directory='./an-frames', output_video_path='./adv.mp4', fps=24):
    """Assemble the PNG frames in *image_directory* into an MP4 video.

    Frames are read in lexicographic filename order (the frame dumper writes
    zero-padded names such as ``0000.png``, so this matches temporal order)
    and resized to the resolution of the first frame when they differ.

    Args:
        image_directory: Directory containing the ``.png`` frames.
            Defaults to the generator's output directory.
        output_video_path: Path of the MP4 file to write.
        fps: Frame rate (frames per second) of the output video.

    Returns:
        The path of the written video file.

    Raises:
        FileNotFoundError: If the directory contains no ``.png`` files or
            the first frame cannot be read.
    """
    images = sorted(img for img in os.listdir(image_directory) if img.endswith(".png"))
    if not images:
        raise FileNotFoundError(f"No .png frames found in {image_directory}")

    # The first frame fixes the output resolution.
    first_frame = cv2.imread(os.path.join(image_directory, images[0]))
    if first_frame is None:
        raise FileNotFoundError(f"Could not read first frame {images[0]}")
    height, width, _ = first_frame.shape
    size = (width, height)

    # Create the video writer (mp4v codec for .mp4 output).
    out = cv2.VideoWriter(output_video_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, size)
    try:
        for img_name in images:
            frame = cv2.imread(os.path.join(image_directory, img_name))
            if frame is None:
                # Skip unreadable frames rather than crashing mid-encode.
                continue
            # Ensure each frame matches the video resolution.
            if (frame.shape[1], frame.shape[0]) != size:
                frame = cv2.resize(frame, size)
            out.write(frame)
    finally:
        # Always release the writer so the container is finalized,
        # even if a read or resize raises.
        out.release()

    print(f"Video saved to {output_video_path}")
    return output_video_path
76
+
77
  def handle_motion_generation(npy1,npy2):
78
  path1 = './data/a.npy'
79
  path2 = './data/b.npy'
 
86
 
87
  config = get_config(config_path)
88
  ae = get_autoencoder(config)
89
+ ae.load_state_dict(torch.load(checkpoint_path))
90
  # ae.cuda()
91
  ae.eval()
92
  mean_pose, std_pose = get_meanpose("test", config.data)
 
125
  # print(motion_data.shape)
126
  # 调用函数生成视频
127
  motion2video(motion_data, height, width, save_path, colors, bg_color=bg_color, transparency=False, fps=fps)
128
+ image2video()
129
  first_frame_image = Image.open('./an-frames/0000.png')
130
+ return './adv.mp4'
131
 
132
# Build the Gradio UI and show the generated video instead of a still image.
with gr.Blocks() as demo:
    gr.Markdown("Upload two `.npy` files to generate motion and visualize the animation.")

    # Two motion inputs, side by side.
    with gr.Row():
        file1 = gr.File(file_types=[".npy"], label="Upload first .npy file")
        file2 = gr.File(file_types=[".npy"], label="Upload second .npy file")

    with gr.Row():
        generate_btn = gr.Button("Generate Animation")

    # Video player for the rendered animation (fixed 500px display width).
    # NOTE(review): widget nesting reconstructed from a diff without
    # indentation — confirm output_video sits at Blocks level, not in a Row.
    output_video = gr.Video(label="Generated Animation",width = 500)

    # Wire the button to the generation pipeline; handle_motion_generation
    # returns the path of the rendered .mp4, which gr.Video displays.
    generate_btn.click(
        fn=handle_motion_generation,
        inputs=[file1, file2],
        outputs=output_video
    )

if __name__ == "__main__":
    demo.launch()