Spaces: Running on Zero
Update gradio_utils/utils.py
- gradio_utils/utils.py +0 -24
gradio_utils/utils.py CHANGED
@@ -114,15 +114,6 @@ def estimate(model, data):
     with torch.no_grad():
         return model(data)
 
-
-# Custom JSON encoder to handle non-serializable objects
-class CustomEncoder(json.JSONEncoder):
-    def default(self, obj):
-        if isinstance(obj, np.ndarray):
-            return obj.tolist()
-        return super().default(obj)
-
-
 def process(query_img, state,
             cfg_path='configs/test/1shot_split1.py',
             checkpoint_path='ckpt/1shot_split1.pth'):
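For reference, the deleted CustomEncoder is the standard json.JSONEncoder subclassing pattern: override default() so that NumPy arrays are turned into lists before json.dumps serializes them. A minimal, self-contained sketch of that pattern (with a toy payload, not the app's real data) looks like this:

# Sketch of the removed pattern: a json.JSONEncoder subclass whose default()
# converts NumPy arrays to plain lists so json.dumps can serialize them.
import json
import numpy as np

class CustomEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)

payload = {"img_q": np.zeros((1, 3, 2, 2))}  # toy array, not the pipeline's real tensor
print(json.dumps(payload, cls=CustomEncoder)[:60])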
@@ -200,22 +191,7 @@ def process(query_img, state,
     if fp16_cfg is not None:
         wrap_fp16_model(model)
     load_checkpoint(model, checkpoint_path, map_location='cpu')
-
-    data["img_s"] = data["img_s"][0].cpu().numpy().tolist()
-    data["img_q"] = data["img_q"].cpu().numpy().tolist()
-    data['target_weight_s'][0] = data['target_weight_s'][0].cpu().numpy().tolist()
-    data['target_s'][0] = data['target_s'][0].cpu().numpy().tolist()
-
-    data['img_metas'][0]['sample_joints_3d'][0] = data['img_metas'][0]['sample_joints_3d'][0].cpu().tolist()
-    data['img_metas'][0]['query_joints_3d'] = data['img_metas'][0]['query_joints_3d'].cpu().tolist()
-    data['img_metas'][0]['sample_center'][0] = data['img_metas'][0]['sample_center'][0].cpu().tolist()
-    data['img_metas'][0]['query_center'] = data['img_metas'][0]['query_center'].cpu().tolist()
-    data['img_metas'][0]['sample_scale'][0] = data['img_metas'][0]['sample_scale'][0].cpu().tolist()
-    data['img_metas'][0]['query_scale'] = data['img_metas'][0]['query_scale'].cpu().tolist()
-
     model.eval()
-
-    str_data = json.dumps(data, cls=CustomEncoder)
     outputs = estimate(model, str_data)
 
     # visualize results
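The second hunk removes the tensor-to-list conversions and the json.dumps(data, cls=CustomEncoder) call that previously built str_data before outputs = estimate(model, str_data). As a hedged illustration only (toy tensors with assumed shapes, not the pipeline's real data dict), the removed flow amounted to:

# Toy illustration of the removed flow: move tensors to CPU, convert them to
# nested Python lists, then JSON-encode the whole dict before inference.
import json
import torch

data = {
    "img_q": torch.rand(1, 3, 4, 4),    # assumed toy shape
    "img_s": [torch.rand(1, 3, 4, 4)],  # list holding one support tensor
}
data["img_q"] = data["img_q"].cpu().numpy().tolist()
data["img_s"] = data["img_s"][0].cpu().numpy().tolist()
str_data = json.dumps(data)             # no custom encoder needed once values are plain lists
print(len(str_data))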