Feng Wang committed
Commit 395f082 · 1 Parent(s): f797b2c

feat(evaluator): support logging per class mAP during evaluation (#1026)

* feat(evaluator): support logging per class mAP during evaluation

* make linter happy

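Background for the change: the per-class numbers this commit logs are read out of the precision array that pycocotools' COCOeval accumulates, sliced one category at a time with the undefined entries dropped before averaging. A minimal, self-contained sketch of that idea on a synthetic array (array shapes follow COCOeval defaults; the values are random and purely illustrative):

import numpy as np

# Synthetic stand-in for COCOeval.eval["precision"]. With default COCOeval settings its
# axes are (10 IoU thresholds, 101 recall thresholds, num_classes, 4 area ranges, 3 max-dets).
num_classes = 3
precisions = np.random.rand(10, 101, num_classes, 4, 3)
precisions[:, :, 1, 0, -1] = -1  # COCOeval marks undefined entries with -1

for idx in range(num_classes):
    p = precisions[:, :, idx, 0, -1]  # area range "all", largest max-dets setting
    p = p[p > -1]                     # drop undefined entries before averaging
    ap = float(np.mean(p)) * 100 if p.size else float("nan")
    print(f"class {idx}: AP = {ap:.3f}")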
tools/eval.py CHANGED
@@ -143,6 +143,7 @@ def main(exp, args, num_gpu):
     logger.info("Model Structure:\n{}".format(str(model)))
 
     evaluator = exp.get_evaluator(args.batch_size, is_distributed, args.test, args.legacy)
+    evaluator.per_class_mAP = True
 
     torch.cuda.set_device(rank)
     model.cuda(rank)
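The line added above hard-codes the flag for tools/eval.py. The same switch could be flipped from a custom evaluation script; a minimal sketch, assuming the usual yolox.exp.get_exp and Exp.get_evaluator interfaces (the experiment name and batch size below are placeholders):

from yolox.exp import get_exp

# Hypothetical driver script: "yolox-s" and batch_size=8 are arbitrary example choices.
exp = get_exp(exp_file=None, exp_name="yolox-s")
evaluator = exp.get_evaluator(batch_size=8, is_distributed=False)

# Same effect as the edit to tools/eval.py: the evaluator appends the per-class
# table to its summary string when evaluate() runs.
evaluator.per_class_mAP = True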
yolox/evaluators/coco_evaluator.py CHANGED
@@ -9,10 +9,14 @@ import json
 import tempfile
 import time
 from loguru import logger
+from tabulate import tabulate
 from tqdm import tqdm
 
+import numpy as np
+
 import torch
 
+from yolox.data.datasets import COCO_CLASSES
 from yolox.utils import (
     gather,
     is_main_process,
@@ -23,6 +27,30 @@ from yolox.utils import (
 )
 
 
+def per_class_mAP_table(coco_eval, class_names=COCO_CLASSES, headers=["class", "AP"], colums=6):
+    per_class_mAP = {}
+    precisions = coco_eval.eval["precision"]
+    # precision has dims (iou, recall, cls, area range, max dets)
+    assert len(class_names) == precisions.shape[2]
+
+    for idx, name in enumerate(class_names):
+        # area range index 0: all area ranges
+        # max dets index -1: typically 100 per image
+        precision = precisions[:, :, idx, 0, -1]
+        precision = precision[precision > -1]
+        ap = np.mean(precision) if precision.size else float("nan")
+        per_class_mAP[name] = float(ap * 100)
+
+    num_cols = min(colums, len(per_class_mAP) * len(headers))
+    result_pair = [x for pair in per_class_mAP.items() for x in pair]
+    row_pair = itertools.zip_longest(*[result_pair[i::num_cols] for i in range(num_cols)])
+    table_headers = headers * (num_cols // len(headers))
+    table = tabulate(
+        row_pair, tablefmt="pipe", floatfmt=".3f", headers=table_headers, numalign="left",
+    )
+    return table
+
+
 class COCOEvaluator:
     """
     COCO AP Evaluation class. All the data in the val2017 dataset are processed
@@ -30,16 +58,24 @@ class COCOEvaluator:
     """
 
     def __init__(
-        self, dataloader, img_size, confthre, nmsthre, num_classes, testdev=False
+        self,
+        dataloader,
+        img_size: int,
+        confthre: float,
+        nmsthre: float,
+        num_classes: int,
+        testdev: bool = False,
+        per_class_mAP: bool = False,
     ):
         """
         Args:
             dataloader (Dataloader): evaluate dataloader.
-            img_size (int): image size after preprocess. images are resized
+            img_size: image size after preprocess. images are resized
                 to squares whose shape is (img_size, img_size).
-            confthre (float): confidence threshold ranging from 0 to 1, which
+            confthre: confidence threshold ranging from 0 to 1, which
                 is defined in the config file.
-            nmsthre (float): IoU threshold of non-max supression ranging from 0 to 1.
+            nmsthre: IoU threshold of non-max supression ranging from 0 to 1.
+            per_class_mAP: Show per class mAP during evalution or not. Default to False.
         """
         self.dataloader = dataloader
         self.img_size = img_size
@@ -47,6 +83,7 @@ class COCOEvaluator:
         self.nmsthre = nmsthre
         self.num_classes = num_classes
         self.testdev = testdev
+        self.per_class_mAP = per_class_mAP
 
     def evaluate(
         self,
@@ -216,6 +253,8 @@
             with contextlib.redirect_stdout(redirect_string):
                 cocoEval.summarize()
             info += redirect_string.getvalue()
+            if self.per_class_mAP:
+                info += "per class mAP:\n" + per_class_mAP_table(cocoEval)
             return cocoEval.stats[0], cocoEval.stats[1], info
         else:
             return 0, 0, info
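To see how per_class_mAP_table lays out its output, the name/value interleaving and tabulate call can be exercised on a tiny fake result dict (class names and AP values below are invented; with six columns each row reads class | AP | class | AP | class | AP):

import itertools

from tabulate import tabulate

# Fake per-class results, purely to illustrate the layout logic used above.
per_class_mAP = {"person": 54.1, "bicycle": 30.2, "car": 43.5, "dog": 61.0, "cat": 64.3}
headers = ["class", "AP"]
colums = 6

num_cols = min(colums, len(per_class_mAP) * len(headers))
# Flatten to [name, AP, name, AP, ...], split into num_cols interleaved columns,
# then zip the columns back into rows; zip_longest pads the last row with None.
result_pair = [x for pair in per_class_mAP.items() for x in pair]
row_pair = itertools.zip_longest(*[result_pair[i::num_cols] for i in range(num_cols)])
table_headers = headers * (num_cols // len(headers))
print(tabulate(row_pair, tablefmt="pipe", floatfmt=".3f", headers=table_headers, numalign="left"))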