'''
# author: Zhiyuan Yan
# email: [email protected]
# date: 2023-07-06
# description: Class for the EfficientDetector
Functions in the Class are summarized as:
1. __init__: Initialization
2. build_backbone: Backbone-building
3. build_loss: Loss-function-building
4. features: Feature-extraction
5. classifier: Classification
6. get_losses: Loss-computation
7. get_train_metrics: Training-metrics-computation
8. get_test_metrics: Testing-metrics-computation
9. forward: Forward-propagation
Reference:
@inproceedings{tan2019efficientnet,
title={Efficientnet: Rethinking model scaling for convolutional neural networks},
author={Tan, Mingxing and Le, Quoc},
booktitle={International conference on machine learning},
pages={6105--6114},
year={2019},
organization={PMLR}
}
'''
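# Expected config keys (illustrative sketch; the concrete values are assumptions and
# should come from the project's YAML configuration for this detector):
#   config = {
#       'backbone_name': 'efficientnetb4',     # key into the BACKBONE registry
#       'backbone_config': {...},              # dict handed to the backbone constructor
#       'pretrained': '/path/to/weights.pth',  # or the string 'None' to skip pretrained weights
#       'loss_func': 'cross_entropy',          # key into the LOSSFUNC registry (assumed name)
#   }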
import os
import datetime
import logging
import numpy as np
from sklearn import metrics
from typing import Union
from collections import defaultdict
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.nn import DataParallel
from torch.utils.tensorboard import SummaryWriter
from metrics.base_metrics_class import calculate_metrics_for_train
from .base_detector import AbstractDetector
from detectors import DETECTOR
from networks import BACKBONE
from loss import LOSSFUNC
import random
logger = logging.getLogger(__name__)
@DETECTOR.register_module(module_name='efficientnetb4')
class EfficientDetector(AbstractDetector):
def __init__(self, config):
super().__init__()
self.config = config
self.backbone = self.build_backbone(config)
self.loss_func = self.build_loss(config)
def build_backbone(self, config):
# prepare the backbone
backbone_class = BACKBONE[config['backbone_name']]
model_config = config['backbone_config']
model_config['pretrained'] = self.config['pretrained']
backbone = backbone_class(model_config)
        if config['pretrained'] != 'None':
            # model_config['pretrained'] is passed to the backbone, which handles the weight loading
            logger.info('Loaded pretrained weights for the backbone.')
        else:
            logger.info('No pretrained weights specified for the backbone.')
return backbone
def build_loss(self, config):
# prepare the loss function
loss_class = LOSSFUNC[config['loss_func']]
loss_func = loss_class()
return loss_func
    def features(self, data_dict: dict) -> torch.Tensor:
        # extract feature maps from the input images via the backbone
        x = self.backbone.features(data_dict['image'])
        return x
    def classifier(self, features: torch.Tensor) -> torch.Tensor:
        # map backbone features to class logits
        return self.backbone.classifier(features)
def get_losses(self, data_dict: dict, pred_dict: dict) -> dict:
label = data_dict['label']
pred = pred_dict['cls']
loss = self.loss_func(pred, label)
loss_dict = {'overall': loss}
return loss_dict
def get_train_metrics(self, data_dict: dict, pred_dict: dict) -> dict:
label = data_dict['label']
pred = pred_dict['cls']
# compute metrics for batch data
auc, eer, acc, ap = calculate_metrics_for_train(label.detach(), pred.detach())
metric_batch_dict = {'acc': acc, 'auc': auc, 'eer': eer, 'ap': ap}
return metric_batch_dict
def forward(self, data_dict: dict, inference=False) -> dict:
# get the features by backbone
features = self.features(data_dict)
# get the prediction by classifier
pred = self.classifier(features)
        # get the probability of the positive class (index 1)
        prob = torch.softmax(pred, dim=1)[:, 1]
# build the prediction dict for each output
pred_dict = {'cls': pred, 'prob': prob, 'feat': features}
return pred_dict
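
if __name__ == '__main__':
    # Minimal smoke-test sketch of the detector's interface. All config values and the
    # input resolution below are assumptions (they depend on the project's registries and
    # YAML configs), so adjust them to the actual configuration before running this.
    example_config = {
        'backbone_name': 'efficientnetb4',
        'backbone_config': {'num_classes': 2},   # assumed backbone options
        'pretrained': 'None',                    # the string 'None' means no pretrained weights
        'loss_func': 'cross_entropy',            # assumed LOSSFUNC registry name
    }
    detector = EfficientDetector(example_config)
    batch = {
        'image': torch.randn(2, 3, 256, 256),    # assumed input resolution
        'label': torch.tensor([0, 1]),
    }
    pred_dict = detector(batch)                  # forward(): features -> logits -> probabilities
    loss_dict = detector.get_losses(batch, pred_dict)
    print(pred_dict['prob'].shape, loss_dict['overall'].item())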