# Copyright (c) OpenMMLab. All rights reserved.
from typing import Optional, Sequence

from mmengine.hooks import Hook

from mmpretrain.registry import HOOKS


@HOOKS.register_module()
class SimSiamHook(Hook):
    """Hook for SimSiam.

    This hook is for SimSiam to fix learning rate of predictor.

    Args:
        fix_pred_lr (bool): whether to fix the lr of predictor or not.
        lr (float): the value of fixed lr.
        adjust_by_epoch (bool, optional): whether to set lr by epoch or iter.
            Defaults to True.
    """

    def __init__(self,
                 fix_pred_lr: bool,
                 lr: float,
                 adjust_by_epoch: bool = True) -> None:
        self.fix_pred_lr = fix_pred_lr
        self.lr = lr
        self.adjust_by_epoch = adjust_by_epoch

    def before_train_iter(self,
                          runner,
                          batch_idx: int,
                          data_batch: Optional[Sequence[dict]] = None) -> None:
        """Fix the learning rate of the predictor before each iteration."""
        if self.adjust_by_epoch:
            return
        if self.fix_pred_lr:
            # Override whatever the scheduler set for this iteration on every
            # param group flagged with ``fix_lr``.
            for param_group in runner.optim_wrapper.optimizer.param_groups:
                if 'fix_lr' in param_group and param_group['fix_lr']:
                    param_group['lr'] = self.lr

    def before_train_epoch(self, runner) -> None:
        """Fix the learning rate of the predictor before each epoch."""
        if self.fix_pred_lr:
            # Same override as above, applied once per epoch when
            # ``adjust_by_epoch`` is True.
            for param_group in runner.optim_wrapper.optimizer.param_groups:
                if 'fix_lr' in param_group and param_group['fix_lr']:
                    param_group['lr'] = self.lr
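
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the hook itself). In an
# MMEngine-style config, the hook is enabled through ``custom_hooks``; the
# exact ``paramwise_cfg`` layout below is an assumption about how the
# ``fix_lr`` flag reaches the predictor's param groups:
#
#     optim_wrapper = dict(
#         type='OptimWrapper',
#         optimizer=dict(
#             type='SGD', lr=0.05, momentum=0.9, weight_decay=1e-4),
#         # Mark the predictor's parameters so the hook can find them.
#         paramwise_cfg=dict(custom_keys={'predictor': dict(fix_lr=True)}))
#
#     custom_hooks = [
#         dict(type='SimSiamHook', fix_pred_lr=True, lr=0.05),
#     ]
# ---------------------------------------------------------------------------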