glenn-jocher committed on
Commit
c8e5181
·
1 Parent(s): c687d5c

hyp evolution force-autoanchor fix

Browse files
Files changed (1) hide show
  1. train.py +2 -2
train.py CHANGED
@@ -68,10 +68,10 @@ def train(hyp, opt, device, tb_writer=None):
68
  with torch_distributed_zero_first(rank):
69
  attempt_download(weights) # download if not found locally
70
  ckpt = torch.load(weights, map_location=device) # load checkpoint
71
- if 'anchors' in hyp and hyp['anchors']:
72
  ckpt['model'].yaml['anchors'] = round(hyp['anchors']) # force autoanchor
73
  model = Model(opt.cfg or ckpt['model'].yaml, ch=3, nc=nc).to(device) # create
74
- exclude = ['anchor'] if opt.cfg else [] # exclude keys
75
  state_dict = ckpt['model'].float().state_dict() # to FP32
76
  state_dict = intersect_dicts(state_dict, model.state_dict(), exclude=exclude) # intersect
77
  model.load_state_dict(state_dict, strict=False) # load
 
68
  with torch_distributed_zero_first(rank):
69
  attempt_download(weights) # download if not found locally
70
  ckpt = torch.load(weights, map_location=device) # load checkpoint
71
+ if hyp.get('anchors'):
72
  ckpt['model'].yaml['anchors'] = round(hyp['anchors']) # force autoanchor
73
  model = Model(opt.cfg or ckpt['model'].yaml, ch=3, nc=nc).to(device) # create
74
+ exclude = ['anchor'] if opt.cfg or hyp.get('anchors') else [] # exclude keys
75
  state_dict = ckpt['model'].float().state_dict() # to FP32
76
  state_dict = intersect_dicts(state_dict, model.state_dict(), exclude=exclude) # intersect
77
  model.load_state_dict(state_dict, strict=False) # load