henry000 committed
Commit 60c4943 · 1 Parent(s): 1132d27

🔨 [Remove] device in loss func, not sure correct

yolo/tools/loss_functions.py CHANGED
@@ -14,8 +14,8 @@ class BCELoss(nn.Module):
     def __init__(self) -> None:
         super().__init__()
         # TODO: Refactor the device, should be assign by config
-        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-        self.bce = BCEWithLogitsLoss(pos_weight=torch.tensor([1.0], device=device), reduction="none")
+        # TODO: origin v9 assing pos_weight == 1?
+        self.bce = BCEWithLogitsLoss(reduction="none")
 
     def forward(self, predicts_cls: Tensor, targets_cls: Tensor, cls_norm: Tensor) -> Any:
         return self.bce(predicts_cls, targets_cls).sum() / cls_norm
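
The change rests on two points worth spelling out: a pos_weight of 1 leaves BCEWithLogitsLoss numerically unchanged, so dropping the argument is a no-op, and the hard-coded torch.device lookup existed only to place that one tensor. The minimal sketch below (not from the repo; the BCELossNoDevice name is hypothetical) checks that equivalence and shows one device-agnostic alternative, registering the weight as a buffer so it follows the module's .to(device) instead of being pinned at construction time.

# Sketch only: BCELossNoDevice is an illustrative name, not code from this commit.
import torch
from torch import Tensor, nn
from torch.nn import BCEWithLogitsLoss


class BCELossNoDevice(nn.Module):
    """Keeps a pos_weight without pinning a device at construction time."""

    def __init__(self) -> None:
        super().__init__()
        # A registered buffer follows the module through .to(device) / .cuda(),
        # so the removed torch.device("cuda" if ... else "cpu") lookup is unnecessary.
        self.register_buffer("pos_weight", torch.ones(1))

    def forward(self, predicts_cls: Tensor, targets_cls: Tensor, cls_norm: Tensor) -> Tensor:
        loss = nn.functional.binary_cross_entropy_with_logits(
            predicts_cls, targets_cls, pos_weight=self.pos_weight, reduction="none"
        )
        return loss.sum() / cls_norm


if __name__ == "__main__":
    logits, targets = torch.randn(4, 80), torch.rand(4, 80)
    plain = BCEWithLogitsLoss(reduction="none")(logits, targets)
    weighted = BCEWithLogitsLoss(pos_weight=torch.ones(1), reduction="none")(logits, targets)
    # pos_weight == 1 is a no-op, so dropping it (as this commit does) leaves the loss identical.
    assert torch.allclose(plain, weighted)

If the original v9 implementation really does use pos_weight == 1, as the new TODO speculates, the plain BCEWithLogitsLoss(reduction="none") kept by this commit behaves identically; the buffer variant only matters if the weight ever needs to differ from 1.
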
yolo/utils/model_utils.py CHANGED
@@ -99,7 +99,7 @@ def initialize_distributed() -> None:
 
     torch.cuda.set_device(local_rank)
     dist.init_process_group(backend="nccl", rank=rank, world_size=world_size)
-    logger.info(f"Initialized process group; rank: {rank}, size: {world_size}")
+    logger.info(f"🔢 Initialized process group; rank: {rank}, size: {world_size}")
     return local_rank
 
 