# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

from typing import Iterable, Union

import torch


def get_grad_norm(
    parameters: Union[torch.Tensor, Iterable[torch.Tensor]], norm_type: float = 2.0
) -> torch.Tensor:
"""
Adapted from: https://pytorch.org/docs/stable/_modules/torch/nn/utils/clip_grad.html#clip_grad_norm_
"""
    if isinstance(parameters, torch.Tensor):
        parameters = [parameters]
    # Only consider parameters that have received gradients.
    parameters = [p for p in parameters if p.grad is not None]
    norm_type = float(norm_type)
    if len(parameters) == 0:
        return torch.tensor(0.0)
    device = parameters[0].grad.device
    # Norm of the per-parameter gradient norms, computed on the first gradient's device.
    total_norm = torch.norm(
        torch.stack([torch.norm(p.grad.detach(), norm_type).to(device) for p in parameters]),
        norm_type,
    )
    return total_norm
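
For context, a minimal usage sketch (not part of this file): get_grad_norm is typically called after loss.backward() and before the optimizer step, e.g. to log the gradient norm. The model, optimizer, and data names below are illustrative assumptions, not taken from this repository.

import torch

# Hypothetical usage example; the model/optimizer/data here are placeholders.
model = torch.nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

x = torch.randn(8, 4)
target = torch.randn(8, 1)

loss = torch.nn.functional.mse_loss(model(x), target)
loss.backward()

# Total 2-norm of all gradients, e.g. for logging before the optimizer step.
grad_norm = get_grad_norm(model.parameters())
print(f"grad norm: {grad_norm.item():.4f}")

optimizer.step()
optimizer.zero_grad()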