glenn-jocher committed
Commit 9468657 (unverified) · Parent(s): 8d3c3ef

Update profiler (#4236)

Files changed (1):
  1. utils/torch_utils.py +4 -3
utils/torch_utils.py CHANGED

@@ -22,6 +22,8 @@ try:
     import thop  # for FLOPs computation
 except ImportError:
     thop = None
+
+logging.basicConfig(format="%(message)s", level=logging.INFO)
 LOGGER = logging.getLogger(__name__)
 
 
@@ -103,11 +105,10 @@ def profile(x, ops, n=100, device=None):
     #     m2 = nn.SiLU()
     #     profile(x, [m1, m2], n=100)  # profile speed over 100 iterations
 
-    device = device or torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
+    device = device or select_device()
     x = x.to(device)
     x.requires_grad = True
-    print(torch.__version__, device.type, torch.cuda.get_device_properties(0) if device.type == 'cuda' else '')
-    print(f"\n{'Params':>12s}{'GFLOPs':>12s}{'forward (ms)':>16s}{'backward (ms)':>16s}{'input':>24s}{'output':>24s}")
+    print(f"{'Params':>12s}{'GFLOPs':>12s}{'forward (ms)':>16s}{'backward (ms)':>16s}{'input':>24s}{'output':>24s}")
     for m in ops if isinstance(ops, list) else [ops]:
         m = m.to(device) if hasattr(m, 'to') else m  # device
         m = m.half() if hasattr(m, 'half') and isinstance(x, torch.Tensor) and x.dtype is torch.float16 else m  # type
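For context, the profile() helper is used exactly as its own comment block suggests. Below is a minimal sketch of that usage, not taken from the repository itself: the 16x3x640x640 input shape and the hand-written SiLU lambda are illustrative, and the import assumes the YOLOv5 repo root is on the Python path. With this commit, device selection is delegated to select_device(), which logs the torch version and device properties that the removed print line used to show.

# Minimal usage sketch of profile() after this commit (illustrative, not from the repo).
# Assumes the YOLOv5 repo root is on the Python path; input shape is an assumption.
import torch
import torch.nn as nn

from utils.torch_utils import profile

x = torch.randn(16, 3, 640, 640)     # dummy input batch
m1 = lambda t: t * torch.sigmoid(t)  # SiLU written by hand, for comparison
m2 = nn.SiLU()                       # built-in SiLU module
profile(x, [m1, m2], n=100)          # prints Params, GFLOPs, forward/backward ms per op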