File size: 227 Bytes
ffaa9fc
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
import torch


def main():
    """Report how many CUDA GPUs torch can see and print a status line."""
    # Conditional expression replaces the original if/else branch;
    # device_count() is only queried when CUDA is actually available.
    num_gpus = torch.cuda.device_count() if torch.cuda.is_available() else 0
    print(f"Successfully ran on {num_gpus} GPUs")


if __name__ == "__main__":
    main()