import numpy as np
import torch

# Numba (CUDA backend) is optional; fall back to None if it is not installed.
try:
    import numba
except ImportError:
    numba = None

# CUDA thread-block size: 32 or 16.
THREADS_PER_BLOCK = 32

# Default floating-point types for each backend. The Numba types are only
# defined when Numba is installed.
DEFAULT_NUMPY_FLOAT_TYPE = np.float32
DEFAULT_CUDA_FLOAT_TYPE = numba.float32 if numba is not None else None
DEFAULT_TORCH_FLOAT_TYPE = torch.float32

# Default integer types for each backend.
DEFAULT_NUMPY_INT_TYPE = np.int32
DEFAULT_CUDA_INT_TYPE = numba.int32 if numba is not None else None
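

# --- Hypothetical usage sketch (illustrative only, not part of the original
# module): one way these constants are typically consumed, assuming square 2D
# CUDA blocks of side THREADS_PER_BLOCK and a made-up `scale` kernel.
if __name__ == "__main__":
    # Allocate host-side buffers with the default backend dtypes.
    arr = np.zeros((64, 64), dtype=DEFAULT_NUMPY_FLOAT_TYPE)
    tens = torch.zeros(64, 64, dtype=DEFAULT_TORCH_FLOAT_TYPE)
    print(arr.dtype, tens.dtype)

    if numba is not None:
        from math import ceil
        from numba import cuda

        if cuda.is_available():
            @cuda.jit
            def scale(out, factor):
                # One thread per output element, guarded against out-of-range indices.
                i, j = cuda.grid(2)
                if i < out.shape[0] and j < out.shape[1]:
                    out[i, j] *= factor

            d_arr = cuda.to_device(arr)
            blocks = (ceil(arr.shape[0] / THREADS_PER_BLOCK),
                      ceil(arr.shape[1] / THREADS_PER_BLOCK))
            scale[blocks, (THREADS_PER_BLOCK, THREADS_PER_BLOCK)](
                d_arr, DEFAULT_NUMPY_FLOAT_TYPE(2.0))
            arr = d_arr.copy_to_host()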