# Utility: release PyTorch's cached (unused) CUDA memory.
import torch
def clear_cuda_cache():
    """Release unoccupied cached GPU memory held by PyTorch's CUDA allocator.

    Prints a status message either way; does nothing beyond the notice when
    no CUDA device is available. Returns None.
    """
    # Guard clause: bail out early on CPU-only machines.
    if not torch.cuda.is_available():
        print("CUDA is not available on this device.")
        return
    torch.cuda.empty_cache()
    print("CUDA cache cleared.")
# Entry-point guard: clear the cache only when run as a script, so importing
# this module has no side effects.
if __name__ == "__main__":
    clear_cuda_cache()