Llama-3.1-8B-DALv0.1/venv/lib/python3.12/site-packages/torch/include/ATen/xpu/XPUContext.h
namespace at::xpu {

// XPU is available if we compiled with XPU.
inline bool is_available() {
  return c10::xpu::device_count() > 0;
}

// Properties of the currently selected XPU device.
TORCH_XPU_API DeviceProp* getCurrentDeviceProperties();

// Properties of the XPU device with the given ATen device index.
TORCH_XPU_API DeviceProp* getDeviceProperties(DeviceIndex device);

// Global device index corresponding to an ATen XPU device index.
TORCH_XPU_API int32_t getGlobalIdxFromDevice(DeviceIndex device);

} // namespace at::xpu
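
Below is a minimal usage sketch of this header's API from application code. It assumes a libtorch build with XPU (SYCL) support; the DeviceProp member referenced (`name`) is an assumption about the struct's layout and is not declared in the snippet above.

// Hedged usage sketch (not part of the header). Assumes libtorch was built
// with XPU support and that DeviceProp exposes a `name` field.
#include <ATen/xpu/XPUContext.h>
#include <iostream>

int main() {
  // Runtime check: true only when the build has XPU and a device is visible.
  if (!at::xpu::is_available()) {
    std::cout << "No XPU device detected.\n";
    return 0;
  }
  at::DeviceIndex dev = 0;
  // Query properties of device 0; `name` is an assumed DeviceProp member.
  auto* prop = at::xpu::getDeviceProperties(dev);
  std::cout << "Device 0: " << prop->name << "\n";
  // Map the ATen device index to the global device enumeration index.
  std::cout << "Global index: " << at::xpu::getGlobalIdxFromDevice(dev) << "\n";
  return 0;
}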