#pragma once
// @generated by torchgen/gen.py from DispatchKeyFunctions_inl.h
// NB: The implementing C++ file is RegisterDispatchKey.cpp
// The only #includes we need are for custom classes that have defaults in the C++ API
#include <c10/core/MemoryFormat.h>
#include <c10/core/Scalar.h>
#include <ATen/core/Reduction.h>
#if defined(AT_PER_OPERATOR_HEADERS) && defined(TORCH_ASSERT_ONLY_METHOD_OPERATORS)
#error This change adds a dependency on all pytorch operators, meaning the \
file will need to be re-compiled every time an operator is changed or added. \
Consider including a specific operator from \
<ATen/ops/{my_operator}_compositeimplicitautogradnestedtensor_dispatch.h>. \
See NOTE [TORCH_ASSERT_ONLY_METHOD_OPERATORS].
#endif
#include <ATen/ops/reshape_compositeimplicitautogradnestedtensor_dispatch.h>
#include <ATen/ops/reshape_as_compositeimplicitautogradnestedtensor_dispatch.h>
#include <ATen/ops/unbind_compositeimplicitautogradnestedtensor_dispatch.h>
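
// Usage sketch (a hedged example, assuming torchgen declares these operators
// in the at::compositeimplicitautogradnestedtensor namespace, mirroring other
// dispatch-key namespaces such as at::cpu): under AT_PER_OPERATOR_HEADERS, a
// translation unit that needs only one operator should include that operator's
// specific dispatch header instead of this aggregate header, so it is only
// rebuilt when that operator changes:
//
//   #define TORCH_ASSERT_ONLY_METHOD_OPERATORS
//   #include <ATen/ops/reshape_compositeimplicitautogradnestedtensor_dispatch.h>
//
//   at::Tensor flatten_nested(const at::Tensor& nested) {
//     // Calls the CompositeImplicitAutogradNestedTensor kernel directly,
//     // bypassing the dispatcher. flatten_nested is an illustrative helper.
//     return at::compositeimplicitautogradnestedtensor::reshape(nested, {-1});
//   }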