RedispatchFunctions.h

#pragma once

// ${generated_comment}

#ifdef TORCH_ASSERT_ONLY_METHOD_OPERATORS
#error This change adds a dependency on all pytorch operators, meaning the     \
  file will need to be re-compiled every time an operator is changed or added. \
  Consider using the at::_ops::{name}::redispatch() interface by including     \
  the specific operator from <ATen/ops/{my_operator}_ops.h>
#endif

#include <c10/core/Scalar.h>
#include <ATen/Tensor.h>
#include <c10/core/Storage.h>
#include <ATen/core/Generator.h>
#include <c10/util/Deprecated.h>
#include <ATen/DeviceGuard.h>
#include <c10/core/TensorOptions.h>
#include <ATen/core/Reduction.h>
#include <c10/util/Optional.h>
#include <ATen/TensorUtils.h>
#include <ATen/Context.h>
#include <ATen/TracerMode.h>
#include <ATen/Operators.h>

namespace at {

namespace redispatch {
  ${function_redispatch_definitions}
} // namespace redispatch

} // namespace at
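
For reference, the code generator expands ${function_redispatch_definitions} into one inline wrapper per operator, each forwarding to the per-operator at::_ops::{name}::redispatch() interface mentioned in the #error message. The snippet below is only a sketch of what a single generated entry might look like, using add.Tensor as an assumed example; the exact names and signatures are produced by torchgen and may differ:

// Sketch of one generated entry (assumed example: aten::add.Tensor).
// aten::add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor
inline at::Tensor add(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha = 1) {
  // Forwards to the per-operator interface recommended by the #error above.
  return at::_ops::add_Tensor::redispatch(dispatchKeySet, self, other, alpha);
}

Code that only needs a few operators can avoid depending on this aggregate header (and being rebuilt whenever any operator changes) by including the specific per-operator header instead, e.g. <ATen/ops/add_ops.h>, and calling at::_ops::add_Tensor::redispatch() directly.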