rrelu.h

#pragma once

// @generated by torchgen/gen.py from Function.h

#include <ATen/Context.h>
#include <ATen/DeviceGuard.h>
#include <ATen/TensorUtils.h>
#include <ATen/TracerMode.h>
#include <ATen/core/Generator.h>
#include <ATen/core/Reduction.h>
#include <ATen/core/Tensor.h>
#include <c10/core/Scalar.h>
#include <c10/core/Storage.h>
#include <c10/core/TensorOptions.h>
#include <c10/util/Deprecated.h>
#include <c10/util/Optional.h>

#include <ATen/ops/rrelu_ops.h>

namespace at {

// aten::rrelu(Tensor self, Scalar lower=0.125, Scalar upper=0.3333333333333333, bool training=False, Generator? generator=None) -> Tensor
inline at::Tensor rrelu(const at::Tensor & self, const at::Scalar & lower=0.125, const at::Scalar & upper=0.3333333333333333, bool training=false, c10::optional<at::Generator> generator=c10::nullopt) {
    return at::_ops::rrelu::call(self, lower, upper, training, generator);
}

// aten::rrelu_(Tensor(a!) self, Scalar lower=0.125, Scalar upper=0.3333333333333333, bool training=False, Generator? generator=None) -> Tensor(a!)
inline at::Tensor & rrelu_(at::Tensor & self, const at::Scalar & lower=0.125, const at::Scalar & upper=0.3333333333333333, bool training=false, c10::optional<at::Generator> generator=c10::nullopt) {
    return at::_ops::rrelu_::call(self, lower, upper, training, generator);
}

}
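
A minimal usage sketch, not part of the generated header, assuming a standard ATen/libtorch build; the tensor `x` and the `main` wrapper are illustrative:

#include <ATen/ATen.h>

int main() {
    // Apply RReLU in training mode: each negative element is scaled by a
    // slope drawn uniformly from [lower, upper]; positive elements pass
    // through unchanged.
    at::Tensor x = at::randn({2, 3});
    at::Tensor y = at::rrelu(x, /*lower=*/0.125, /*upper=*/1.0 / 3.0,
                             /*training=*/true);

    // In-place variant using the header's defaults; with training=false the
    // negative slope is the fixed value (lower + upper) / 2.
    at::rrelu_(x);
    return 0;
}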