#pragma once
#include <ATen/native/DispatchStub.h>
#include <c10/util/Exception.h>
#include <c10/util/string_view.h>

namespace c10 {
class Scalar;
}

namespace at {
struct TensorIterator;
struct TensorIteratorBase;
class TensorBase;
}

namespace at { namespace native {
// These constants control the approximation behavior of the gelu function.
enum GeluType {
  None, // Baseline Gelu
  Tanh, // Tanh Gelu Approximation
  END
};

static GeluType get_gelutype_enum(const c10::string_view approximate) {
  if (approximate == "none") {
    return GeluType::None;
  } else if (approximate == "tanh") {
    return GeluType::Tanh;
  } else {
    TORCH_CHECK(false, "approximate argument must be either none or tanh.");
  }
}
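
// Usage sketch (not part of the original header): this helper maps the string
// `approximate` argument of gelu onto the enum; any other string throws a
// c10::Error via TORCH_CHECK. For example:
//
//   GeluType t = get_gelutype_enum("tanh"); // GeluType::Tanh
//   GeluType u = get_gelutype_enum("none"); // GeluType::None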
using structured_activation_fn = void (*)(TensorIteratorBase&);
using structured_activation_backward_fn = void (*)(TensorIteratorBase&);
using activation_fn = void (*)(TensorIterator&);
using activation_backward_fn = void (*)(TensorIterator&);
using softplus_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using softplus_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using threshold_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&);
using hardtanh_backward_fn = void (*)(TensorIterator&, const c10::Scalar&, const c10::Scalar&);
using hardsigmoid_fn = void (*)(TensorIteratorBase&);
using hardsigmoid_backward_fn = void (*)(TensorIteratorBase&);
using hardswish_fn = void (*)(TensorIterator&);
using hardswish_backward_fn = void (*)(TensorIterator&);
using shrink_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using softshrink_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using shrink_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using elu_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&, const c10::Scalar&);
using elu_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&, const c10::Scalar&, const c10::Scalar&, bool);
using leaky_relu_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using leaky_relu_backward_fn = void (*)(TensorIteratorBase&, const c10::Scalar&);
using log_sigmoid_cpu_fn = void (*)(TensorBase&, TensorBase&, const TensorBase&);
using gelu_fn = void (*)(TensorIteratorBase&, GeluType);
using gelu_backward_fn = void (*)(TensorIteratorBase&, GeluType);
using glu_jvp_fn = void (*)(TensorIteratorBase&);
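
// The declarations below follow the standard DispatchStub pattern: each stub
// is declared once here, and a concrete kernel with the matching signature is
// registered per backend via REGISTER_DISPATCH in the corresponding kernel
// file. A sketch of the wiring (the kernel name and call site below are
// illustrative, not taken from this header):
//
//   // in a CPU kernel translation unit:
//   // void elu_kernel(TensorIteratorBase& it, const c10::Scalar& alpha,
//   //                 const c10::Scalar& scale, const c10::Scalar& input_scale);
//   // REGISTER_DISPATCH(elu_stub, &elu_kernel);
//
//   // at a call site, dispatch resolves on the device type:
//   // elu_stub(iter.device_type(), iter, alpha, scale, input_scale);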
DECLARE_DISPATCH(elu_fn, elu_stub);
DECLARE_DISPATCH(elu_backward_fn, elu_backward_stub);
DECLARE_DISPATCH(softplus_fn, softplus_stub);
DECLARE_DISPATCH(softplus_backward_fn, softplus_backward_stub);
DECLARE_DISPATCH(log_sigmoid_cpu_fn, log_sigmoid_cpu_stub);
DECLARE_DISPATCH(activation_backward_fn, log_sigmoid_backward_stub);
DECLARE_DISPATCH(threshold_fn, threshold_stub);
DECLARE_DISPATCH(gelu_fn, GeluKernel);
DECLARE_DISPATCH(gelu_backward_fn, GeluBackwardKernel);
DECLARE_DISPATCH(hardtanh_backward_fn, hardtanh_backward_stub);
DECLARE_DISPATCH(hardsigmoid_fn, hardsigmoid_stub);
DECLARE_DISPATCH(hardsigmoid_backward_fn, hardsigmoid_backward_stub);
DECLARE_DISPATCH(hardswish_fn, hardswish_stub);
DECLARE_DISPATCH(hardswish_backward_fn, hardswish_backward_stub);
DECLARE_DISPATCH(shrink_fn, hardshrink_stub);
DECLARE_DISPATCH(softshrink_fn, softshrink_stub);
DECLARE_DISPATCH(shrink_backward_fn, shrink_backward_stub);
DECLARE_DISPATCH(leaky_relu_fn, leaky_relu_stub);
DECLARE_DISPATCH(leaky_relu_backward_fn, leaky_relu_backward_stub);
DECLARE_DISPATCH(structured_activation_fn, glu_stub);
DECLARE_DISPATCH(activation_backward_fn, glu_backward_stub);
DECLARE_DISPATCH(glu_jvp_fn, glu_jvp_stub);
DECLARE_DISPATCH(structured_activation_fn, silu_stub);
DECLARE_DISPATCH(structured_activation_backward_fn, silu_backward_stub);
DECLARE_DISPATCH(structured_activation_fn, mish_stub);
DECLARE_DISPATCH(activation_backward_fn, mish_backward_stub);
DECLARE_DISPATCH(activation_fn, prelu_stub);
DECLARE_DISPATCH(activation_backward_fn, prelu_backward_stub);
} // namespace native
} // namespace at