activation.py

# flake8: noqa: F401
r"""Quantized Modules.

This file is being migrated to `torch/ao/nn/quantized` and is kept here
for compatibility while the migration is in progress.
If you are adding a new entry or functionality, please add it to the
appropriate file under `torch/ao/nn/quantized/modules` and add a
corresponding import statement here.
"""
from torch.ao.nn.quantized.modules.activation import ELU
from torch.ao.nn.quantized.modules.activation import Hardswish
from torch.ao.nn.quantized.modules.activation import LeakyReLU
from torch.ao.nn.quantized.modules.activation import MultiheadAttention
from torch.ao.nn.quantized.modules.activation import PReLU
from torch.ao.nn.quantized.modules.activation import ReLU6
from torch.ao.nn.quantized.modules.activation import Sigmoid
from torch.ao.nn.quantized.modules.activation import Softmax
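
# A minimal sanity-check sketch (not part of the upstream file), assuming the
# shim's only job is to re-export the migrated classes: each legacy name in
# this module should be the identical object exposed by the `torch.ao` module.
if __name__ == "__main__":
    import torch.ao.nn.quantized.modules.activation as ao_activation

    for name in (
        "ELU", "Hardswish", "LeakyReLU", "MultiheadAttention",
        "PReLU", "ReLU6", "Sigmoid", "Softmax",
    ):
        # `is` confirms these are re-exports, not copies.
        assert globals()[name] is getattr(ao_activation, name)
    print("legacy imports match torch.ao originals")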