test_log_softmax.py

import numpy as np
from numpy.testing import assert_allclose
import pytest
import scipy.special as sc
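

# Minimal reference implementation, added here for illustration only (it is
# not part of the original suite and not a scipy API): log_softmax(x) equals
# (x - m) - log(sum(exp(x - m))) with m = max(x).  Shifting by the maximum
# avoids the overflow that a naive np.log(np.exp(x) / np.exp(x).sum()) hits
# for inputs like [1000, 1].
def _log_softmax_reference(x):
    x = np.asarray(x, dtype=float)
    shifted = x - x.max()
    return shifted - np.log(np.exp(shifted).sum())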


@pytest.mark.parametrize('x, expected', [
    (np.array([1000, 1]), np.array([0, -999])),
    # Expected value computed using mpmath (with mpmath.mp.dps = 200) and then
    # converted to float.
    (np.arange(4), np.array([-3.4401896985611953,
                             -2.4401896985611953,
                             -1.4401896985611953,
                             -0.44018969856119533]))
])
def test_log_softmax(x, expected):
    assert_allclose(sc.log_softmax(x), expected, rtol=1e-13)
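

# Illustrative cross-check, not from the original suite: for a moderate input
# the scipy result should agree with the reference sketch above.
def test_log_softmax_matches_reference():
    x = np.linspace(-5, 5, 11)
    assert_allclose(sc.log_softmax(x), _log_softmax_reference(x), rtol=1e-13)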


@pytest.fixture
def log_softmax_x():
    x = np.arange(4)
    return x


@pytest.fixture
def log_softmax_expected():
    # Expected value computed using mpmath (with mpmath.mp.dps = 200) and then
    # converted to float.
    expected = np.array([-3.4401896985611953,
                         -2.4401896985611953,
                         -1.4401896985611953,
                         -0.44018969856119533])
    return expected


def test_log_softmax_translation(log_softmax_x, log_softmax_expected):
    # Translation property: if all the values are shifted by the same amount,
    # the log_softmax result does not change.
    x = log_softmax_x + 100
    expected = log_softmax_expected
    assert_allclose(sc.log_softmax(x), expected, rtol=1e-13)
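

# Illustrative extension of the translation test (an assumption, not in the
# original suite): the invariance should also hold for a shift large enough
# to overflow a naive exp-based computation.
def test_log_softmax_large_translation(log_softmax_x, log_softmax_expected):
    x = log_softmax_x + 1e4
    assert_allclose(sc.log_softmax(x), log_softmax_expected, rtol=1e-13)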


def test_log_softmax_noneaxis(log_softmax_x, log_softmax_expected):
    # When axis=None (the default), log_softmax operates on the entire array
    # and preserves its shape.
    x = log_softmax_x.reshape(2, 2)
    expected = log_softmax_expected.reshape(2, 2)
    assert_allclose(sc.log_softmax(x), expected, rtol=1e-13)


@pytest.mark.parametrize('axis_2d, expected_2d', [
    # Along axis 0 the two entries in each column are equal, so the
    # distribution is uniform and log_softmax is log(1/2) everywhere.
    (0, np.log(0.5) * np.ones((2, 2))),
    # Along axis 1 each row is [1000, 1], matching the 1-d case above.
    (1, np.array([[0, -999], [0, -999]]))
])
def test_axes(axis_2d, expected_2d):
    assert_allclose(
        sc.log_softmax([[1000, 1], [1000, 1]], axis=axis_2d),
        expected_2d,
        rtol=1e-13,
    )


@pytest.fixture
def log_softmax_2d_x():
    x = np.arange(8).reshape(2, 4)
    return x


@pytest.fixture
def log_softmax_2d_expected():
    # Expected value computed using mpmath (with mpmath.mp.dps = 200) and then
    # converted to float.
    expected = np.array([[-3.4401896985611953,
                          -2.4401896985611953,
                          -1.4401896985611953,
                          -0.44018969856119533],
                         [-3.4401896985611953,
                          -2.4401896985611953,
                          -1.4401896985611953,
                          -0.44018969856119533]])
    return expected


def test_log_softmax_2d_axis1(log_softmax_2d_x, log_softmax_2d_expected):
    x = log_softmax_2d_x
    expected = log_softmax_2d_expected
    assert_allclose(sc.log_softmax(x, axis=1), expected, rtol=1e-13)


def test_log_softmax_2d_axis0(log_softmax_2d_x, log_softmax_2d_expected):
    x = log_softmax_2d_x.T
    expected = log_softmax_2d_expected.T
    assert_allclose(sc.log_softmax(x, axis=0), expected, rtol=1e-13)


def test_log_softmax_3d(log_softmax_2d_x, log_softmax_2d_expected):
    # 3-d input, with a tuple for the axis.
    x_3d = log_softmax_2d_x.reshape(2, 2, 2)
    expected_3d = log_softmax_2d_expected.reshape(2, 2, 2)
    assert_allclose(sc.log_softmax(x_3d, axis=(1, 2)), expected_3d, rtol=1e-13)


def test_log_softmax_scalar():
    # A scalar is a one-outcome distribution, so log_softmax is log(1) = 0.
    assert_allclose(sc.log_softmax(1.0), 0.0, rtol=1e-13)
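

# Illustrative sanity check, not from the original suite: exponentiating the
# output along the reduced axis should give probabilities that sum to 1.
def test_log_softmax_normalization():
    rng = np.random.default_rng(1234)
    x = rng.standard_normal((3, 5))
    totals = np.exp(sc.log_softmax(x, axis=1)).sum(axis=1)
    assert_allclose(totals, np.ones(3), rtol=1e-13)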