dogleg_strategy.h

// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2023 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)

#ifndef CERES_INTERNAL_DOGLEG_STRATEGY_H_
#define CERES_INTERNAL_DOGLEG_STRATEGY_H_

#include "ceres/internal/disable_warnings.h"
#include "ceres/internal/export.h"
#include "ceres/linear_solver.h"
#include "ceres/trust_region_strategy.h"

namespace ceres::internal {

// Dogleg step computation and trust region sizing strategy based on
// "Methods for Nonlinear Least Squares" by K. Madsen, H.B. Nielsen
// and O. Tingleff. Available to download from
//
// http://www2.imm.dtu.dk/pubdb/views/edoc_download.php/3215/pdf/imm3215.pdf
//
// One minor modification is that instead of computing the pure
// Gauss-Newton step, we compute a regularized version of it. This is
// because the Jacobian is often rank-deficient and in such cases
// using a direct solver leads to numerical failure.
//
// If SUBSPACE is passed as the type argument to the constructor, the
// DoglegStrategy follows the approach by Shultz, Schnabel, Byrd.
// This finds the exact optimum over the two-dimensional subspace
// spanned by the two Dogleg vectors.
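//
// For orientation, a sketch of the traditional dogleg step (the names
// p_c, p_gn and beta below are illustrative and do not appear in this
// header): with p_c the Cauchy point (the minimizer of the quadratic
// model along the steepest descent direction) and p_gn the
// (regularized) Gauss-Newton step,
//
//   step = p_gn                        if ||p_gn|| <= radius,
//   step = (radius / ||p_c||) * p_c    if ||p_c|| >= radius,
//   step = p_c + beta * (p_gn - p_c)   otherwise,
//
// where beta in [0, 1] is chosen so that ||step|| = radius.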
class CERES_NO_EXPORT DoglegStrategy final : public TrustRegionStrategy {
 public:
  explicit DoglegStrategy(const TrustRegionStrategy::Options& options);

  // TrustRegionStrategy interface
  Summary ComputeStep(const PerSolveOptions& per_solve_options,
                      SparseMatrix* jacobian,
                      const double* residuals,
                      double* step) final;
  void StepAccepted(double step_quality) final;
  void StepRejected(double step_quality) final;
  void StepIsInvalid() override;

  double Radius() const final;

  // These functions are predominantly for testing.
  Vector gradient() const { return gradient_; }
  Vector gauss_newton_step() const { return gauss_newton_step_; }
  Matrix subspace_basis() const { return subspace_basis_; }
  Vector subspace_g() const { return subspace_g_; }
  Matrix subspace_B() const { return subspace_B_; }

 private:
  using Vector2d = Eigen::Matrix<double, 2, 1, Eigen::DontAlign>;
  using Matrix2d = Eigen::Matrix<double, 2, 2, Eigen::DontAlign>;

  LinearSolver::Summary ComputeGaussNewtonStep(
      const PerSolveOptions& per_solve_options,
      SparseMatrix* jacobian,
      const double* residuals);
  void ComputeCauchyPoint(SparseMatrix* jacobian);
  void ComputeGradient(SparseMatrix* jacobian, const double* residuals);
  void ComputeTraditionalDoglegStep(double* step);

  bool ComputeSubspaceModel(SparseMatrix* jacobian);
  void ComputeSubspaceDoglegStep(double* step);
  bool FindMinimumOnTrustRegionBoundary(Vector2d* minimum) const;
  Vector MakePolynomialForBoundaryConstrainedProblem() const;
  Vector2d ComputeSubspaceStepFromRoot(double lambda) const;
  double EvaluateSubspaceModel(const Vector2d& x) const;

  LinearSolver* linear_solver_;
  double radius_;
  const double max_radius_;

  const double min_diagonal_;
  const double max_diagonal_;

  // mu is used to scale the diagonal matrix used to make the
  // Gauss-Newton solve full rank. In each solve, the strategy starts
  // out with mu = min_mu, and tries values up to max_mu. If the user
  // reports an invalid step, the value of mu_ is increased so that
  // the next solve starts with a stronger regularization.
  //
  // If a successful step is reported, then the value of mu_ is
  // decreased with a lower bound of min_mu_.
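  //
  // For illustration, one plausible shape of the update (a sketch only;
  // the exact rule is defined in dogleg_strategy.cc, not here):
  //
  //   StepIsInvalid(): mu_ = mu_increase_factor_ * mu_;              // grow
  //   StepAccepted():  mu_ = std::max(min_mu_, mu_ / mu_increase_factor_);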
  double mu_;
  const double min_mu_;
  const double max_mu_;
  const double mu_increase_factor_;
  const double increase_threshold_;
  const double decrease_threshold_;

  Vector diagonal_;  // sqrt(diag(J^T J))
  Vector lm_diagonal_;

  Vector gradient_;
  Vector gauss_newton_step_;

  // cauchy_step = alpha * gradient
  double alpha_;
  double dogleg_step_norm_;

  // When ComputeStep is called, reuse_ indicates whether the
  // Gauss-Newton and Cauchy steps from the last call to ComputeStep
  // can be reused or not.
  //
  // If the user called StepAccepted, then it is expected that the
  // user has recomputed the Jacobian matrix and a new Gauss-Newton
  // solve is needed, so reuse is set to false.
  //
  // If the user called StepRejected, then it is expected that the
  // user wants to solve the trust region problem with the same matrix
  // but a different trust region radius, so the Gauss-Newton and
  // Cauchy steps can be reused to compute the Dogleg; thus reuse is
  // set to true.
  //
  // If the user called StepIsInvalid, then there was a numerical
  // problem with the step computed in the last call to ComputeStep,
  // the regularization used to do the Gauss-Newton solve is
  // increased, and a new solve should be done when ComputeStep is
  // called again; thus reuse is set to false.
  bool reuse_;
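
  // Illustrative call pattern (a sketch, not the actual driver; in Ceres
  // the trust region minimizer owns this interaction, and the flags
  // step_is_valid / sufficient_decrease below are made up for the example):
  //
  //   DoglegStrategy strategy(options);
  //   while (!converged) {
  //     strategy.ComputeStep(per_solve_options, jacobian, residuals, step);
  //     if (!step_is_valid) {
  //       strategy.StepIsInvalid();        // stronger regularization, no reuse
  //     } else if (sufficient_decrease) {
  //       strategy.StepAccepted(quality);  // caller recomputes the Jacobian
  //     } else {
  //       strategy.StepRejected(quality);  // same Jacobian, smaller radius
  //     }
  //   }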

  // The dogleg type determines how the minimum of the local
  // quadratic model is found.
  DoglegType dogleg_type_;

  // If the type is SUBSPACE_DOGLEG, the two-dimensional
  // model 1/2 x^T B x + g^T x has to be computed and stored.
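  //
  // As a sketch (assuming the standard two-dimensional subspace
  // construction of Shultz, Schnabel and Byrd; V below denotes
  // subspace_basis_):
  //
  //   g = V^T gradient       (a 2-vector),
  //   B = V^T (J^T J) V      (a 2x2 matrix),
  //
  // so that evaluating the full quadratic model at x = V y reduces to
  // the two-dimensional problem 1/2 y^T B y + g^T y.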
  bool subspace_is_one_dimensional_;
  Matrix subspace_basis_;
  Vector2d subspace_g_;
  Matrix2d subspace_B_;
};

}  // namespace ceres::internal

#include "ceres/internal/reenable_warnings.h"

#endif  // CERES_INTERNAL_DOGLEG_STRATEGY_H_