// gradient_checking_cost_function.cc

// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2023 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Authors: keir@google.com (Keir Mierle),
//          dgossow@google.com (David Gossow)

#include "ceres/gradient_checking_cost_function.h"

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <memory>
#include <mutex>  // std::lock_guard / std::mutex are used directly below.
#include <numeric>
#include <string>
#include <utility>
#include <vector>

#include "ceres/dynamic_numeric_diff_cost_function.h"
#include "ceres/gradient_checker.h"
#include "ceres/internal/eigen.h"
#include "ceres/parameter_block.h"
#include "ceres/problem.h"
#include "ceres/problem_impl.h"
#include "ceres/program.h"
#include "ceres/residual_block.h"
#include "ceres/stringprintf.h"
#include "ceres/types.h"
#include "glog/logging.h"

namespace ceres::internal {
namespace {

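// Wraps a CostFunction and, whenever Evaluate() is asked for jacobians,
// verifies the user-supplied jacobians against numerically differentiated
// ones via GradientChecker::Probe(). A rough sketch of the check (the
// authoritative formula lives in GradientChecker, not here): each Jacobian
// block J supplied by the cost function is compared element-wise against a
// finite difference estimate J_num, and an error is flagged roughly when
//
//   |J_ij - J_num_ij| / max(|J_ij|, |J_num_ij|) > relative_precision,
//
// with both Jacobians first mapped through the manifold's PlusJacobian when
// the corresponding parameter block has a Manifold.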
class GradientCheckingCostFunction final : public CostFunction {
 public:
  GradientCheckingCostFunction(const CostFunction* function,
                               const std::vector<const Manifold*>* manifolds,
                               const NumericDiffOptions& options,
                               double relative_precision,
                               std::string extra_info,
                               GradientCheckingIterationCallback* callback)
      : function_(function),
        gradient_checker_(function, manifolds, options),
        relative_precision_(relative_precision),
        extra_info_(std::move(extra_info)),
        callback_(callback) {
    CHECK(callback_ != nullptr);
    const std::vector<int32_t>& parameter_block_sizes =
        function->parameter_block_sizes();
    *mutable_parameter_block_sizes() = parameter_block_sizes;
    set_num_residuals(function->num_residuals());
  }

  bool Evaluate(double const* const* parameters,
                double* residuals,
                double** jacobians) const final {
    if (!jacobians) {
      // Nothing to check in this case; just forward.
      return function_->Evaluate(parameters, residuals, nullptr);
    }

    GradientChecker::ProbeResults results;
    bool okay =
        gradient_checker_.Probe(parameters, relative_precision_, &results);

    // If the cost function returned false, there's nothing we can say about
    // the gradients.
    if (results.return_value == false) {
      return false;
    }

    // Copy the residuals.
    const int num_residuals = function_->num_residuals();
    MatrixRef(residuals, num_residuals, 1) = results.residuals;

    // Copy the original jacobian blocks into the jacobians array.
    const std::vector<int32_t>& block_sizes =
        function_->parameter_block_sizes();
    for (int k = 0; k < block_sizes.size(); k++) {
      if (jacobians[k] != nullptr) {
        MatrixRef(jacobians[k],
                  results.jacobians[k].rows(),
                  results.jacobians[k].cols()) = results.jacobians[k];
      }
    }

    if (!okay) {
      std::string error_log =
          "Gradient Error detected!\nExtra info for this residual: " +
          extra_info_ + "\n" + results.error_log;
      callback_->SetGradientErrorDetected(error_log);
    }
    return true;
  }

 private:
  const CostFunction* function_;
  GradientChecker gradient_checker_;
  double relative_precision_;
  std::string extra_info_;
  GradientCheckingIterationCallback* callback_;
};

}  // namespace

GradientCheckingIterationCallback::GradientCheckingIterationCallback()
    : gradient_error_detected_(false) {}

CallbackReturnType GradientCheckingIterationCallback::operator()(
    const IterationSummary& /*summary*/) {
  if (gradient_error_detected_) {
    LOG(ERROR) << "Gradient error detected. Terminating solver.";
    return SOLVER_ABORT;
  }
  return SOLVER_CONTINUE;
}

void GradientCheckingIterationCallback::SetGradientErrorDetected(
    std::string& error_log) {
  std::lock_guard<std::mutex> l(mutex_);
  gradient_error_detected_ = true;
  error_log_ += "\n" + error_log;
}

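// A minimal sketch of how a driver wires this callback up (this mirrors what
// Solver::Solve does internally when Solver::Options::check_gradients is
// true; the step size and precision values are purely illustrative):
//
//   GradientCheckingIterationCallback callback;
//   auto checked_problem = CreateGradientCheckingProblemImpl(
//       problem_impl, /*relative_step_size=*/1e-6,
//       /*relative_precision=*/1e-8, &callback);
//   Solver::Options options;
//   options.callbacks.push_back(&callback);
//   // ... run the solver on checked_problem; the first detected gradient
//   // error makes operator() return SOLVER_ABORT.
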
std::unique_ptr<CostFunction> CreateGradientCheckingCostFunction(
    const CostFunction* cost_function,
    const std::vector<const Manifold*>* manifolds,
    double relative_step_size,
    double relative_precision,
    const std::string& extra_info,
    GradientCheckingIterationCallback* callback) {
  NumericDiffOptions numeric_diff_options;
  numeric_diff_options.relative_step_size = relative_step_size;

  return std::make_unique<GradientCheckingCostFunction>(cost_function,
                                                        manifolds,
                                                        numeric_diff_options,
                                                        relative_precision,
                                                        extra_info,
                                                        callback);
}

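// A hedged usage sketch for wrapping a single CostFunction by hand
// (my_cost_function and num_parameter_blocks are hypothetical; inside Ceres
// this wrapping is done for an entire problem by
// CreateGradientCheckingProblemImpl below):
//
//   GradientCheckingIterationCallback callback;
//   std::vector<const Manifold*> manifolds(num_parameter_blocks, nullptr);
//   std::unique_ptr<CostFunction> checked =
//       CreateGradientCheckingCostFunction(my_cost_function,
//                                          &manifolds,
//                                          /*relative_step_size=*/1e-6,
//                                          /*relative_precision=*/1e-8,
//                                          "example residual",
//                                          &callback);
//   // checked->Evaluate() forwards to my_cost_function and reports any
//   // jacobian mismatch through the callback.
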
std::unique_ptr<ProblemImpl> CreateGradientCheckingProblemImpl(
    ProblemImpl* problem_impl,
    double relative_step_size,
    double relative_precision,
    GradientCheckingIterationCallback* callback) {
  CHECK(callback != nullptr);

  // We create new CostFunctions by wrapping the original CostFunction in a
  // gradient checking CostFunction, so it is okay for the ProblemImpl to take
  // ownership of them and destroy them. The LossFunctions and Manifolds are
  // reused, and since they are owned by problem_impl,
  // gradient_checking_problem_impl should not take ownership of them.
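  //
  // Note also that the new problem aliases the original problem's parameter
  // memory: every parameter block below is added via mutable_user_state(), so
  // evaluating or solving the gradient checking problem reads and writes the
  // same values the caller's problem sees.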
  Problem::Options gradient_checking_problem_options;
  gradient_checking_problem_options.cost_function_ownership = TAKE_OWNERSHIP;
  gradient_checking_problem_options.loss_function_ownership =
      DO_NOT_TAKE_OWNERSHIP;
  gradient_checking_problem_options.manifold_ownership = DO_NOT_TAKE_OWNERSHIP;
  gradient_checking_problem_options.context = problem_impl->context();

  NumericDiffOptions numeric_diff_options;
  numeric_diff_options.relative_step_size = relative_step_size;

  auto gradient_checking_problem_impl =
      std::make_unique<ProblemImpl>(gradient_checking_problem_options);

  Program* program = problem_impl->mutable_program();

  // For every ParameterBlock in problem_impl, create a new parameter block
  // with the same manifold and constancy.
  const std::vector<ParameterBlock*>& parameter_blocks =
      program->parameter_blocks();
  for (auto* parameter_block : parameter_blocks) {
    gradient_checking_problem_impl->AddParameterBlock(
        parameter_block->mutable_user_state(),
        parameter_block->Size(),
        parameter_block->mutable_manifold());
    if (parameter_block->IsConstant()) {
      gradient_checking_problem_impl->SetParameterBlockConstant(
          parameter_block->mutable_user_state());
    }

    // Also copy over any per-coordinate bounds on the parameter block.
    for (int i = 0; i < parameter_block->Size(); ++i) {
      gradient_checking_problem_impl->SetParameterUpperBound(
          parameter_block->mutable_user_state(),
          i,
          parameter_block->UpperBound(i));
      gradient_checking_problem_impl->SetParameterLowerBound(
          parameter_block->mutable_user_state(),
          i,
          parameter_block->LowerBound(i));
    }
  }

  // For every ResidualBlock in problem_impl, create a new
  // ResidualBlock by wrapping its CostFunction inside a
  // GradientCheckingCostFunction.
  const std::vector<ResidualBlock*>& residual_blocks =
      program->residual_blocks();
  for (int i = 0; i < residual_blocks.size(); ++i) {
    ResidualBlock* residual_block = residual_blocks[i];

    // Build a human readable string which identifies the
    // ResidualBlock. This is used by the GradientCheckingCostFunction
    // when logging debugging information.
    std::string extra_info =
        StringPrintf("Residual block id %d; depends on parameters [", i);
    std::vector<double*> parameter_blocks;
    std::vector<const Manifold*> manifolds;
    parameter_blocks.reserve(residual_block->NumParameterBlocks());
    manifolds.reserve(residual_block->NumParameterBlocks());
    for (int j = 0; j < residual_block->NumParameterBlocks(); ++j) {
      ParameterBlock* parameter_block = residual_block->parameter_blocks()[j];
      parameter_blocks.push_back(parameter_block->mutable_user_state());
      StringAppendF(&extra_info, "%p", parameter_block->mutable_user_state());
      extra_info += (j < residual_block->NumParameterBlocks() - 1) ? ", " : "]";
      manifolds.push_back(
          problem_impl->GetManifold(parameter_block->mutable_user_state()));
    }

    // Wrap the original CostFunction in a GradientCheckingCostFunction.
    CostFunction* gradient_checking_cost_function =
        new GradientCheckingCostFunction(residual_block->cost_function(),
                                         &manifolds,
                                         numeric_diff_options,
                                         relative_precision,
                                         extra_info,
                                         callback);

    // The const_cast is necessary because
    // ProblemImpl::AddResidualBlock can potentially take ownership of
    // the LossFunction, but in this case we are guaranteed that this
    // will not be the case, so this const_cast is harmless.
    gradient_checking_problem_impl->AddResidualBlock(
        gradient_checking_cost_function,
        const_cast<LossFunction*>(residual_block->loss_function()),
        parameter_blocks.data(),
        static_cast<int>(parameter_blocks.size()));
  }

  // Normally, when a problem is given to the solver, we guarantee
  // that the state pointers for each parameter block point to the
  // user provided data. Since we are creating this new problem from a
  // problem given to us at an arbitrary stage of the solve, we cannot
  // depend on this being the case, so we explicitly call
  // SetParameterBlockStatePtrsToUserStatePtrs to ensure that this is
  // the case.
  gradient_checking_problem_impl->mutable_program()
      ->SetParameterBlockStatePtrsToUserStatePtrs();

  return gradient_checking_problem_impl;
}

}  // namespace ceres::internal