
/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file testDoglegOptimizer.cpp
 * @brief Unit tests for DoglegOptimizer
 * @author Richard Roberts
 * @author Frank Dellaert
 */
#include <CppUnitLite/TestHarness.h>

#include <tests/smallExample.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/nonlinear/DoglegOptimizerImpl.h>
#include <gtsam/nonlinear/NonlinearEquality.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/JacobianFactor.h>
#include <gtsam/linear/GaussianBayesTree.h>
#include <gtsam/base/numericalDerivative.h>

#include <boost/assign/list_of.hpp>  // for 'list_of()'
#include <boost/iterator/counting_iterator.hpp>

#include <functional>

using namespace std;
using namespace gtsam;

// Convenience for named keys
using symbol_shorthand::X;
using symbol_shorthand::L;
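
// A minimal usage sketch of the high-level API (illustration only, not part
// of the tests below; assumes the standard DoglegParams setters): given a
// NonlinearFactorGraph `graph` and an initial `Values` estimate `initial`,
//
//   DoglegParams params;
//   params.setDeltaInitial(1.0);  // initial trust-region radius Delta
//   Values result = DoglegOptimizer(graph, initial, params).optimize();
//
// The tests below instead exercise the DoglegOptimizerImpl internals directly.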
/* ************************************************************************* */
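// ComputeBlend returns the point on the second dogleg leg, the segment from
// the steepest descent point x_u to the Newton point x_n, whose norm equals
// the trust region radius Delta: it solves ||x_u + tau*(x_n - x_u)|| = Delta
// for tau in [0,1], a scalar quadratic in tau. The test below verifies just
// that norm property on an arbitrary Gaussian Bayes net.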
TEST(DoglegOptimizer, ComputeBlend) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute steepest descent point
  VectorValues xu = gbn.optimizeGradientSearch();

  // Compute Newton's method point
  VectorValues xn = gbn.optimize();

  // The Newton's method point should be more "adventurous", i.e. larger than
  // the steepest descent point
  EXPECT(xu.vector().norm() < xn.vector().norm());

  // Compute blend
  double Delta = 1.5;
  VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn);
  DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10);
}
/* ************************************************************************* */
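// ComputeDoglegPoint dispatches on the trust region radius: if Delta is
// smaller than the norm of the steepest descent point x_u, it returns the
// gradient step scaled to have norm Delta; if Delta lies between ||x_u|| and
// ||x_n||, it returns the ComputeBlend point; and if Delta exceeds ||x_n||,
// it returns the full Newton step. The three Deltas below hit each regime.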
TEST(DoglegOptimizer, ComputeDoglegPoint) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute dogleg point for different deltas

  double Delta1 = 0.5;  // Less than the steepest descent point
  VectorValues actual1 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta1, gbn.optimizeGradientSearch(), gbn.optimize());
  DOUBLES_EQUAL(Delta1, actual1.vector().norm(), 1e-5);

  double Delta2 = 1.5;  // Between steepest descent and Newton's method
  VectorValues expected2 = DoglegOptimizerImpl::ComputeBlend(
      Delta2, gbn.optimizeGradientSearch(), gbn.optimize());
  VectorValues actual2 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta2, gbn.optimizeGradientSearch(), gbn.optimize());
  DOUBLES_EQUAL(Delta2, actual2.vector().norm(), 1e-5);
  EXPECT(assert_equal(expected2, actual2));

  double Delta3 = 5.0;  // Larger than the Newton's method point
  VectorValues expected3 = gbn.optimize();
  VectorValues actual3 = DoglegOptimizerImpl::ComputeDoglegPoint(
      Delta3, gbn.optimizeGradientSearch(), gbn.optimize());
  EXPECT(assert_equal(expected3, actual3));
}
/* ************************************************************************* */
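// Iterate performs one dogleg trust region step: roughly, it computes the
// dogleg point for the current Delta, compares the actual reduction in
// nonlinear error against the reduction predicted by the linear model, and
// adapts Delta accordingly. With SEARCH_EACH_ITERATION it keeps adjusting
// Delta within a single call until the step is acceptable, so each returned
// f_error should be an improvement, which the loop below checks.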
TEST(DoglegOptimizer, Iterate) {
  // Really non-linear factor graph
  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  // Config far from minimum
  Point2 x0(3, 0);
  Values config;
  config.insert(X(1), x0);

  double Delta = 1.0;
  for(size_t it = 0; it < 10; ++it) {
    GaussianBayesNet gbn = *fg.linearize(config)->eliminateSequential();

    // Iterate assumes that linear error = nonlinear error at the
    // linearization point, and this should be true
    double nonlinearError = fg.error(config);
    double linearError = GaussianFactorGraph(gbn).error(config.zeroVectors());
    DOUBLES_EQUAL(nonlinearError, linearError, 1e-5);
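    // This holds because the error of the linearized graph at the zero
    // increment is exactly the nonlinear error at the linearization point;
    // the linearization is exact at the point where it was taken.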
    // cout << "it " << it << ", Delta = " << Delta << ", error = " << fg.error(config) << endl;
    VectorValues dx_u = gbn.optimizeGradientSearch();
    VectorValues dx_n = gbn.optimize();
    DoglegOptimizerImpl::IterationResult result = DoglegOptimizerImpl::Iterate(
        Delta, DoglegOptimizerImpl::SEARCH_EACH_ITERATION, dx_u, dx_n, gbn, fg,
        config, fg.error(config));
    Delta = result.delta;
    EXPECT(result.f_error < fg.error(config));  // Check that error decreases
    Values newConfig(config.retract(result.dx_d));
    config = newConfig;
    DOUBLES_EQUAL(fg.error(config), result.f_error, 1e-5);  // Check that error is correctly filled in
  }
}
/* ************************************************************************* */
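// NonlinearEquality pins pose 1 exactly at the origin, so Dogleg can only
// optimize this graph from a feasible initial estimate, i.e. one that
// already satisfies the constraint. From an infeasible estimate the hard
// constraint throws; under TBB the exception crosses a parallel region and
// arrives wrapped, hence the weaker std::exception expectation below.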
TEST(DoglegOptimizer, Constraint) {
  // Create a pose graph with a constraint on the first pose
  NonlinearFactorGraph graph;
  const Pose2 origin(0, 0, 0), pose2(2, 0, 0);
  graph.emplace_shared<NonlinearEquality<Pose2> >(1, origin);
  auto model = noiseModel::Diagonal::Sigmas(Vector3(0.2, 0.2, 0.1));
  graph.emplace_shared<BetweenFactor<Pose2> >(1, 2, pose2, model);

  // Create feasible initial estimate
  Values initial;
  initial.insert(1, origin);  // feasible!
  initial.insert(2, Pose2(2.3, 0.1, -0.2));

  // Optimize the initial values using DoglegOptimizer
  DoglegParams params;
  params.setVerbosityDL("VERBOSITY");
  DoglegOptimizer optimizer(graph, initial, params);
  Values result = optimizer.optimize();

  // Check result
  EXPECT(assert_equal(pose2, result.at<Pose2>(2)));

  // Create infeasible initial estimate
  Values infeasible;
  infeasible.insert(1, Pose2(0.1, 0, 0));  // infeasible!
  infeasible.insert(2, Pose2(2.3, 0.1, -0.2));

  // Try optimizing with infeasible initial estimate
  DoglegOptimizer optimizer2(graph, infeasible, params);
#ifdef GTSAM_USE_TBB
  CHECK_EXCEPTION(optimizer2.optimize(), std::exception);
#else
  CHECK_EXCEPTION(optimizer2.optimize(), std::invalid_argument);
#endif
}
/* ************************************************************************* */
int main() { TestResult tr; return TestRegistry::runAllTests(tr); }
/* ************************************************************************* */