/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file    testNonlinearOptimizer.cpp
 * @brief   Unit tests for NonlinearOptimizer class
 * @author  Frank Dellaert
 */

#include <tests/smallExample.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h>
#include <gtsam/nonlinear/GaussNewtonOptimizer.h>
#include <gtsam/nonlinear/DoglegOptimizer.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/geometry/Pose2.h>
#include <gtsam/base/Matrix.h>

#include <CppUnitLite/TestHarness.h>

#include <boost/range/adaptor/map.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/assign/std/list.hpp>  // for operator +=
using namespace boost::assign;
using boost::adaptors::map_values;

#include <iostream>
#include <fstream>

using namespace std;
using namespace gtsam;

const double tol = 1e-5;

using symbol_shorthand::X;
using symbol_shorthand::L;

/* ************************************************************************* */
TEST( NonlinearOptimizer, paramsEquals )
{
  // default constructors lead to two identical params
  GaussNewtonParams gnParams1;
  GaussNewtonParams gnParams2;
  CHECK(gnParams1.equals(gnParams2));

  // but the params become different if we change something in gnParams2
  gnParams2.setVerbosity("DELTA");
  CHECK(!gnParams1.equals(gnParams2));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, iterateLM )
{
  // really non-linear factor graph
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  // config far from minimum
  Point2 x0(3,0);
  Values config;
  config.insert(X(1), x0);

  // normal iterate
  GaussNewtonParams gnParams;
  GaussNewtonOptimizer gnOptimizer(fg, config, gnParams);
  gnOptimizer.iterate();

  // LM iterate with lambda 0 should be the same
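  // (with lambda = 0 the damped system (H + lambda*I) * delta = -g reduces to
  //  the Gauss-Newton normal equations H * delta = -g, so one LM iterate
  //  should match the Gauss-Newton iterate exactly)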
  LevenbergMarquardtParams lmParams;
  lmParams.lambdaInitial = 0.0;
  LevenbergMarquardtOptimizer lmOptimizer(fg, config, lmParams);
  lmOptimizer.iterate();

  CHECK(assert_equal(gnOptimizer.values(), lmOptimizer.values(), 1e-9));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, optimize )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  // test error at minimum
  Point2 xstar(0,0);
  Values cstar;
  cstar.insert(X(1), xstar);
  DOUBLES_EQUAL(0.0,fg.error(cstar),0.0);

  // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 ≈ 199
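  //   (1 - cos(3))^2 ≈ 1.99^2 ≈ 3.960 and sin(3)^2 ≈ 0.1411^2 ≈ 0.020,
  //   so the error at the initial point is about (3.960 + 0.020) * 50 = 199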
  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);
  DOUBLES_EQUAL(199.0,fg.error(c0),1e-3);

  // optimize parameters
  Ordering ord;
  ord.push_back(X(1));

  // Gauss-Newton
  GaussNewtonParams gnParams;
  gnParams.ordering = ord;
  Values actual1 = GaussNewtonOptimizer(fg, c0, gnParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual1),tol);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  lmParams.ordering = ord;
  Values actual2 = LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual2),tol);

  // Dogleg
  DoglegParams dlParams;
  dlParams.ordering = ord;
  Values actual3 = DoglegOptimizer(fg, c0, dlParams).optimize();
  DOUBLES_EQUAL(0,fg.error(actual3),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleLMOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = LevenbergMarquardtOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleGNOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = GaussNewtonOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, SimpleDLOptimizer )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actual = DoglegOptimizer(fg, c0).optimize();
  DOUBLES_EQUAL(0,fg.error(actual),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, optimization_method )
{
  LevenbergMarquardtParams paramsQR;
  paramsQR.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_QR;
  LevenbergMarquardtParams paramsChol;
  paramsChol.linearSolverType = LevenbergMarquardtParams::MULTIFRONTAL_CHOLESKY;

  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  Values actualMFQR = LevenbergMarquardtOptimizer(fg, c0, paramsQR).optimize();
  DOUBLES_EQUAL(0,fg.error(actualMFQR),tol);

  Values actualMFChol = LevenbergMarquardtOptimizer(fg, c0, paramsChol).optimize();
  DOUBLES_EQUAL(0,fg.error(actualMFChol),tol);
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, Factorization )
{
  Values config;
  config.insert(X(1), Pose2(0.,0.,0.));
  config.insert(X(2), Pose2(1.5,0.,0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3, 1e-10));
  graph += BetweenFactor<Pose2>(X(1),X(2), Pose2(1.,0.,0.), noiseModel::Isotropic::Sigma(3, 1));

  Ordering ordering;
  ordering.push_back(X(1));
  ordering.push_back(X(2));

  LevenbergMarquardtParams params;
  LevenbergMarquardtParams::SetLegacyDefaults(&params);
  LevenbergMarquardtOptimizer optimizer(graph, config, ordering, params);
  optimizer.iterate();

  Values expected;
  expected.insert(X(1), Pose2(0.,0.,0.));
  expected.insert(X(2), Pose2(1.,0.,0.));
  CHECK(assert_equal(expected, optimizer.values(), 1e-5));
}
/* ************************************************************************* */
TEST(NonlinearOptimizer, NullFactor) {
  NonlinearFactorGraph fg = example::createReallyNonlinearFactorGraph();

  // Add null factor
  fg.push_back(NonlinearFactorGraph::sharedFactor());

  // test error at minimum
  Point2 xstar(0,0);
  Values cstar;
  cstar.insert(X(1), xstar);
  DOUBLES_EQUAL(0.0,fg.error(cstar),0.0);

  // test error at initial = [(1-cos(3))^2 + (sin(3))^2]*50 ≈ 199
  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);
  DOUBLES_EQUAL(199.0,fg.error(c0),1e-3);

  // optimize parameters
  Ordering ord;
  ord.push_back(X(1));

  // Gauss-Newton
  Values actual1 = GaussNewtonOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual1),tol);

  // Levenberg-Marquardt
  Values actual2 = LevenbergMarquardtOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual2),tol);

  // Dogleg
  Values actual3 = DoglegOptimizer(fg, c0, ord).optimize();
  DOUBLES_EQUAL(0,fg.error(actual3),tol);
}
/* ************************************************************************* */
TEST_UNSAFE(NonlinearOptimizer, MoreOptimization) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0, 0, 0), noiseModel::Isotropic::Sigma(3, 1));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(1, 0, M_PI / 2),
                             noiseModel::Isotropic::Sigma(3, 1));
  fg += BetweenFactor<Pose2>(1, 2, Pose2(1, 0, M_PI / 2),
                             noiseModel::Isotropic::Sigma(3, 1));

  Values init;
  init.insert(0, Pose2(3, 4, -M_PI));
  init.insert(1, Pose2(10, 2, -M_PI));
  init.insert(2, Pose2(11, 7, -M_PI));

  Values expected;
  expected.insert(0, Pose2(0, 0, 0));
  expected.insert(1, Pose2(1, 0, M_PI / 2));
  expected.insert(2, Pose2(1, 1, M_PI));
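  // (composing the two relative measurements: from the origin, a (1,0) step
  //  followed by a 90-degree turn gives pose 1 = (1, 0, pi/2); the next (1,0)
  //  step is then taken along +y, giving pose 2 = (1, 1, pi))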
  VectorValues expectedGradient;
  expectedGradient.insert(0,Z_3x1);
  expectedGradient.insert(1,Z_3x1);
  expectedGradient.insert(2,Z_3x1);

  // Try LM and Dogleg
  LevenbergMarquardtParams params = LevenbergMarquardtParams::LegacyDefaults();
  {
    LevenbergMarquardtOptimizer optimizer(fg, init, params);

    // test convergence
    Values actual = optimizer.optimize();
    EXPECT(assert_equal(expected, actual));

    // Check that the gradient is zero
    GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
    EXPECT(assert_equal(expectedGradient,linear->gradientAtZero()));
  }
  EXPECT(assert_equal(expected, DoglegOptimizer(fg, init).optimize()));

  // cout << "===================================================================================" << endl;

  // Try LM with diagonal damping
  Values initBetter;
  initBetter.insert(0, Pose2(3,4,0));
  initBetter.insert(1, Pose2(10,2,M_PI/3));
  initBetter.insert(2, Pose2(11,7,M_PI/2));

  {
    params.diagonalDamping = true;
    LevenbergMarquardtOptimizer optimizer(fg, initBetter, params);

    // test the diagonal
    GaussianFactorGraph::shared_ptr linear = optimizer.linearize();
    VectorValues d = linear->hessianDiagonal();
    VectorValues sqrtHessianDiagonal = d;
    for (Vector& v : sqrtHessianDiagonal | map_values) v = v.cwiseSqrt();
    GaussianFactorGraph damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
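    // With diagonal damping each entry of the Hessian diagonal is scaled by
    // (1 + lambda), so the damped system's diagonal should be d + lambdaInitial * d: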
    VectorValues expectedDiagonal = d + params.lambdaInitial * d;
    EXPECT(assert_equal(expectedDiagonal, damped.hessianDiagonal()));

    // test convergence (does not!)
    Values actual = optimizer.optimize();
    EXPECT(assert_equal(expected, actual));

    // Check that the gradient is zero (it is not!)
    linear = optimizer.linearize();
    EXPECT(assert_equal(expectedGradient,linear->gradientAtZero()));

    // Check that the gradient is zero for damped system (it is not!)
    damped = optimizer.buildDampedSystem(*linear, sqrtHessianDiagonal);
    VectorValues actualGradient = damped.gradientAtZero();
    EXPECT(assert_equal(expectedGradient,actualGradient));

    /* This block was made to test the original initial guess "init"
    // Check errors at convergence and errors in direction of gradient (decreases!)
    EXPECT_DOUBLES_EQUAL(46.02558,fg.error(actual),1e-5);
    EXPECT_DOUBLES_EQUAL(44.742237,fg.error(actual.retract(-0.01*actualGradient)),1e-5);

    // Check that solve yields gradient (it's not equal to gradient, as predicted)
    VectorValues delta = damped.optimize();
    double factor = actualGradient[0][0]/delta[0][0];
    EXPECT(assert_equal(actualGradient,factor*delta));

    // Still pointing downhill wrt actual gradient !
    EXPECT_DOUBLES_EQUAL( 0.1056157,dot(-1*actualGradient,delta),1e-3);

    // delta.print("This is the delta value computed by LM with diagonal damping");

    // Still pointing downhill wrt expected gradient (IT DOES NOT! actually they are perpendicular)
    EXPECT_DOUBLES_EQUAL( 0.0,dot(-1*expectedGradient,delta),1e-5);

    // Check errors at convergence and errors in direction of solution (does not decrease!)
    EXPECT_DOUBLES_EQUAL(46.0254859,fg.error(actual.retract(delta)),1e-5);

    // Check errors at convergence and errors at a small step in direction of solution (does not decrease!)
    EXPECT_DOUBLES_EQUAL(46.0255,fg.error(actual.retract(0.01*delta)),1e-3);
    */
  }
}

/* ************************************************************************* */
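// The Huber robust noise model keeps residuals whose whitened norm is below the
// threshold k quadratic and down-weights larger ones, so with outlier-free
// measurements (as here) the robust solution should coincide with the ordinary
// least-squares estimate.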
TEST(NonlinearOptimizer, Pose2OptimizationWithHuberNoOutlier) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0,0,0), noiseModel::Isotropic::Sigma(3,1));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(1,1.1,M_PI/4),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(2.0),
                                                        noiseModel::Isotropic::Sigma(3,1)));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(1,0.9,M_PI/2),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(3.0),
                                                        noiseModel::Isotropic::Sigma(3,1)));

  Values init;
  init.insert(0, Pose2(0,0,0));
  init.insert(1, Pose2(0.961187, 0.99965, 1.1781));

  Values expected;
  expected.insert(0, Pose2(0,0,0));
  expected.insert(1, Pose2(0.961187, 0.99965, 1.1781));

  LevenbergMarquardtParams lm_params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, lm_params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 3e-2));
  EXPECT(assert_equal(expected, lm_result, 3e-2));
  EXPECT(assert_equal(expected, dl_result, 3e-2));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, Point2LinearOptimizationWithHuber) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, Point2(0,0), noiseModel::Isotropic::Sigma(2,0.01));
  fg += BetweenFactor<Point2>(0, 1, Point2(1,1.8),
                              noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                                         noiseModel::Isotropic::Sigma(2,1)));
  fg += BetweenFactor<Point2>(0, 1, Point2(1,0.9),
                              noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                                         noiseModel::Isotropic::Sigma(2,1)));
  fg += BetweenFactor<Point2>(0, 1, Point2(1,90),
                              noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(1.0),
                                                         noiseModel::Isotropic::Sigma(2,1)));

  Values init;
  init.insert(0, Point2(1,1));
  init.insert(1, Point2(1,0));

  Values expected;
  expected.insert(0, Point2(0,0));
  expected.insert(1, Point2(1,1.85));
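  // (point 0 is pinned at the origin by the tight prior; for the y-coordinate
  //  of point 1 the Huber estimate y solves sum_i psi(y - m_i) = 0 with k = 1:
  //  at y = 1.85 the inliers contribute 0.05 + 0.95 while the 90 measurement
  //  is clipped to -1, which sums to zero)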
  LevenbergMarquardtParams params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 1e-4));
  EXPECT(assert_equal(expected, lm_result, 1e-4));
  EXPECT(assert_equal(expected, dl_result, 1e-4));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, Pose2OptimizationWithHuber) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, Pose2(0,0, 0), noiseModel::Isotropic::Sigma(3,0.1));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(0,9, M_PI/2),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                                        noiseModel::Isotropic::Sigma(3,1)));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(0, 11, M_PI/2),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                                        noiseModel::Isotropic::Sigma(3,1)));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(0, 10, M_PI/2),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                                        noiseModel::Isotropic::Sigma(3,1)));
  fg += BetweenFactor<Pose2>(0, 1, Pose2(0,9, 0),
                             noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(0.2),
                                                        noiseModel::Isotropic::Sigma(3,1)));

  Values init;
  init.insert(0, Pose2(0, 0, 0));
  init.insert(1, Pose2(0, 10, M_PI/4));

  Values expected;
  expected.insert(0, Pose2(0, 0, 0));
  expected.insert(1, Pose2(0, 10, 1.45212));

  LevenbergMarquardtParams params;

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init, params).optimize();
  auto dl_result = DoglegOptimizer(fg, init).optimize();

  EXPECT(assert_equal(expected, gn_result, 1e-1));
  EXPECT(assert_equal(expected, lm_result, 1e-1));
  EXPECT(assert_equal(expected, dl_result, 1e-1));
}
/* ************************************************************************* */
TEST(NonlinearOptimizer, RobustMeanCalculation) {
  NonlinearFactorGraph fg;
  Values init;
  Values expected;

  auto huber = noiseModel::Robust::Create(noiseModel::mEstimator::Huber::Create(20),
                                          noiseModel::Isotropic::Sigma(1, 1));

  vector<double> pts{-10,-3,-1,1,3,10,1000};
  for(auto pt : pts) {
    fg.addPrior(0, pt, huber);
  }

  init.insert(0, 100.0);
  expected.insert(0, 3.33333333);
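  // (the Huber influence of the 1000 outlier is capped at k = 20, so the robust
  //  mean mu satisfies sum over the six inliers of (mu - pt) minus 20 = 0; the
  //  inliers sum to zero, so 6*mu = 20 and mu = 10/3 ≈ 3.3333)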
  DoglegParams params_dl;
  params_dl.setRelativeErrorTol(1e-10);

  auto gn_result = GaussNewtonOptimizer(fg, init).optimize();
  auto lm_result = LevenbergMarquardtOptimizer(fg, init).optimize();
  auto dl_result = DoglegOptimizer(fg, init, params_dl).optimize();

  EXPECT(assert_equal(expected, gn_result, tol));
  EXPECT(assert_equal(expected, lm_result, tol));
  EXPECT(assert_equal(expected, dl_result, tol));
}

/* ************************************************************************* */
TEST(NonlinearOptimizer, disconnected_graph) {
  Values expected;
  expected.insert(X(1), Pose2(0.,0.,0.));
  expected.insert(X(2), Pose2(1.5,0.,0.));
  expected.insert(X(3), Pose2(3.0,0.,0.));

  Values init;
  init.insert(X(1), Pose2(0.,0.,0.));
  init.insert(X(2), Pose2(0.,0.,0.));
  init.insert(X(3), Pose2(0.,0.,0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0.,0.,0.), noiseModel::Isotropic::Sigma(3,1));
  graph += BetweenFactor<Pose2>(X(1),X(2), Pose2(1.5,0.,0.), noiseModel::Isotropic::Sigma(3,1));
  graph.addPrior(X(3), Pose2(3.,0.,0.), noiseModel::Isotropic::Sigma(3,1));

  EXPECT(assert_equal(expected, LevenbergMarquardtOptimizer(graph, init).optimize()));
}

/* ************************************************************************* */
#include <gtsam/linear/iterative.h>
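
// Example of customizing the linear solve: this subclass overrides solve() so
// that each LM iteration solves the damped linear system with conjugate
// gradient descent instead of a direct (multifrontal) factorization.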
class IterativeLM : public LevenbergMarquardtOptimizer {
  /// Solver specific parameters
  ConjugateGradientParameters cgParams_;
  Values initial_;

public:
  /// Constructor
  IterativeLM(const NonlinearFactorGraph& graph, const Values& initialValues,
              const ConjugateGradientParameters& p,
              const LevenbergMarquardtParams& params =
                  LevenbergMarquardtParams::LegacyDefaults())
      : LevenbergMarquardtOptimizer(graph, initialValues, params),
        cgParams_(p),
        initial_(initialValues) {}

  /// Solve that uses conjugate gradient
  VectorValues solve(const GaussianFactorGraph& gfg,
                     const NonlinearOptimizerParams& params) const override {
    VectorValues zeros = initial_.zeroVectors();
    return conjugateGradientDescent(gfg, zeros, cgParams_);
  }
};

/* ************************************************************************* */
TEST(NonlinearOptimizer, subclass_solver) {
  Values expected;
  expected.insert(X(1), Pose2(0., 0., 0.));
  expected.insert(X(2), Pose2(1.5, 0., 0.));
  expected.insert(X(3), Pose2(3.0, 0., 0.));

  Values init;
  init.insert(X(1), Pose2(0., 0., 0.));
  init.insert(X(2), Pose2(0., 0., 0.));
  init.insert(X(3), Pose2(0., 0., 0.));

  NonlinearFactorGraph graph;
  graph.addPrior(X(1), Pose2(0., 0., 0.), noiseModel::Isotropic::Sigma(3, 1));
  graph += BetweenFactor<Pose2>(X(1), X(2), Pose2(1.5, 0., 0.),
                                noiseModel::Isotropic::Sigma(3, 1));
  graph.addPrior(X(3), Pose2(3., 0., 0.), noiseModel::Isotropic::Sigma(3, 1));

  ConjugateGradientParameters p;
  Values actual = IterativeLM(graph, init, p).optimize();
  EXPECT(assert_equal(expected, actual, 1e-4));
}

/* ************************************************************************* */
TEST( NonlinearOptimizer, logfile )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  static const string filename("testNonlinearOptimizer.log");
  lmParams.logFile = filename;
  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();

  // stringstream expected,actual;
  // ifstream ifs(("../../gtsam/tests/" + filename).c_str());
  // if(!ifs) throw std::runtime_error(filename);
  // expected << ifs.rdbuf();
  // ifs.close();

  // ifstream ifs2(filename.c_str());
  // if(!ifs2) throw std::runtime_error(filename);
  // actual << ifs2.rdbuf();
  // EXPECT(actual.str()==expected.str());
}
/* ************************************************************************* */
TEST( NonlinearOptimizer, iterationHook_LM )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  size_t lastIterCalled = 0;
  lmParams.iterationHook = [&](size_t iteration, double oldError, double newError)
  {
    // Tests:
    lastIterCalled = iteration;
    EXPECT(newError<oldError);

    // Example of evolution printout:
    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
  };
  LevenbergMarquardtOptimizer(fg, c0, lmParams).optimize();

  EXPECT(lastIterCalled>5);
}
/* ************************************************************************* */
TEST( NonlinearOptimizer, iterationHook_CG )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  Point2 x0(3,3);
  Values c0;
  c0.insert(X(1), x0);

  // Nonlinear conjugate gradient
  NonlinearConjugateGradientOptimizer::Parameters cgParams;
  size_t lastIterCalled = 0;
  cgParams.iterationHook = [&](size_t iteration, double oldError, double newError)
  {
    // Tests:
    lastIterCalled = iteration;
    EXPECT(newError<oldError);

    // Example of evolution printout:
    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
  };
  NonlinearConjugateGradientOptimizer(fg, c0, cgParams).optimize();

  EXPECT(lastIterCalled>5);
}
/* ************************************************************************* */
// Minimal traits example
struct MyType : public Vector3 {
  using Vector3::Vector3;
};

namespace gtsam {
template <>
struct traits<MyType> {
  static bool Equals(const MyType& a, const MyType& b, double tol) {
    return (a - b).array().abs().maxCoeff() < tol;
  }
  static void Print(const MyType&, const string&) {}
  static int GetDimension(const MyType&) { return 3; }
  static MyType Retract(const MyType& a, const Vector3& b) { return a + b; }
  static Vector3 Local(const MyType& a, const MyType& b) { return b - a; }
};
}
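
// With Equals, Print, GetDimension, Retract, and Local defined above, MyType
// provides the operations Values and the optimizers rely on here, so a graph
// over MyType can be optimized like any built-in type.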
TEST(NonlinearOptimizer, Traits) {
  NonlinearFactorGraph fg;
  fg.addPrior(0, MyType(0, 0, 0), noiseModel::Isotropic::Sigma(3, 1));

  Values init;
  init.insert(0, MyType(0,0,0));

  LevenbergMarquardtOptimizer optimizer(fg, init);
  Values actual = optimizer.optimize();
  EXPECT(assert_equal(init, actual));
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */