/* ----------------------------------------------------------------------------
 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)
 * See LICENSE for the license information
 * -------------------------------------------------------------------------- */

/**
 * @file TransformBtwRobotsUnaryFactorEM.h
 * @brief Unary factor for determining the transformation between the given trajectories of two robots
 * @author Vadim Indelman
 **/

#pragma once

#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/linear/GaussianFactor.h>
#include <gtsam/base/Testable.h>
#include <gtsam/base/Lie.h>

#include <iostream>
#include <ostream>
#include <stdexcept>

namespace gtsam {
/**
 * A unary factor on the transformation between the reference frames (origins) of two
 * robots, given fixed trajectory estimates for both robots. Inlier/outlier measurement
 * hypotheses are handled via EM: indicator probabilities are computed in the E step and
 * used to weight the whitened measurement error.
 * @tparam VALUE the Value type
 * @addtogroup SLAM
 */
template<class VALUE>
class TransformBtwRobotsUnaryFactorEM: public NonlinearFactor {

public:
  typedef VALUE T;

private:
  typedef TransformBtwRobotsUnaryFactorEM<VALUE> This;
  typedef NonlinearFactor Base;

  Key key_;
  VALUE measured_; /** The measurement */

  Values valA_; // given values for robot A's map / trajectory
  Values valB_; // given values for robot B's map / trajectory
  Key keyA_;    // key of robot A to which the measurement refers
  Key keyB_;    // key of robot B to which the measurement refers

  // TODO: create function to update valA_ and valB_

  SharedGaussian model_inlier_;
  SharedGaussian model_outlier_;

  double prior_inlier_;
  double prior_outlier_;

  bool flag_bump_up_near_zero_probs_;
  mutable bool start_with_M_step_;

  /** concept check by type */
  GTSAM_CONCEPT_LIE_TYPE(T)
  GTSAM_CONCEPT_TESTABLE_TYPE(T)

public:
  // shorthand for a smart pointer to a factor
  typedef typename boost::shared_ptr<TransformBtwRobotsUnaryFactorEM> shared_ptr;
  /** default constructor - only use for serialization */
  TransformBtwRobotsUnaryFactorEM() {}

  /** Constructor */
  TransformBtwRobotsUnaryFactorEM(Key key, const VALUE& measured, Key keyA, Key keyB,
      const Values& valA, const Values& valB,
      const SharedGaussian& model_inlier, const SharedGaussian& model_outlier,
      const double prior_inlier, const double prior_outlier,
      const bool flag_bump_up_near_zero_probs = false,
      const bool start_with_M_step = false) :
        Base(cref_list_of<1>(key)), key_(key), measured_(measured), keyA_(keyA), keyB_(keyB),
        model_inlier_(model_inlier), model_outlier_(model_outlier),
        prior_inlier_(prior_inlier), prior_outlier_(prior_outlier),
        flag_bump_up_near_zero_probs_(flag_bump_up_near_zero_probs),
        start_with_M_step_(start_with_M_step) {
    setValAValB(valA, valB);
  }
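  // A minimal usage sketch (not from the original source): how such a factor might be
  // constructed for Pose2 trajectories. The names kTransform, kA, kB, measuredRelPose,
  // valuesA and valuesB are hypothetical, introduced here only for illustration.
  //
  //   gtsam::Values valuesA, valuesB;                       // fixed trajectories of robots A and B
  //   valuesA.insert(kA, gtsam::Pose2(1.0, 0.0, 0.0));
  //   valuesB.insert(kB, gtsam::Pose2(2.0, 1.0, 0.0));
  //   gtsam::SharedGaussian inlierModel  = gtsam::noiseModel::Isotropic::Sigma(3, 0.1);
  //   gtsam::SharedGaussian outlierModel = gtsam::noiseModel::Isotropic::Sigma(3, 10.0);
  //   TransformBtwRobotsUnaryFactorEM<gtsam::Pose2> factor(
  //       kTransform, measuredRelPose, kA, kB, valuesA, valuesB,
  //       inlierModel, outlierModel, 0.5 /*prior_inlier*/, 0.5 /*prior_outlier*/);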
  ~TransformBtwRobotsUnaryFactorEM() override {}

  /** Clone */
  NonlinearFactor::shared_ptr clone() const override { return boost::make_shared<This>(*this); }

  /** implement functions needed for Testable */

  /** print */
  void print(const std::string& s, const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override {
    std::cout << s << "TransformBtwRobotsUnaryFactorEM("
        << keyFormatter(key_) << ")\n";
    std::cout << "MR between factor keys: "
        << keyFormatter(keyA_) << ","
        << keyFormatter(keyB_) << "\n";
    measured_.print(" measured: ");
    model_inlier_->print(" noise model inlier: ");
    model_outlier_->print(" noise model outlier: ");
    std::cout << "(prior_inlier, prior_outlier) = ("
        << prior_inlier_ << ","
        << prior_outlier_ << ")\n";
    // Base::print(s, keyFormatter);
  }

  /** equals */
  bool equals(const NonlinearFactor& f, double tol = 1e-9) const override {
    const This* t = dynamic_cast<const This*>(&f);
    if (t && Base::equals(f))
      return key_ == t->key_ && measured_.equals(t->measured_) &&
          // model_inlier_->equals(t->model_inlier_) &&   // TODO: fix here
          // model_outlier_->equals(t->model_outlier_) &&
          prior_outlier_ == t->prior_outlier_ && prior_inlier_ == t->prior_inlier_;
    else
      return false;
  }
  /** implement functions needed to derive from Factor */

  /* ************************************************************************* */
  void setValAValB(const Values& valA, const Values& valB) {
    if ((!valA.exists(keyA_)) && (!valB.exists(keyA_)) && (!valA.exists(keyB_)) && (!valB.exists(keyB_)))
      throw std::runtime_error("TransformBtwRobotsUnaryFactorEM: neither valA nor valB contains keyA_/keyB_");

    // TODO: make sure the two keys belong to different robots

    if (valA.exists(keyA_)) {
      valA_ = valA;
      valB_ = valB;
    }
    else {
      valA_ = valB;
      valB_ = valA;
    }
  }
  /* ************************************************************************* */
  double error(const Values& x) const override {
    return whitenedError(x).squaredNorm();
  }

  /* ************************************************************************* */
  /**
   * Linearize the factor to get a GaussianFactor,
   * \f$ Ax - b \approx h(x + \delta x) - z = h(x) + A \delta x - z \f$
   * Hence \f$ b = z - h(x) = -\mathtt{error\_vector}(x) \f$
   */
  /* This version of linearize recalculates the noise model each time */
  boost::shared_ptr<GaussianFactor> linearize(const Values& x) const override {
    // Only linearize if the factor is active
    if (!this->active(x))
      return boost::shared_ptr<JacobianFactor>();

    //std::cout<<"About to linearize"<<std::endl;
    Matrix A1;
    std::vector<Matrix> A(this->size());
    Vector b = -whitenedError(x, A);
    A1 = A[0];
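    // whitenedError() already whitens (and EM-weights) the residual and the Jacobian,
    // so the JacobianFactor below is built with a unit noise model.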
    return GaussianFactor::shared_ptr(
        new JacobianFactor(key_, A1, b, noiseModel::Unit::Create(b.size())));
  }
  /* ************************************************************************* */
  Vector whitenedError(const Values& x,
      boost::optional<std::vector<Matrix>&> H = boost::none) const {

    bool debug = true;

    Matrix H_compose, H_between1, H_dummy;
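    // Naming convention: X_T_Y is the pose of frame Y expressed in frame X, so e.g.
    // orgA_T_currB below is robot B's current pose expressed in robot A's origin frame.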
    T orgA_T_currA = valA_.at<T>(keyA_);
    T orgB_T_currB = valB_.at<T>(keyB_);
    T orgA_T_orgB  = x.at<T>(key_);

    T orgA_T_currB = orgA_T_orgB.compose(orgB_T_currB, H_compose, H_dummy);
    T currA_T_currB_pred = orgA_T_currA.between(orgA_T_currB, H_dummy, H_between1);
    T currA_T_currB_msr = measured_;
    Vector err = currA_T_currB_msr.localCoordinates(currA_T_currB_pred);

    // Calculate indicator probabilities (inlier and outlier)
    Vector p_inlier_outlier = calcIndicatorProb(x, err);
    double p_inlier  = p_inlier_outlier[0];
    double p_outlier = p_inlier_outlier[1];

    if (start_with_M_step_) {
      start_with_M_step_ = false;
      p_inlier  = 0.5;
      p_outlier = 0.5;
    }

    Vector err_wh_inlier  = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier  = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();

    Vector err_wh_eq;
    err_wh_eq.resize(err_wh_inlier.rows() * 2);
    err_wh_eq << sqrt(p_inlier) * err_wh_inlier.array(), sqrt(p_outlier) * err_wh_outlier.array();
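    // Each whitened residual is scaled by sqrt(p), so error(x) = ||err_wh_eq||^2
    // = p_inlier*||err_wh_inlier||^2 + p_outlier*||err_wh_outlier||^2, i.e. the
    // expected whitened squared error under the current indicator probabilities.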
    Matrix H_unwh = H_compose * H_between1;

    if (H) {
      Matrix H_inlier  = sqrt(p_inlier)  * model_inlier_->Whiten(H_unwh);
      Matrix H_outlier = sqrt(p_outlier) * model_outlier_->Whiten(H_unwh);
      Matrix H_aug = stack(2, &H_inlier, &H_outlier);

      (*H)[0].resize(H_aug.rows(), H_aug.cols());
      (*H)[0] = H_aug;
    }

    if (debug) {
      // std::cout<<"H_compose - rows, cols: "<<H_compose.rows()<<", "<<H_compose.cols()<<std::endl;
      // std::cout<<"H_between1 - rows, cols: "<<H_between1.rows()<<", "<<H_between1.cols()<<std::endl;
      // std::cout<<"H_unwh - rows, cols: "<<H_unwh.rows()<<", "<<H_unwh.cols()<<std::endl;
      // std::cout<<"H_unwh: "<<std::endl<<H_unwh<<std::endl;
    }

    return err_wh_eq;
  }
  /* ************************************************************************* */
  Vector calcIndicatorProb(const Values& x) const {
    Vector err = unwhitenedError(x);
    return this->calcIndicatorProb(x, err);
  }
  /* ************************************************************************* */
  Vector calcIndicatorProb(const Values& x, const Vector& err) const {

    // Calculate indicator probabilities (inlier and outlier)
    Vector err_wh_inlier  = model_inlier_->whiten(err);
    Vector err_wh_outlier = model_outlier_->whiten(err);

    Matrix invCov_inlier  = model_inlier_->R().transpose() * model_inlier_->R();
    Matrix invCov_outlier = model_outlier_->R().transpose() * model_outlier_->R();
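    // Bayes' rule: each indicator probability is its prior times a Gaussian likelihood
    // term, normalized below so that the two probabilities sum to one. Note the code
    // scales by sqrt(invCov.norm()) (Frobenius norm) rather than the usual sqrt(det(invCov)).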
    double p_inlier  = prior_inlier_  * sqrt(invCov_inlier.norm())  * exp(-0.5 * err_wh_inlier.dot(err_wh_inlier));
    double p_outlier = prior_outlier_ * sqrt(invCov_outlier.norm()) * exp(-0.5 * err_wh_outlier.dot(err_wh_outlier));

    double sumP = p_inlier + p_outlier;
    p_inlier  /= sumP;
    p_outlier /= sumP;

    if (flag_bump_up_near_zero_probs_) {
      // Bump up near-zero probabilities (as in linerFlow.h)
      double minP = 0.05; // 0.1 split across the 2 indicator variables
      if (p_inlier < minP || p_outlier < minP) {
        if (p_inlier < minP)
          p_inlier = minP;
        if (p_outlier < minP)
          p_outlier = minP;
        sumP = p_inlier + p_outlier;
        p_inlier  /= sumP;
        p_outlier /= sumP;
      }
    }

    return (Vector(2) << p_inlier, p_outlier).finished();
  }
  /* ************************************************************************* */
  Vector unwhitenedError(const Values& x) const {

    T orgA_T_currA = valA_.at<T>(keyA_);
    T orgB_T_currB = valB_.at<T>(keyB_);
    T orgA_T_orgB  = x.at<T>(key_);

    T orgA_T_currB = orgA_T_orgB.compose(orgB_T_currB);
    T currA_T_currB_pred = orgA_T_currA.between(orgA_T_currB);
    T currA_T_currB_msr = measured_;

    return currA_T_currB_msr.localCoordinates(currA_T_currB_pred);
  }

  /* ************************************************************************* */
  SharedGaussian get_model_inlier() const {
    return model_inlier_;
  }

  /* ************************************************************************* */
  SharedGaussian get_model_outlier() const {
    return model_outlier_;
  }

  /* ************************************************************************* */
  Matrix get_model_inlier_cov() const {
    return (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
  }

  /* ************************************************************************* */
  Matrix get_model_outlier_cov() const {
    return (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
  }
  /* ************************************************************************* */
  void updateNoiseModels(const Values& values, const Marginals& marginals) {
    /* Version that takes precomputed marginals, so repeated updates do not
     * recompute the marginalization. */
    KeyVector keys;
    keys.push_back(keyA_);
    keys.push_back(keyB_);

    JointMarginal joint_marginal12 = marginals.jointMarginalCovariance(keys);

    Matrix cov1  = joint_marginal12(keyA_, keyA_);
    Matrix cov2  = joint_marginal12(keyB_, keyB_);
    Matrix cov12 = joint_marginal12(keyA_, keyB_);

    updateNoiseModels_givenCovs(values, cov1, cov2, cov12);
  }
  /* ************************************************************************* */
  void updateNoiseModels(const Values& values, const NonlinearFactorGraph& graph) {
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in the robot
     * trajectories (these models are used in the E step, where the indicator
     * probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are the Jacobians
     * of the unwhitened error w.r.t. the states, and R is the measurement covariance
     * (inlier or outlier mode).
     *
     * TODO: improve efficiency (information form)
     */

    // get the joint covariance of the involved states
    Marginals marginals(graph, values, Marginals::QR);

    this->updateNoiseModels(values, marginals);
  }
  /* ************************************************************************* */
  void updateNoiseModels_givenCovs(const Values& values, const Matrix& cov1, const Matrix& cov2, const Matrix& cov12) {
    /* Update model_inlier_ and model_outlier_ to account for uncertainty in the robot
     * trajectories (these models are used in the E step, where the indicator
     * probabilities are calculated).
     *
     * Principle: R += [H1 H2] * joint_cov12 * [H1 H2]', where H1, H2 are the Jacobians
     * of the unwhitened error w.r.t. the states, and R is the measurement covariance
     * (inlier or outlier mode).
     *
     * TODO: improve efficiency (information form)
     */
    const T& p1 = values.at<T>(keyA_);
    const T& p2 = values.at<T>(keyB_);

    Matrix H1, H2;
    p1.between(p2, H1, H2); // h(x)

    Matrix H;
    H.resize(H1.rows(), H1.cols() + H2.cols());
    H << H1, H2; // H = [H1 H2]

    Matrix joint_cov;
    joint_cov.resize(cov1.rows() + cov2.rows(), cov1.cols() + cov2.cols());
    joint_cov << cov1, cov12,
                 cov12.transpose(), cov2;

    Matrix cov_state = H * joint_cov * H.transpose();
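    // cov_state is the [H1 H2] * joint_cov12 * [H1 H2]' term described above: the
    // trajectory uncertainty propagated through the measurement function, which is
    // added to both measurement covariances below.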
    // model_inlier_->print("before:");

    // update inlier and outlier noise models
    Matrix covRinlier = (model_inlier_->R().transpose() * model_inlier_->R()).inverse();
    model_inlier_ = noiseModel::Gaussian::Covariance(covRinlier + cov_state);

    Matrix covRoutlier = (model_outlier_->R().transpose() * model_outlier_->R()).inverse();
    model_outlier_ = noiseModel::Gaussian::Covariance(covRoutlier + cov_state);

    // model_inlier_->print("after:");
    // std::cout<<"covRinlier + cov_state: "<<covRinlier + cov_state<<std::endl;
  }

  /* ************************************************************************* */
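  // The factor dimension equals that of the stacked (inlier + outlier) whitened error:
  // R is square, so rows + cols is twice the single-measurement dimension.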
  size_t dim() const override {
    return model_inlier_->R().rows() + model_inlier_->R().cols();
  }
private:

  /** Serialization function */
  friend class boost::serialization::access;
  template<class ARCHIVE>
  void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
    ar & boost::serialization::make_nvp("NonlinearFactor",
        boost::serialization::base_object<Base>(*this));
    //ar & BOOST_SERIALIZATION_NVP(measured_);
  }

}; // \class TransformBtwRobotsUnaryFactorEM

/// traits
template<class VALUE>
struct traits<TransformBtwRobotsUnaryFactorEM<VALUE> > :
    public Testable<TransformBtwRobotsUnaryFactorEM<VALUE> > {
};

} /// namespace gtsam