diff --git a/src/feature_base.cpp b/src/feature_base.cpp
index 607b1b9cd09a1fe6871654490acd5f636ccd8296..ea510640f82eaea2d10277ce99052ca7b359b4f3 100644
--- a/src/feature_base.cpp
+++ b/src/feature_base.cpp
@@ -89,8 +89,7 @@ void FeatureBase::getConstraintList(ConstraintBaseList & _ctr_list)
 
 void FeatureBase::setMeasurementCovariance(const Eigen::MatrixXs & _meas_cov)
 {
-    // Check symmetry (soft)
-    assert((_meas_cov - _meas_cov.transpose()).cwiseAbs().maxCoeff() < Constants::EPS && "Not symmetric measurement covariance");
+    WOLF_ASSERT_COVARIANCE_MATRIX(_meas_cov);
 
     // set (ensuring strong symmetry)
     measurement_covariance_ = _meas_cov.selfadjointView<Eigen::Upper>();
@@ -104,11 +103,11 @@ void FeatureBase::setMeasurementCovariance(const Eigen::MatrixXs & _meas_cov)
 
 void FeatureBase::setMeasurementInformation(const Eigen::MatrixXs & _meas_info)
 {
-    assert(_meas_info.determinant() > Constants::EPS_SMALL && "Not positive definite measurement information");
-    assert((_meas_info - _meas_info.transpose()).cwiseAbs().maxCoeff() < Constants::EPS && "Not symmetric measurement information");
+    WOLF_ASSERT_INFORMATION_MATRIX(_meas_info);
 
     // set (ensuring strong symmetry)
     measurement_covariance_ = _meas_info.inverse().selfadjointView<Eigen::Upper>();
+    WOLF_ASSERT_COVARIANCE_MATRIX(measurement_covariance_);
 
     // Avoid singular covariance
     avoidSingularCovariance();
@@ -128,12 +127,12 @@ Eigen::MatrixXs FeatureBase::computeSqrtUpper(const Eigen::MatrixXs & _info) con
     Eigen::MatrixXs R = llt_of_info.matrixU();
 
     // Good factorization
-    if ((R.transpose() * R - info).cwiseAbs().maxCoeff() < Constants::EPS)
+    if (info.isApprox(R.transpose() * R, Constants::EPS))
         return R;
 
     // Not good factorization: SelfAdjointEigenSolver
     Eigen::SelfAdjointEigenSolver<Eigen::MatrixXs> es(info);
-    Eigen::VectorXs eval = es.eigenvalues().real().cwiseMax(0);
+    Eigen::VectorXs eval = es.eigenvalues().real().cwiseMax(Constants::EPS);
 
     R = eval.cwiseSqrt().asDiagonal() * es.eigenvectors().real().transpose();
 
@@ -150,7 +149,7 @@ void FeatureBase::avoidSingularCovariance()
         //std::cout << "pre\n" << measurement_covariance_ << std::endl;
         if ((eigensolver.eigenvalues().array() < Constants::EPS).all())
             measurement_covariance_= eigensolver.eigenvectors() *
-                                     (eigensolver.eigenvalues().array() > Constants::EPS).select(eigensolver.eigenvalues(), Constants::EPS).asDiagonal() *
+                                     eigensolver.eigenvalues().cwiseMax(Constants::EPS).asDiagonal() *
                                      eigensolver.eigenvectors().transpose();
         //std::cout << "post\n" << measurement_covariance_ << std::endl;
     }
diff --git a/src/wolf.h b/src/wolf.h
index f062e7b00aa29c723fbb227fa79af130108f1643..ad13c1fb341011720b7744d9b8732f8e25714403 100644
--- a/src/wolf.h
+++ b/src/wolf.h
@@ -303,28 +303,32 @@ bool isSymmetric(const Eigen::Matrix<T, N, N, RC>& M,
 }
 
 template <typename T, int N, int RC>
-bool isPositiveSemiDefinite(const Eigen::Matrix<T, N, N, RC>& M)
+bool isPositiveSemiDefinite(const Eigen::Matrix<T, N, N, RC>& M, const T& eps = Constants::EPS)
 {
     Eigen::SelfAdjointEigenSolver<Eigen::Matrix<T, N, N, RC> > eigensolver(M);
 
     if (eigensolver.info() == Eigen::Success)
     {
         // All eigenvalues must be >= 0:
-        return (eigensolver.eigenvalues().array() >= T(0)).all();
+        return (eigensolver.eigenvalues().array() >= T(eps)).all();
     }
 
     return false;
 }
 
 template <typename T, int N, int RC>
-bool isCovariance(const Eigen::Matrix<T, N, N, RC>& M)
+bool isCovariance(const Eigen::Matrix<T, N, N, RC>& M, const T& eps = Constants::EPS)
 {
-    return isSymmetric(M) && isPositiveSemiDefinite(M);
+    return isSymmetric(M) && isPositiveSemiDefinite(M, eps);
 }
 
 #define WOLF_ASSERT_COVARIANCE_MATRIX(x) \
     assert(x.determinant() > 0 && "Not positive definite measurement covariance"); \
-    assert(isCovariance(x) && "Not a covariance");
+    assert(isCovariance(x, Constants::EPS_SMALL) && "Not a covariance");
+
+#define WOLF_ASSERT_INFORMATION_MATRIX(x) \
+    assert(x.determinant() > 0 && "Not positive definite measurement information"); \
+    assert(isCovariance(x, 0.) && "Not an information matrix");
 
 
 //===================================================
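
For reference, below is a minimal usage sketch, not part of the patch, showing how the eps-parameterized checks and the two assert macros are meant to be exercised by callers such as FeatureBase::setMeasurementCovariance() and setMeasurementInformation(). It assumes the patched wolf.h is on the include path; the file name, main() and the "using namespace wolf" line are assumptions about the project layout, not part of the library.

// covariance_check_sketch.cpp -- illustrative only; file name and main() are hypothetical.
#include "wolf.h"        // isSymmetric, isCovariance, Constants, WOLF_ASSERT_* (as patched above)
#include <Eigen/Dense>
#include <iostream>

using namespace wolf;    // assumption: the helpers and Constants live in namespace wolf

int main()
{
    // Well-conditioned 2x2 covariance: symmetric, both eigenvalues well above EPS.
    Eigen::MatrixXs good_cov(2, 2);
    good_cov << 1.0, 0.1,
                0.1, 2.0;

    // Nearly singular matrix: symmetric, but its smallest eigenvalue (~5e-13)
    // falls below Constants::EPS, so the eps-parameterized check rejects it.
    Eigen::MatrixXs near_singular(2, 2);
    near_singular << 1.0, 1.0,
                     1.0, 1.0 + 1e-12;

    std::cout << std::boolalpha
              << isCovariance(good_cov, Constants::EPS_SMALL) << std::endl   // expected: true
              << isCovariance(near_singular, Constants::EPS)  << std::endl;  // expected: false

    // The macros bundle the determinant and isCovariance() checks, mirroring their
    // use in FeatureBase::setMeasurementCovariance() / setMeasurementInformation().
    Eigen::MatrixXs info = good_cov.inverse();
    WOLF_ASSERT_COVARIANCE_MATRIX(good_cov);
    WOLF_ASSERT_INFORMATION_MATRIX(info);

    return 0;
}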