diff --git a/include/core/math/covariance.h b/include/core/math/covariance.h
new file mode 100644
index 0000000000000000000000000000000000000000..4721c3def15ec0f5cdf8371387daf989402dcc12
--- /dev/null
+++ b/include/core/math/covariance.h
@@ -0,0 +1,108 @@
+/**
+ * \file covariance.h
+ * \brief Helpers to check and enforce covariance-matrix properties.
+ *
+ *  Created on: Feb 26, 2020
+ *  \author jsola
+ */
+
+#ifndef MATH_COVARIANCE_H_
+#define MATH_COVARIANCE_H_
+
+// wolf::Constants (EPS, EPS_SMALL) and the WOLF_ERROR macro are assumed to be
+// provided by the project's common header below.
+#include "core/common/wolf.h"
+
+#include <Eigen/Dense>
+
+#include <cassert>
+
+namespace wolf{
+
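+/**
+ * \brief Check whether a square matrix is symmetric, up to the tolerance eps.
+ *
+ * Compares M against its transpose with Eigen's isApprox().
+ *
+ * Illustrative usage sketch (assuming a plain 3x3 double matrix):
+ * \code
+ * Eigen::Matrix3d Q = Eigen::Matrix3d::Identity() * 0.1;
+ * bool sym = wolf::isSymmetric(Q); // true
+ * \endcode
+ */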
+template <typename T, int N, int RC>
+inline bool isSymmetric(const Eigen::Matrix<T, N, N, RC>& M,
+                        const T& eps = Constants::EPS)
+{
+  return M.isApprox(M.transpose(), eps);
+}
+
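+/**
+ * \brief Check whether a self-adjoint matrix is positive semi-definite,
+ *        i.e. all its eigenvalues are at least eps.
+ *
+ * Returns false if the eigendecomposition fails.
+ *
+ * Illustrative usage sketch:
+ * \code
+ * Eigen::Matrix2d P;
+ * P << 1.0,  0.0,
+ *      0.0, -1e-3;                              // one negative eigenvalue
+ * bool psd = wolf::isPositiveSemiDefinite(P);   // false
+ * \endcode
+ */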
+template <typename T, int N, int RC>
+inline bool isPositiveSemiDefinite(const Eigen::Matrix<T, N, N, RC>& M, const T& eps = Constants::EPS)
+{
+  Eigen::SelfAdjointEigenSolver<Eigen::Matrix<T, N, N, RC> > eigensolver(M);
+
+  if (eigensolver.info() == Eigen::Success)
+  {
+    // All eigenvalues must be at least eps:
+    return (eigensolver.eigenvalues().array() >= eps).all();
+  }
+
+  return false;
+}
+
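+/**
+ * \brief Check whether a matrix is a valid covariance matrix,
+ *        i.e. symmetric and positive semi-definite.
+ *
+ * Illustrative usage sketch:
+ * \code
+ * Eigen::Matrix3d P = Eigen::Vector3d(0.1, 0.1, 0.01).asDiagonal();
+ * bool ok = wolf::isCovariance(P); // true
+ * \endcode
+ */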
+template <typename T, int N, int RC>
+inline bool isCovariance(const Eigen::Matrix<T, N, N, RC>& M, const T& eps = Constants::EPS)
+{
+  return isSymmetric(M, eps) && isPositiveSemiDefinite(M, eps);
+}
+
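+// Assertion helpers, compiled out when NDEBUG is defined:
+//  - WOLF_ASSERT_COVARIANCE_MATRIX checks symmetry and positive semi-definiteness
+//    with the Constants::EPS_SMALL tolerance.
+//  - WOLF_ASSERT_INFORMATION_MATRIX performs the same check with a zero tolerance.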
+#define WOLF_ASSERT_COVARIANCE_MATRIX(x) \
+  assert(wolf::isCovariance(x, wolf::Constants::EPS_SMALL) && "Not a covariance");
+
+#define WOLF_ASSERT_INFORMATION_MATRIX(x) \
+  assert(wolf::isCovariance(x, double(0.0)) && "Not an information matrix");
+
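+/**
+ * \brief Make the matrix M positive definite in place.
+ *
+ * Clamps the eigenvalues of M to at least eps and rebuilds M from its
+ * eigendecomposition, increasing the clamping threshold tenfold on each pass
+ * until no eigenvalue remains below eps.
+ *
+ * \return true if M was modified, false if it already satisfied the eigenvalue
+ *         bound (or if the eigendecomposition failed).
+ *
+ * Illustrative usage sketch:
+ * \code
+ * Eigen::Matrix2d P;
+ * P << 1.0, 1.0,
+ *      1.0, 1.0 - 1e-12;              // nearly singular, slightly indefinite
+ * bool fixed = wolf::makePosDef(P);   // true: P is now positive definite
+ * \endcode
+ */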
+template <typename T, int N, int RC>
+inline bool makePosDef(Eigen::Matrix<T,N,N,RC>& M, const T& eps = Constants::EPS)
+{
+    Eigen::SelfAdjointEigenSolver<Eigen::Matrix<T,N,N,RC> > eigensolver(M);
+
+    if (eigensolver.info() == Eigen::Success)
+    {
+        // Clamp the eigenvalues from below until all of them reach eps,
+        // increasing the clamping threshold tenfold at each pass.
+        T epsilon = eps;
+        while ((eigensolver.eigenvalues().array() < eps).any())
+        {
+            // Rebuild M from its eigendecomposition with eigenvalues clamped to epsilon
+            M = eigensolver.eigenvectors() *
+                eigensolver.eigenvalues().cwiseMax(epsilon).asDiagonal() *
+                eigensolver.eigenvectors().transpose();
+            eigensolver.compute(M);
+            epsilon *= T(10);
+        }
+        WOLF_ASSERT_COVARIANCE_MATRIX(M);
+
+        // True if M had to be modified
+        return epsilon != eps;
+    }
+    else
+        WOLF_ERROR("Couldn't compute covariance eigen decomposition");
+
+    return false;
+}
+
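+/**
+ * \brief Compute a square-root factor R of an information matrix such that
+ *        _info ~ R.transpose() * R.
+ *
+ * A Cholesky (LLT) factorization is tried first, yielding an upper-triangular R.
+ * If it does not reproduce the input (e.g. _info is not positive definite), the
+ * function falls back to an eigendecomposition with the eigenvalues clamped to
+ * Constants::EPS; in that case R is not triangular but still satisfies the
+ * square-root property approximately.
+ *
+ * Illustrative usage sketch:
+ * \code
+ * Eigen::MatrixXd info = Eigen::Vector2d(100.0, 25.0).asDiagonal();
+ * Eigen::MatrixXd R    = wolf::computeSqrtUpper(info);   // R.transpose() * R ~ info
+ * \endcode
+ */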
+inline Eigen::MatrixXd computeSqrtUpper(const Eigen::MatrixXd & _info)
+{
+    // impose symmetry
+    Eigen::MatrixXd info = _info.selfadjointView<Eigen::Upper>();
+
+    // Normal Cholesky factorization
+    Eigen::LLT<Eigen::MatrixXd> llt_of_info(info);
+    Eigen::MatrixXd R = llt_of_info.matrixU();
+
+    // Good factorization
+    if (info.isApprox(R.transpose() * R, Constants::EPS))
+        return R;
+
+    // Not a good factorization (e.g. info not positive definite): fall back to
+    // an eigendecomposition with the eigenvalues clamped to Constants::EPS
+    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> es(info);
+    Eigen::VectorXd eval = es.eigenvalues().cwiseMax(Constants::EPS);
+
+    R = eval.cwiseSqrt().asDiagonal() * es.eigenvectors().transpose();
+
+    return R;
+}
+
+} // namespace wolf
+
+#endif /* MATH_COVARIANCE_H_ */