Diffstat (limited to 'include/ceres/covariance.h')
-rw-r--r--  include/ceres/covariance.h  82
1 file changed, 22 insertions, 60 deletions
diff --git a/include/ceres/covariance.h b/include/ceres/covariance.h
index 83126b5..35fde4d 100644
--- a/include/ceres/covariance.h
+++ b/include/ceres/covariance.h
@@ -36,6 +36,7 @@
#include "ceres/internal/port.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/types.h"
+#include "ceres/internal/disable_warnings.h"
namespace ceres {
@@ -196,14 +197,14 @@ class CovarianceImpl;
// covariance.GetCovarianceBlock(y, y, covariance_yy)
// covariance.GetCovarianceBlock(x, y, covariance_xy)
//
-class Covariance {
+class CERES_EXPORT Covariance {
public:
- struct Options {
+ struct CERES_EXPORT Options {
Options()
#ifndef CERES_NO_SUITESPARSE
- : algorithm_type(SPARSE_QR),
+ : algorithm_type(SUITE_SPARSE_QR),
#else
- : algorithm_type(DENSE_SVD),
+ : algorithm_type(EIGEN_SPARSE_QR),
#endif
min_reciprocal_condition_number(1e-14),
null_space_rank(0),
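
For reference, a minimal usage sketch of the Covariance API touched by this patch (not part of the diff itself); it assumes a ceres::Problem named problem and two parameter blocks x and y with the illustrative sizes 2 and 3:

#include <utility>
#include <vector>

#include "ceres/covariance.h"
#include "ceres/problem.h"

// Sketch only: `problem`, `x`, `y` and their sizes (2 and 3) are
// illustrative assumptions, not defined by this header.
bool EstimateCovariance(ceres::Problem* problem, double* x, double* y) {
  ceres::Covariance::Options options;  // picks the sparse QR default above
  ceres::Covariance covariance(options);

  std::vector<std::pair<const double*, const double*> > covariance_blocks;
  covariance_blocks.push_back(std::make_pair(x, x));
  covariance_blocks.push_back(std::make_pair(y, y));
  covariance_blocks.push_back(std::make_pair(x, y));

  // Compute returns false if the covariance could not be estimated,
  // e.g. because the Jacobian was deemed rank deficient.
  if (!covariance.Compute(covariance_blocks, problem)) {
    return false;
  }

  double covariance_xx[2 * 2];
  double covariance_yy[3 * 3];
  double covariance_xy[2 * 3];
  covariance.GetCovarianceBlock(x, x, covariance_xx);
  covariance.GetCovarianceBlock(y, y, covariance_yy);
  covariance.GetCovarianceBlock(x, y, covariance_xy);
  return true;
}
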
@@ -228,47 +229,22 @@ class Covariance {
// for small to moderate sized problems. It can handle
// full-rank as well as rank deficient Jacobians.
//
- // 2. SPARSE_CHOLESKY uses the CHOLMOD sparse Cholesky
- // factorization library to compute the decomposition :
- //
- // R'R = J'J
- //
- // and then
- //
- // [J'J]^-1 = [R'R]^-1
- //
- // It a fast algorithm for sparse matrices that should be used
- // when the Jacobian matrix J is well conditioned. For
- // ill-conditioned matrices, this algorithm can fail
- // unpredictabily. This is because Cholesky factorization is
- // not a rank-revealing factorization, i.e., it cannot reliably
- // detect when the matrix being factorized is not of full
- // rank. SuiteSparse/CHOLMOD supplies a heuristic for checking
- // if the matrix is rank deficient (cholmod_rcond), but it is
- // only a heuristic and can have both false positive and false
- // negatives.
- //
- // Recent versions of SuiteSparse (>= 4.2.0) provide a much
- // more efficient method for solving for rows of the covariance
- // matrix. Therefore, if you are doing SPARSE_CHOLESKY, we
- // strongly recommend using a recent version of SuiteSparse.
- //
- // 3. SPARSE_QR uses the SuiteSparseQR sparse QR factorization
- // library to compute the decomposition
+ // 2. EIGEN_SPARSE_QR uses the sparse QR factorization algorithm
+ // in Eigen to compute the decomposition
//
// Q * R = J
//
// [J'J]^-1 = [R'R]^-1
//
- // It is a moderately fast algorithm for sparse matrices, which
- // at the price of more time and memory than the
- // SPARSE_CHOLESKY algorithm is numerically better behaved and
- // is rank revealing, i.e., it can reliably detect when the
- // Jacobian matrix is rank deficient.
+ // It is a moderately fast algorithm for sparse matrices.
//
- // Neither SPARSE_CHOLESKY or SPARSE_QR are capable of computing
- // the covariance if the Jacobian is rank deficient.
-
+ // 3. SUITE_SPARSE_QR uses the SuiteSparseQR sparse QR
+ // factorization algorithm. It uses dense linear algebra and is
+ // multi-threaded, so for large sparse matrices it is
+ // significantly faster than EIGEN_SPARSE_QR.
+ //
+ // Neither EIGEN_SPARSE_QR nor SUITE_SPARSE_QR is capable of
+ // computing the covariance if the Jacobian is rank deficient.
CovarianceAlgorithmType algorithm_type;
// If the Jacobian matrix is near singular, then inverting J'J
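
As a sketch of the trade-off described above (not part of the patch): only DENSE_SVD can handle a rank deficient Jacobian, so an explicit algorithm selection might look like the following; the flag expect_rank_deficiency is illustrative.

#include "ceres/covariance.h"

// Sketch only: `expect_rank_deficiency` is an illustrative flag, not part
// of the Ceres API.
ceres::Covariance::Options ChooseCovarianceAlgorithm(bool expect_rank_deficiency) {
  ceres::Covariance::Options options;
  if (expect_rank_deficiency) {
    // Only the dense SVD based algorithm handles rank deficient Jacobians.
    options.algorithm_type = ceres::DENSE_SVD;
  }
  // Otherwise keep the sparse QR default chosen in the constructor above.
  return options;
}
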
@@ -294,29 +270,13 @@ class Covariance {
// where min_sigma and max_sigma are the minimum and maximum
// singular values of J respectively.
//
- // 2. SPARSE_CHOLESKY
- //
- // cholmod_rcond < min_reciprocal_conditioner_number
- //
- // Here cholmod_rcond is a crude estimate of the reciprocal
- // condition number of J'J by using the maximum and minimum
- // diagonal entries of the Cholesky factor R. There are no
- // theoretical guarantees associated with this test. It can
- // give false positives and negatives. Use at your own
- // risk. The default value of min_reciprocal_condition_number
- // has been set to a conservative value, and sometimes the
- // Covariance::Compute may return false even if it is possible
- // to estimate the covariance reliably. In such cases, the user
- // should exercise their judgement before lowering the value of
- // min_reciprocal_condition_number.
- //
- // 3. SPARSE_QR
+ // 2. SUITE_SPARSE_QR and EIGEN_SPARSE_QR
//
// rank(J) < num_col(J)
//
// Here rank(J) is the estimate of the rank of J returned by the
- // SuiteSparseQR algorithm. It is a fairly reliable indication
- // of rank deficiency.
+ // sparse QR factorization algorithm. It is a fairly reliable
+ // indication of rank deficiency.
//
double min_reciprocal_condition_number;
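
A sketch (not part of the patch) of how this condition number test interacts with Compute; the threshold 1e-10 and the names problem and x are illustrative:

#include <utility>
#include <vector>

#include "ceres/covariance.h"
#include "ceres/problem.h"

// Sketch only: the threshold 1e-10 and the names `problem` and `x` are
// illustrative assumptions.
bool ComputeCovarianceIfWellConditioned(ceres::Problem* problem, double* x) {
  ceres::Covariance::Options options;
  options.algorithm_type = ceres::DENSE_SVD;
  // Fail if min_sigma / max_sigma < 1e-10, i.e. if the condition number
  // of J exceeds 1e10.
  options.min_reciprocal_condition_number = 1e-10;

  ceres::Covariance covariance(options);
  std::vector<std::pair<const double*, const double*> > blocks;
  blocks.push_back(std::make_pair(x, x));

  // Compute returns false when the rank or reciprocal condition number
  // test described above fails.
  return covariance.Compute(blocks, problem);
}
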
@@ -351,8 +311,8 @@ class Covariance {
//
// lambda_i / lambda_max < min_reciprocal_condition_number.
//
- // This option has no effect on the SPARSE_CHOLESKY or SPARSE_QR
- // algorithms.
+ // This option has no effect on the SUITE_SPARSE_QR and
+ // EIGEN_SPARSE_QR algorithms.
int null_space_rank;
int num_threads;
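
A sketch (not part of the patch) of options for a problem with a known null space; this only makes sense with DENSE_SVD, since null_space_rank is ignored by the QR algorithms, and the values 3 and 4 are illustrative:

#include "ceres/covariance.h"

// Sketch only: the null space rank of 3 and the thread count of 4 are
// illustrative values.
ceres::Covariance::Options TruncatedSvdCovarianceOptions() {
  ceres::Covariance::Options options;
  options.algorithm_type = ceres::DENSE_SVD;  // null_space_rank only affects DENSE_SVD
  options.null_space_rank = 3;                // drop the 3 smallest singular values when inverting J'J
  options.num_threads = 4;                    // threads used to evaluate the Jacobian and estimate the covariance
  return options;
}
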
@@ -419,4 +379,6 @@ class Covariance {
} // namespace ceres
+#include "ceres/internal/reenable_warnings.h"
+
#endif // CERES_PUBLIC_COVARIANCE_H_