// g2o - General Graph Optimization
// Copyright (C) 2011 R. Kuemmerle, G. Grisetti, W. Burgard
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
//   notice, this list of conditions and the following disclaimer in the
//   documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef G2O_LINEAR_SOLVER_CHOLMOD
#define G2O_LINEAR_SOLVER_CHOLMOD

#include <cholmod.h>

#include <cassert>
#include <cstring>
#include <functional>
#include <iostream>
#include <string>

#include "g2o/core/batch_stats.h"
#include "g2o/core/linear_solver.h"
#include "g2o/core/marginal_covariance_cholesky.h"
#include "g2o/stuff/sparse_helper.h"
#include "g2o/stuff/timeutil.h"

namespace g2o {

/**
 * \brief Our extension of the CHOLMOD matrix struct
 */
struct CholmodExt : public cholmod_sparse {
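  // note: unlike a plain cholmod_sparse, this struct owns its p/i/x arrays
  // (allocated in LinearSolverCholmod::fillCholmodExt) and frees them in the destructor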
  CholmodExt() {
    nzmax = 0;
    nrow = 0;
    ncol = 0;
    p = nullptr;
    i = nullptr;
    nz = nullptr;
    x = nullptr;
    z = nullptr;
    stype = 1;  // upper triangular block only
    itype = CHOLMOD_INT;
    xtype = CHOLMOD_REAL;
    dtype = CHOLMOD_DOUBLE;
    sorted = 1;
    packed = 1;
    columnsAllocated = 0;
  }
  ~CholmodExt() {
    delete[](int*) p;
    p = nullptr;
    delete[](double*) x;
    x = nullptr;
    delete[](int*) i;
    i = nullptr;
  }
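  //! number of columns for which the column pointer array p has been allocated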
  size_t columnsAllocated;
};

/**
 * \brief Linear solver for Ax = b using the sparse Cholesky factorization of CHOLMOD
 */
template <typename MatrixType>
class LinearSolverCholmod : public LinearSolverCCS<MatrixType> {
 public:
  LinearSolverCholmod() : LinearSolverCCS<MatrixType>(), _cholmodFactor(nullptr) {
    cholmod_start(&_cholmodCommon);

    // setup ordering strategy
    _cholmodCommon.nmethods = 1;
    _cholmodCommon.method[0].ordering = CHOLMOD_AMD;  // CHOLMOD_COLAMD
    //_cholmodCommon.postorder = 0;

    _cholmodCommon.supernodal = CHOLMOD_AUTO;  // CHOLMOD_SUPERNODAL; //CHOLMOD_SIMPLICIAL;
  }

  LinearSolverCholmod(LinearSolverCholmod<MatrixType> const&) = delete;
  LinearSolverCholmod& operator=(LinearSolverCholmod<MatrixType> const&) = delete;

  virtual ~LinearSolverCholmod() {
    freeCholmodFactor();
    cholmod_finish(&_cholmodCommon);
  }

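  //! drop a cached Cholesky factor so that the next solve starts with a fresh symbolic decomposition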
  virtual bool init() {
    freeCholmodFactor();
    return true;
  }

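  //! solve Ax = b: copy A into the CHOLMOD structure, factorize it, and solve for x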
  bool solve(const SparseBlockMatrix<MatrixType>& A, double* x, double* b) {
    double t;
    bool cholState = computeCholmodFactor(A, t);
    if (!cholState) return false;

    // setting up b for calling cholmod
    cholmod_dense bcholmod;
    bcholmod.nrow = bcholmod.d = _cholmodSparse.nrow;
    bcholmod.ncol = 1;
    bcholmod.x = b;
    bcholmod.xtype = CHOLMOD_REAL;
    bcholmod.dtype = CHOLMOD_DOUBLE;
    cholmod_dense* xcholmod = cholmod_solve(CHOLMOD_A, _cholmodFactor, &bcholmod, &_cholmodCommon);
    memcpy(x, xcholmod->x, sizeof(double) * bcholmod.nrow);  // copy back to our array
    cholmod_free_dense(&xcholmod, &_cholmodCommon);

    G2OBatchStatistics* globalStats = G2OBatchStatistics::globalStats();
    if (globalStats) {
      globalStats->timeNumericDecomposition = get_monotonic_time() - t;
      globalStats->choleskyNNZ = static_cast<size_t>(_cholmodCommon.method[0].lnz);
    }

    return true;
  }

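  //! write the current system matrix (upper triangular part, CCS layout) to a file, e.g. for debugging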
  virtual bool saveMatrix(const std::string& fileName) {
    writeCCSMatrix(fileName, _cholmodSparse.nrow, _cholmodSparse.ncol, (int*)_cholmodSparse.p,
                   (int*)_cholmodSparse.i, (double*)_cholmodSparse.x, true);
    return true;
  }

 protected:
  // temporaries used for the Cholesky factorization via CHOLMOD
  cholmod_common _cholmodCommon;
  CholmodExt _cholmodSparse;
  cholmod_factor* _cholmodFactor;
  MatrixStructure _matrixStructure;
  VectorXI _scalarPermutation, _blockPermutation;

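  //! compute the symbolic decomposition of A, either with a plain scalar AMD ordering or,
  //! if blockOrdering() is set, with an AMD ordering on the block structure that is
  //! expanded to the scalar variables afterwards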
  void computeSymbolicDecomposition(const SparseBlockMatrix<MatrixType>& A) {
    double t = get_monotonic_time();
    if (!this->blockOrdering()) {
      // setup ordering strategy
      _cholmodCommon.nmethods = 1;
      _cholmodCommon.method[0].ordering = CHOLMOD_AMD;                     // CHOLMOD_COLAMD
      _cholmodFactor = cholmod_analyze(&_cholmodSparse, &_cholmodCommon);  // symbolic factorization
    } else {
      A.fillBlockStructure(_matrixStructure);

      // get the ordering for the block matrix
      if (_blockPermutation.size() == 0) _blockPermutation.resize(_matrixStructure.n);
      if (_blockPermutation.size() < _matrixStructure.n)  // double space if resizing
        _blockPermutation.resize(2 * _matrixStructure.n);

      // prepare AMD call via CHOLMOD
      cholmod_sparse auxCholmodSparse;
      auxCholmodSparse.nzmax = _matrixStructure.nzMax();
      auxCholmodSparse.nrow = auxCholmodSparse.ncol = _matrixStructure.n;
      auxCholmodSparse.p = _matrixStructure.Ap;
      auxCholmodSparse.i = _matrixStructure.Aii;
      auxCholmodSparse.nz = nullptr;
      auxCholmodSparse.x = nullptr;
      auxCholmodSparse.z = nullptr;
      auxCholmodSparse.stype = 1;
      auxCholmodSparse.xtype = CHOLMOD_PATTERN;
      auxCholmodSparse.itype = CHOLMOD_INT;
      auxCholmodSparse.dtype = CHOLMOD_DOUBLE;
      auxCholmodSparse.sorted = 1;
      auxCholmodSparse.packed = 1;
      int amdStatus =
          cholmod_amd(&auxCholmodSparse, NULL, 0, _blockPermutation.data(), &_cholmodCommon);
      if (!amdStatus) return;

      // blow up the permutation to the scalar matrix
      this->blockToScalarPermutation(A, _blockPermutation, _scalarPermutation);

      // apply the ordering
      _cholmodCommon.nmethods = 1;
      _cholmodCommon.method[0].ordering = CHOLMOD_GIVEN;
      _cholmodFactor =
          cholmod_analyze_p(&_cholmodSparse, _scalarPermutation.data(), NULL, 0, &_cholmodCommon);
    }
    G2OBatchStatistics* globalStats = G2OBatchStatistics::globalStats();
    if (globalStats) globalStats->timeSymbolicDecomposition = get_monotonic_time() - t;
  }

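  //! copy the sparse block matrix A into the CHOLMOD compressed column storage; with
  //! onlyValues set, only the value array x is refreshed and the sparsity pattern is kept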
  void fillCholmodExt(const SparseBlockMatrix<MatrixType>& A, bool onlyValues) {
    if (!onlyValues) this->initMatrixStructure(A);
    size_t m = A.rows();
    size_t n = A.cols();
    assert(m > 0 && n > 0 && "Hessian has 0 rows/cols");

    if (_cholmodSparse.columnsAllocated < n) {
      // pre-allocate more space if re-allocating
      _cholmodSparse.columnsAllocated = _cholmodSparse.columnsAllocated == 0 ? n : 2 * n;
      delete[](int*) _cholmodSparse.p;
      _cholmodSparse.p = new int[_cholmodSparse.columnsAllocated + 1];
    }
    if (!onlyValues) {
      size_t nzmax = A.nonZeros();
      if (_cholmodSparse.nzmax < nzmax) {
        // pre-allocate more space if re-allocating
        _cholmodSparse.nzmax = _cholmodSparse.nzmax == 0 ? nzmax : 2 * nzmax;
        delete[](double*) _cholmodSparse.x;
        delete[](int*) _cholmodSparse.i;
        _cholmodSparse.i = new int[_cholmodSparse.nzmax];
        _cholmodSparse.x = new double[_cholmodSparse.nzmax];
      }
    }
    _cholmodSparse.ncol = n;
    _cholmodSparse.nrow = m;

    if (onlyValues)
      this->_ccsMatrix->fillCCS((double*)_cholmodSparse.x, true);
    else
      this->_ccsMatrix->fillCCS((int*)_cholmodSparse.p, (int*)_cholmodSparse.i,
                                (double*)_cholmodSparse.x, true);
  }

  //! compute the cholmodFactor for the given matrix A
  bool computeCholmodFactor(const SparseBlockMatrix<MatrixType>& A, double& t) {
    // _cholmodFactor acts as a flag: if it does not exist yet, copy the whole structure,
    // otherwise refresh only the values
    fillCholmodExt(A, _cholmodFactor != nullptr);

    if (_cholmodFactor == nullptr) {
      computeSymbolicDecomposition(A);
      assert(_cholmodFactor != nullptr && "Symbolic cholesky failed");
    }
    t = get_monotonic_time();

    cholmod_factorize(&_cholmodSparse, _cholmodFactor, &_cholmodCommon);
    if (_cholmodCommon.status == CHOLMOD_NOT_POSDEF) {
      if (this->writeDebug()) {
        std::cerr << "Cholesky failure, writing debug.txt (Hessian loadable by Octave)"
                  << std::endl;
        saveMatrix("debug.txt");
      }
      return false;
    }
    return true;
  }

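  //! compute marginal covariance blocks: factorize A, convert the factor to a simplicial
  //! packed LL^T, and pass it to a MarginalCovarianceCholesky via the compute callback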
  bool solveBlocks_impl(const SparseBlockMatrix<MatrixType>& A,
                        std::function<void(MarginalCovarianceCholesky&)> compute) {
    double t;
    bool cholState = computeCholmodFactor(A, t);
    if (!cholState) return false;

    // convert the factorization to LL, simplicial, packed, monotonic
    int change_status =
        cholmod_change_factor(CHOLMOD_REAL, 1, 0, 1, 1, _cholmodFactor, &_cholmodCommon);
    if (!change_status) return false;
    assert(_cholmodFactor->is_ll && !_cholmodFactor->is_super && _cholmodFactor->is_monotonic &&
           "Cholesky factor has wrong format");

    // invert the permutation
    int* p = (int*)_cholmodFactor->Perm;
    VectorXI pinv(_cholmodSparse.ncol);
    for (size_t i = 0; i < _cholmodSparse.ncol; ++i) pinv(p[i]) = i;

    // compute the marginal covariance
    MarginalCovarianceCholesky mcc;
    mcc.setCholeskyFactor(_cholmodSparse.ncol, (int*)_cholmodFactor->p, (int*)_cholmodFactor->i,
                          (double*)_cholmodFactor->x, pinv.data());
    compute(mcc);

    G2OBatchStatistics* globalStats = G2OBatchStatistics::globalStats();
    if (globalStats) {
      globalStats->choleskyNNZ =
          static_cast<size_t>(_cholmodCommon.method[_cholmodCommon.selected].lnz);
    }
    return true;
  }

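  //! free the cached Cholesky factor; computeCholmodFactor() will then redo the symbolic decomposition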
  void freeCholmodFactor() {
    if (_cholmodFactor != nullptr) {
      cholmod_free_factor(&_cholmodFactor, &_cholmodCommon);
      _cholmodFactor = nullptr;
    }
  }
};
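// Typical usage, shown as a minimal sketch: it assumes the std::unique_ptr based g2o
// interface and the predefined BlockSolver_6_3 / OptimizationAlgorithmLevenberg types;
// adapt the names to the g2o version actually in use.
//
//   auto linearSolver =
//       std::make_unique<g2o::LinearSolverCholmod<g2o::BlockSolver_6_3::PoseMatrixType>>();
//   auto blockSolver = std::make_unique<g2o::BlockSolver_6_3>(std::move(linearSolver));
//   optimizer.setAlgorithm(new g2o::OptimizationAlgorithmLevenberg(std::move(blockSolver)));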

}  // namespace g2o
#endif