xref: /aosp_15_r20/external/eigen/unsupported/Eigen/src/SparseExtra/DynamicSparseMatrix.h (revision bf2c37156dfe67e5dfebd6d394bad8b2ab5804d4)
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
9 
10 #ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
11 #define EIGEN_DYNAMIC_SPARSEMATRIX_H
12 
13 namespace Eigen {
14 
/** \deprecated use a SparseMatrix in an uncompressed mode
  *
  * \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purposes
  *
  * \param _Scalar the scalar type, i.e. the type of the coefficients
  *
  * Unlike SparseMatrix, this class provides a much higher degree of flexibility. In particular, it allows
  * random read/write accesses in log(rho*outer_size), where \c rho is the probability that a coefficient is
  * nonzero and outer_size is the number of columns if the matrix is column-major and the number of rows
  * otherwise.
  *
  * Internally, the data are stored as a std::vector of compressed vectors. The performance of random writes might
  * decrease as the number of nonzeros per inner vector increases. In practice, we observed very good performance
  * up to about 100 nonzeros/vector, and the performance remains relatively good up to 500 nonzeros/vector.
  *
  * \see SparseMatrix
  */
34 
35 namespace internal {
36 template<typename _Scalar, int _Options, typename _StorageIndex>
37 struct traits<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
38 {
39   typedef _Scalar Scalar;
40   typedef _StorageIndex StorageIndex;
41   typedef Sparse StorageKind;
42   typedef MatrixXpr XprKind;
43   enum {
44     RowsAtCompileTime = Dynamic,
45     ColsAtCompileTime = Dynamic,
46     MaxRowsAtCompileTime = Dynamic,
47     MaxColsAtCompileTime = Dynamic,
48     Flags = _Options | NestByRefBit | LvalueBit,
49     CoeffReadCost = NumTraits<Scalar>::ReadCost,
50     SupportedAccessPatterns = OuterRandomAccessPattern
51   };
52 };
53 }
54 
55 template<typename _Scalar, int _Options, typename _StorageIndex>
56  class  DynamicSparseMatrix
57   : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
58 {
59     typedef SparseMatrixBase<DynamicSparseMatrix> Base;
60     using Base::convert_index;
61   public:
62     EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
63     // FIXME: why are these operator already alvailable ???
64     // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
65     // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
66     typedef MappedSparseMatrix<Scalar,Flags> Map;
67     using Base::IsRowMajor;
68     using Base::operator=;
69     enum {
70       Options = _Options
71     };
72 
73   protected:
74 
75     typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0), StorageIndex> TransposedSparseMatrix;
76 
77     Index m_innerSize;
78     std::vector<internal::CompressedStorage<Scalar,StorageIndex> > m_data;
79 
80   public:
81 
82     inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
83     inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
84     inline Index innerSize() const { return m_innerSize; }
85     inline Index outerSize() const { return convert_index(m_data.size()); }
86     inline Index innerNonZeros(Index j) const { return m_data[j].size(); }
87 
88     std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() { return m_data; }
89     const std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() const { return m_data; }
90 
91     /** \returns the coefficient value at given position \a row, \a col
92       * This operation involes a log(rho*outer_size) binary search.
93       */
94     inline Scalar coeff(Index row, Index col) const
95     {
96       const Index outer = IsRowMajor ? row : col;
97       const Index inner = IsRowMajor ? col : row;
98       return m_data[outer].at(inner);
99     }
100 
101     /** \returns a reference to the coefficient value at given position \a row, \a col
102       * This operation involes a log(rho*outer_size) binary search. If the coefficient does not
103       * exist yet, then a sorted insertion into a sequential buffer is performed.
104       */
105     inline Scalar& coeffRef(Index row, Index col)
106     {
107       const Index outer = IsRowMajor ? row : col;
108       const Index inner = IsRowMajor ? col : row;
109       return m_data[outer].atWithInsertion(inner);
110     }
111 
112     class InnerIterator;
113     class ReverseInnerIterator;
114 
115     void setZero()
116     {
117       for (Index j=0; j<outerSize(); ++j)
118         m_data[j].clear();
119     }
120 
121     /** \returns the number of non zero coefficients */
122     Index nonZeros() const
123     {
124       Index res = 0;
125       for (Index j=0; j<outerSize(); ++j)
126         res += m_data[j].size();
127       return res;
128     }
129 
130 
131 
132     void reserve(Index reserveSize = 1000)
133     {
134       if (outerSize()>0)
135       {
136         Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
137         for (Index j=0; j<outerSize(); ++j)
138         {
139           m_data[j].reserve(reserveSizePerVector);
140         }
141       }
142     }
143 
144     /** Does nothing: provided for compatibility with SparseMatrix */
145     inline void startVec(Index /*outer*/) {}
146 
147     /** \returns a reference to the non zero coefficient at position \a row, \a col assuming that:
148       * - the nonzero does not already exist
149       * - the new coefficient is the last one of the given inner vector.
150       *
151       * \sa insert, insertBackByOuterInner */
152     inline Scalar& insertBack(Index row, Index col)
153     {
154       return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
155     }
156 
157     /** \sa insertBack */
158     inline Scalar& insertBackByOuterInner(Index outer, Index inner)
159     {
160       eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
161       eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
162                 && "wrong sorted insertion");
163       m_data[outer].append(0, inner);
164       return m_data[outer].value(m_data[outer].size()-1);
165     }
166 
167     inline Scalar& insert(Index row, Index col)
168     {
169       const Index outer = IsRowMajor ? row : col;
170       const Index inner = IsRowMajor ? col : row;
171 
172       Index startId = 0;
173       Index id = static_cast<Index>(m_data[outer].size()) - 1;
174       m_data[outer].resize(id+2,1);
175 
176       while ( (id >= startId) && (m_data[outer].index(id) > inner) )
177       {
178         m_data[outer].index(id+1) = m_data[outer].index(id);
179         m_data[outer].value(id+1) = m_data[outer].value(id);
180         --id;
181       }
182       m_data[outer].index(id+1) = inner;
183       m_data[outer].value(id+1) = 0;
184       return m_data[outer].value(id+1);
185     }
186 
187     /** Does nothing: provided for compatibility with SparseMatrix */
188     inline void finalize() {}
189 
190     /** Suppress all nonzeros which are smaller than \a reference under the tolerance \a epsilon */
191     void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
192     {
193       for (Index j=0; j<outerSize(); ++j)
194         m_data[j].prune(reference,epsilon);
195     }
196 
197     /** Resize the matrix without preserving the data (the matrix is set to zero)
198       */
199     void resize(Index rows, Index cols)
200     {
201       const Index outerSize = IsRowMajor ? rows : cols;
202       m_innerSize = convert_index(IsRowMajor ? cols : rows);
203       setZero();
204       if (Index(m_data.size()) != outerSize)
205       {
206         m_data.resize(outerSize);
207       }
208     }
209 
210     void resizeAndKeepData(Index rows, Index cols)
211     {
212       const Index outerSize = IsRowMajor ? rows : cols;
213       const Index innerSize = IsRowMajor ? cols : rows;
214       if (m_innerSize>innerSize)
215       {
216         // remove all coefficients with innerCoord>=innerSize
217         // TODO
218         //std::cerr << "not implemented yet\n";
219         exit(2);
220       }
221       if (m_data.size() != outerSize)
222       {
223         m_data.resize(outerSize);
224       }
225     }
226 
227     /** The class DynamicSparseMatrix is deprecated */
228     EIGEN_DEPRECATED inline DynamicSparseMatrix()
229       : m_innerSize(0), m_data(0)
230     {
231       #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
232         EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
233       #endif
234       eigen_assert(innerSize()==0 && outerSize()==0);
235     }
236 
237     /** The class DynamicSparseMatrix is deprecated */
238     EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
239       : m_innerSize(0)
240     {
241       #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
242         EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
243       #endif
244       resize(rows, cols);
245     }
246 
247     /** The class DynamicSparseMatrix is deprecated */
248     template<typename OtherDerived>
249     EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
250       : m_innerSize(0)
251     {
252       #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
253         EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
254       #endif
255       Base::operator=(other.derived());
256     }
257 
258     inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
259       : Base(), m_innerSize(0)
260     {
261       #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
262         EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
263       #endif
264       *this = other.derived();
265     }
266 
267     inline void swap(DynamicSparseMatrix& other)
268     {
269       //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
270       std::swap(m_innerSize, other.m_innerSize);
271       //std::swap(m_outerSize, other.m_outerSize);
272       m_data.swap(other.m_data);
273     }
274 
275     inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
276     {
277       if (other.isRValue())
278       {
279         swap(other.const_cast_derived());
280       }
281       else
282       {
283         resize(other.rows(), other.cols());
284         m_data = other.m_data;
285       }
286       return *this;
287     }
288 
289     /** Destructor */
290     inline ~DynamicSparseMatrix() {}
291 
292   public:
293 
294     /** \deprecated
295       * Set the matrix to zero and reserve the memory for \a reserveSize nonzero coefficients. */
296     EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
297     {
298       setZero();
299       reserve(reserveSize);
300     }
301 
302     /** \deprecated use insert()
303       * inserts a nonzero coefficient at given coordinates \a row, \a col and returns its reference assuming that:
304       *  1 - the coefficient does not exist yet
305       *  2 - this the coefficient with greater inner coordinate for the given outer coordinate.
306       * In other words, assuming \c *this is column-major, then there must not exists any nonzero coefficient of coordinates
307       * \c i \c x \a col such that \c i >= \a row. Otherwise the matrix is invalid.
308       *
309       * \see fillrand(), coeffRef()
310       */
311     EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
312     {
313       const Index outer = IsRowMajor ? row : col;
314       const Index inner = IsRowMajor ? col : row;
315       return insertBack(outer,inner);
316     }
317 
318     /** \deprecated use insert()
319       * Like fill() but with random inner coordinates.
320       * Compared to the generic coeffRef(), the unique limitation is that we assume
321       * the coefficient does not exist yet.
322       */
323     EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
324     {
325       return insert(row,col);
326     }
327 
328     /** \deprecated use finalize()
329       * Does nothing. Provided for compatibility with SparseMatrix. */
330     EIGEN_DEPRECATED void endFill() {}
331 
332 #   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
333 #     include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
334 #   endif
335  };
336 
337 template<typename Scalar, int _Options, typename _StorageIndex>
338 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::InnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
339 {
340     typedef typename SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator Base;
341   public:
342     InnerIterator(const DynamicSparseMatrix& mat, Index outer)
343       : Base(mat.m_data[outer]), m_outer(outer)
344     {}
345 
346     inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
347     inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
348     inline Index outer() const { return m_outer; }
349 
350   protected:
351     const Index m_outer;
352 };
353 
354 template<typename Scalar, int _Options, typename _StorageIndex>
355 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
356 {
357     typedef typename SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator Base;
358   public:
359     ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
360       : Base(mat.m_data[outer]), m_outer(outer)
361     {}
362 
363     inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
364     inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
365     inline Index outer() const { return m_outer; }
366 
367   protected:
368     const Index m_outer;
369 };
370 
371 namespace internal {
372 
373 template<typename _Scalar, int _Options, typename _StorageIndex>
374 struct evaluator<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
375   : evaluator_base<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
376 {
377   typedef _Scalar Scalar;
378   typedef DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
379   typedef typename SparseMatrixType::InnerIterator InnerIterator;
380   typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;
381 
382   enum {
383     CoeffReadCost = NumTraits<_Scalar>::ReadCost,
384     Flags = SparseMatrixType::Flags
385   };
386 
387   evaluator() : m_matrix(0) {}
388   evaluator(const SparseMatrixType &mat) : m_matrix(&mat) {}
389 
390   operator SparseMatrixType&() { return m_matrix->const_cast_derived(); }
391   operator const SparseMatrixType&() const { return *m_matrix; }
392 
393   Scalar coeff(Index row, Index col) const { return m_matrix->coeff(row,col); }
394 
395   Index nonZerosEstimate() const { return m_matrix->nonZeros(); }
396 
397   const SparseMatrixType *m_matrix;
398 };
399 
400 }
401 
402 } // end namespace Eigen
403 
404 #endif // EIGEN_DYNAMIC_SPARSEMATRIX_H
405