// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

/** \ingroup SparseCore_Module
  * \class SparseCompressedBase
  * \brief Common base class for sparse [compressed]-{row|column}-storage formats.
  *
  * This class defines the common interface for all derived classes implementing the compressed sparse storage format, such as:
  *  - SparseMatrix
  *  - Ref<SparseMatrixType,Options>
  *  - Map<SparseMatrixType>
  *
  */
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }

  public:

    /** \returns the number of non-zero coefficients */
    inline Index nonZeros() const
    {
      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
        return derived().nonZeros();
      else if(isCompressed())
        return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
      else if(derived().outerSize()==0)
        return 0;
      else
        return innerNonZeros().sum();
    }
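
    // Illustrative sketch (not part of the class): how nonZeros() relates to the raw
    // storage of a column-major SparseMatrix<double>. The matrix name A is made up for
    // the example, and the default StorageIndex (int) is assumed for the Map below.
    //
    //   Eigen::SparseMatrix<double> A /* = ... */;
    //   Eigen::Index nnz;
    //   if(A.isCompressed())
    //     // compressed mode: the outer index array directly gives the total count
    //     nnz = A.outerIndexPtr()[A.outerSize()] - A.outerIndexPtr()[0];
    //   else
    //     // uncompressed mode: sum the per-inner-vector counts
    //     nnz = Eigen::Map<const Eigen::VectorXi>(A.innerNonZeroPtr(), A.outerSize()).sum();
    //   // nnz now equals A.nonZeros()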

    /** \returns a const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    /** \returns a non-const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    /** \returns a non-const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 for SparseVector
      * \sa valuePtr(), innerIndexPtr() */
    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 for SparseVector
      * \sa valuePtr(), innerIndexPtr() */
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a const pointer to the array of the number of non-zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    /** \returns a non-const pointer to the array of the number of non-zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether \c *this is in compressed form. */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

    /** \returns a read-only view of the stored coefficients as a 1D array expression.
      *
      * \warning this method is for \b compressed \b storage \b only, and it will trigger an assertion otherwise.
      *
      * \sa valuePtr(), isCompressed() */
    const Map<const Array<Scalar,Dynamic,1> > coeffs() const { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

    /** \returns a read-write view of the stored coefficients as a 1D array expression.
      *
      * \warning this method is for \b compressed \b storage \b only, and it will trigger an assertion otherwise.
      *
      * Here is an example:
      * \include SparseMatrix_coeffs.cpp
      * and the output is:
      * \include SparseMatrix_coeffs.out
      *
      * \sa valuePtr(), isCompressed() */
    Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }
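
    // Illustrative sketch (not part of the class): reading the raw compressed (CSC)
    // arrays of a column-major SparseMatrix<double> through the accessors above, then
    // rescaling every stored coefficient via coeffs(). The names A, val, rowIdx,
    // colStart, j and k are made up, and the default StorageIndex (int) is assumed.
    //
    //   Eigen::SparseMatrix<double> A /* = ... */;
    //   A.makeCompressed();                               // ensure compressed storage
    //   const double* val      = A.valuePtr();            // nonZeros() stored values
    //   const int*    rowIdx   = A.innerIndexPtr();       // row index of each stored value
    //   const int*    colStart = A.outerIndexPtr();       // outerSize()+1 column start positions
    //   for(Eigen::Index j=0; j<A.outerSize(); ++j)
    //     for(int k=colStart[j]; k<colStart[j+1]; ++k)
    //       std::cout << "(" << rowIdx[k] << "," << j << ") = " << val[k] << "\n";
    //   A.coeffs() *= 2.0;                                // scales all stored coefficients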

  protected:
    /** Default constructor. Do nothing. */
    SparseCompressedBase() {}

    /** \internal return the index of the coeff at (row,col) or just before if it does not exist.
      * This is an analogue of std::lower_bound.
      */
    internal::LowerBoundIndex lower_bound(Index row, Index col) const
    {
      eigen_internal_assert(row>=0 && row<this->rows() && col>=0 && col<this->cols());

      const Index outer = Derived::IsRowMajor ? row : col;
      const Index inner = Derived::IsRowMajor ? col : row;

      Index start = this->outerIndexPtr()[outer];
      Index end = this->isCompressed() ? this->outerIndexPtr()[outer+1] : this->outerIndexPtr()[outer] + this->innerNonZeroPtr()[outer];
      eigen_assert(end>=start && "you are using a non-finalized sparse matrix or written coefficient does not exist");
      internal::LowerBoundIndex p;
      p.value = std::lower_bound(this->innerIndexPtr()+start, this->innerIndexPtr()+end, inner) - this->innerIndexPtr();
      p.found = (p.value<end) && (this->innerIndexPtr()[p.value]==inner);
      return p;
    }

    friend struct internal::evaluator<SparseCompressedBase<Derived> >;

  private:
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }

    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline InnerIterator& operator++() { m_id++; return *this; }
    inline InnerIterator& operator+=(Index i) { m_id += i; return *this; }

    inline InnerIterator operator+(Index i)
    {
      InnerIterator result = *this;
      result += i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
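
// Illustrative sketch (not part of this file's interface): the canonical pattern for
// visiting every stored coefficient of a matrix deriving from SparseCompressedBase,
// e.g. a SparseMatrix<double>. The names mat, k and it are made up for the example.
//
//   Eigen::SparseMatrix<double> mat /* = ... */;
//   for(Eigen::Index k=0; k<mat.outerSize(); ++k)
//     for(Eigen::SparseMatrix<double>::InnerIterator it(mat,k); it; ++it)
//       std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";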

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
    inline ReverseInnerIterator& operator-=(Index i) { m_id -= i; return *this; }

    inline ReverseInnerIterator operator-(Index i)
    {
      ReverseInnerIterator result = *this;
      result -= i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
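
// Illustrative sketch (not part of this file's interface): visiting the stored
// coefficients of each inner vector in reverse order. As above, mat, k and it are
// made-up names and a SparseMatrix<double> is assumed.
//
//   Eigen::SparseMatrix<double> mat /* = ... */;
//   for(Eigen::Index k=0; k<mat.outerSize(); ++k)
//     for(Eigen::SparseMatrix<double>::ReverseInnerIterator it(mat,k); it; --it)
//       std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";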

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

protected:

  Index find(Index row, Index col) const
  {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row,col);
    return p.found ? p.value : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H