Eigen 3.2.92
SparseCompressedBase.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }

  public:

    /** \returns the number of non-zero coefficients */
    inline Index nonZeros() const
    {
      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
        return derived().nonZeros();
      else if(isCompressed())
        return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
      else if(derived().outerSize()==0)
        return 0;
      else
        return innerNonZeros().sum();
    }

    /** \returns a const pointer to the array of stored values */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    /** \returns a non-const pointer to the array of stored values */
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a const pointer to the array of inner indices */
    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    /** \returns a non-const pointer to the array of inner indices */
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a const pointer to the array of offsets marking the start of each inner vector */
    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    /** \returns a non-const pointer to the array of offsets marking the start of each inner vector */
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a const pointer to the array of entry counts per inner vector, or 0 in compressed mode */
    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    /** \returns a non-const pointer to the array of entry counts per inner vector */
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether \c *this is in compressed form */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

  protected:
    /** Default constructor. Does nothing. */
    SparseCompressedBase() {}
  private:
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};
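
// Usage sketch (not part of the original file): the raw-array accessors above
// expose the standard compressed row/column storage layout. Assuming a
// caller-provided, finalized Eigen::SparseMatrix<double> A (column-major by
// default, StorageIndex = int) and a valid column index j:
//
//   const double* vals = A.valuePtr();
//   const int*    inds = A.innerIndexPtr();   // row indices for column-major storage
//   const int*    oidx = A.outerIndexPtr();
//   double colSum = 0;
//   for (int p = oidx[j]; p < oidx[j+1]; ++p)  // upper bound valid only if A.isCompressed()
//     colSum += vals[p];                       // entry at row inds[p], column j
//
// For an uncompressed matrix, the end of column j is oidx[j] + A.innerNonZeroPtr()[j]
// instead of oidx[j+1], which is exactly the distinction nonZeros() above handles.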

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
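
// Usage sketch (not part of the original file): InnerIterator is the intended
// way to visit the stored entries of one inner vector (a column for the default
// column-major storage, a row for row-major). Assuming a caller-provided
// Eigen::SparseMatrix<double> A:
//
//   double sum = 0;
//   for (Eigen::Index k = 0; k < A.outerSize(); ++k)
//     for (Eigen::SparseMatrix<double>::InnerIterator it(A, k); it; ++it)
//       sum += it.value();   // it.row(), it.col(), it.index() give the coordinates
//
// As the comment on the private constructor above warns, the iterator type must
// match the storage order of the matrix being traversed.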

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(mat.nonZeros()), m_start(0)
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_id(data.size()), m_start(0)
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
    Index m_id;
    Index m_start;
};
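
// Usage sketch (not part of the original file): ReverseInnerIterator covers the
// same range as InnerIterator but starts past the last stored entry and is
// decremented towards the first one. Assuming a caller-provided
// Eigen::SparseMatrix<double> A with sorted inner indices:
//
//   for (Eigen::Index k = 0; k < A.outerSize(); ++k)
//     for (Eigen::SparseMatrix<double>::ReverseInnerIterator it(A, k); it; --it)
//       process(it.index(), it.value());  // hypothetical callback; largest inner index first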

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;
  typedef typename Derived::ReverseInnerIterator ReverseInnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  Scalar coeff(Index row, Index col) const
  { return m_matrix->coeff(row,col); }

  Scalar& coeffRef(Index row, Index col)
  {
    eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());

    const Index outer = Derived::IsRowMajor ? row : col;
    const Index inner = Derived::IsRowMajor ? col : row;

    Index start = m_matrix->outerIndexPtr()[outer];
    Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
    eigen_assert(end>start && "you are using a non-finalized sparse matrix or written coefficient does not exist");
    // binary search for the inner index within [start,end)
    const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end,inner)
                  - m_matrix->innerIndexPtr();
    eigen_assert((p<end) && (m_matrix->innerIndexPtr()[p]==inner) && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

  const Derived *m_matrix;
};
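
// Note (not part of the original file): coeffRef above locates an existing entry
// by binary search over the inner indices of the target outer vector, so a write
// costs O(log nnz(outer)) and asserts if the coefficient was never inserted.
// A rough standalone sketch of the same lookup, using hypothetical plain arrays
// (values, innerIndex, outerIndex, innerNonZero) in place of the matrix members:
//
//   int start = outerIndex[outer];
//   int end   = compressed ? outerIndex[outer+1] : start + innerNonZero[outer];
//   const int* p = std::lower_bound(innerIndex + start, innerIndex + end, inner);
//   assert(p != innerIndex + end && *p == inner);   // the coefficient must already exist
//   double& ref = values[p - innerIndex];           // writable reference to the stored value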

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H