SparseBlock.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_BLOCK_H
#define EIGEN_SPARSE_BLOCK_H

// IWYU pragma: private
#include "./InternalHeaderCheck.h"

namespace Eigen {

// Subset of columns or rows
template <typename XprType, int BlockRows, int BlockCols>
class BlockImpl<XprType, BlockRows, BlockCols, true, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;
  typedef Block<XprType, BlockRows, BlockCols, true> BlockType;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };

 protected:
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  inline BlockImpl(XprType& xpr, Index i) : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}

  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}

  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

  Index nonZeros() const {
    typedef internal::evaluator<XprType> EvaluatorType;
    EvaluatorType matEval(m_matrix);
    Index nnz = 0;
    Index end = m_outerStart + m_outerSize.value();
    for (Index j = m_outerStart; j < end; ++j)
      for (typename EvaluatorType::InnerIterator it(matEval, j); it; ++it) ++nnz;
    return nnz;
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }

  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

 protected:
  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;

 protected:
  // Disable assignment with clear error message.
  // Note that simply removing operator= yields compilation errors with ICC+MSVC
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
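
// Illustrative usage of the specialization above (a minimal sketch, not part of this
// header): an inner panel taken from a generic sparse *expression* binds to this
// read-only implementation, so coefficients can be read and counted but not written.
//
//   #include <Eigen/SparseCore>
//
//   void read_only_panel_example(const Eigen::SparseMatrix<double>& A) {
//     // A is column-major, so a subset of columns is an "inner panel".
//     Eigen::Index nnz = (A * 2.0).middleCols(1, 2).nonZeros();  // counted by iterating the evaluator
//     double x = (A * 2.0).middleCols(1, 2).coeff(0, 0);         // per-coefficient read
//     // (A * 2.0).middleCols(1, 2) = ...;  // would not compile: this block is read-only
//     (void)nnz; (void)x;
//   }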

/***************************************************************************
 * specialization for SparseMatrix
 ***************************************************************************/

namespace internal {

template <typename SparseMatrixType, int BlockRows, int BlockCols>
class sparse_matrix_block_impl : public SparseCompressedBase<Block<SparseMatrixType, BlockRows, BlockCols, true> > {
  typedef internal::remove_all_t<typename SparseMatrixType::Nested> MatrixTypeNested_;
  typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
  typedef SparseCompressedBase<Block<SparseMatrixType, BlockRows, BlockCols, true> > Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
 protected:
  typedef typename Base::IndexVector IndexVector;
  enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };

 public:
  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index i)
      : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize) {}

  inline sparse_matrix_block_impl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows,
                                  Index blockCols)
      : m_matrix(xpr),
        m_outerStart(convert_index(IsRowMajor ? startRow : startCol)),
        m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols)) {}

  template <typename OtherDerived>
  inline BlockType& operator=(const SparseMatrixBase<OtherDerived>& other) {
    typedef internal::remove_all_t<typename SparseMatrixType::Nested> NestedMatrixType_;
    NestedMatrixType_& matrix = m_matrix;
    // This assignment is slow if this vector set is not empty
    // and/or it is not at the end of the nonzeros of the underlying matrix.

    // 1 - eval to a temporary to avoid transposition and/or aliasing issues
    Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
    eigen_internal_assert(tmp.outerSize() == m_outerSize.value());

    // 2 - let's check whether there is enough allocated memory
    Index nnz = tmp.nonZeros();
    Index start =
        m_outerStart == 0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];  // starting position of the current block
    Index end = m_matrix.outerIndexPtr()[m_outerStart + m_outerSize.value()];  // ending position of the current block
    Index block_size = end - start;                                            // available room in the current block
    Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;

    Index free_size = m_matrix.isCompressed() ? Index(matrix.data().allocatedSize()) + block_size : block_size;

    Index tmp_start = tmp.outerIndexPtr()[0];

    bool update_trailing_pointers = false;
    if (nnz > free_size) {
      // realloc manually to reduce copies
      typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);

      internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
      internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, newdata.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           newdata.indexPtr() + start);

      internal::smart_copy(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                           newdata.valuePtr() + start + nnz);
      internal::smart_copy(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                           newdata.indexPtr() + start + nnz);

      newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);

      matrix.data().swap(newdata);

      update_trailing_pointers = true;
    } else {
      if (m_matrix.isCompressed() && nnz != block_size) {
        // no need to realloc, simply copy the tail at its respective position and insert tmp
        matrix.data().resize(start + nnz + tail_size);

        internal::smart_memmove(matrix.valuePtr() + end, matrix.valuePtr() + end + tail_size,
                                matrix.valuePtr() + start + nnz);
        internal::smart_memmove(matrix.innerIndexPtr() + end, matrix.innerIndexPtr() + end + tail_size,
                                matrix.innerIndexPtr() + start + nnz);

        update_trailing_pointers = true;
      }

      internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, matrix.valuePtr() + start);
      internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz,
                           matrix.innerIndexPtr() + start);
    }

    // update outer index pointers and innerNonZeros
    if (IsVectorAtCompileTime) {
      if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart] = StorageIndex(nnz);
      matrix.outerIndexPtr()[m_outerStart] = StorageIndex(start);
    } else {
      StorageIndex p = StorageIndex(start);
      for (Index k = 0; k < m_outerSize.value(); ++k) {
        StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
        if (!m_matrix.isCompressed()) matrix.innerNonZeroPtr()[m_outerStart + k] = nnz_k;
        matrix.outerIndexPtr()[m_outerStart + k] = p;
        p += nnz_k;
      }
    }

    if (update_trailing_pointers) {
      StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
      for (Index k = m_outerStart + m_outerSize.value(); k <= matrix.outerSize(); ++k) {
        matrix.outerIndexPtr()[k] += offset;
      }
    }

    return derived();
  }

  inline BlockType& operator=(const BlockType& other) { return operator= <BlockType>(other); }

  inline const Scalar* valuePtr() const { return m_matrix.valuePtr(); }
  inline Scalar* valuePtr() { return m_matrix.valuePtr(); }

  inline const StorageIndex* innerIndexPtr() const { return m_matrix.innerIndexPtr(); }
  inline StorageIndex* innerIndexPtr() { return m_matrix.innerIndexPtr(); }

  inline const StorageIndex* outerIndexPtr() const { return m_matrix.outerIndexPtr() + m_outerStart; }
  inline StorageIndex* outerIndexPtr() { return m_matrix.outerIndexPtr() + m_outerStart; }

  inline const StorageIndex* innerNonZeroPtr() const {
    return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart);
  }
  inline StorageIndex* innerNonZeroPtr() { return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr() + m_outerStart); }

  bool isCompressed() const { return m_matrix.innerNonZeroPtr() == 0; }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
  }

  const Scalar& lastCoeff() const {
    EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
    eigen_assert(Base::nonZeros() > 0);
    if (m_matrix.isCompressed())
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart + 1] - 1];
    else
      return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart] + m_matrix.innerNonZeroPtr()[m_outerStart] - 1];
  }

  EIGEN_STRONG_INLINE Index rows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  EIGEN_STRONG_INLINE Index cols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

  inline const SparseMatrixType& nestedExpression() const { return m_matrix; }
  inline SparseMatrixType& nestedExpression() { return m_matrix; }
  Index startRow() const { return IsRowMajor ? m_outerStart : 0; }
  Index startCol() const { return IsRowMajor ? 0 : m_outerStart; }
  Index blockRows() const { return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
  Index blockCols() const { return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }

 protected:
  typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
  Index m_outerStart;
  const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
};
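
// Illustrative usage (a minimal sketch, not part of this header): writing to an inner
// panel of a plain SparseMatrix goes through the templated operator= above. The right-
// hand side is first evaluated into a compressed temporary, then spliced into the
// destination storage; trailing outer-index pointers are shifted when the number of
// nonzeros stored in the panel changes.
//
//   #include <Eigen/SparseCore>
//
//   void assign_panel_example() {
//     Eigen::SparseMatrix<double> A(8, 8), B(8, 3);
//     A.insert(0, 2) = 1.0;  B.insert(3, 0) = 2.0;   // toy fill-in
//     A.makeCompressed();    B.makeCompressed();
//     A.middleCols(2, 3) = B;         // columns 2..4 of A are replaced by B
//     A.col(0) = 2.0 * B.col(1);      // single outer vector, same mechanism
//   }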

}  // namespace internal

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;
};
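
// Because the specialization above derives from internal::sparse_matrix_block_impl, an
// inner-panel block of a SparseMatrix behaves like a compressed view sharing its parent's
// arrays: outerIndexPtr() is offset by the block start, while valuePtr()/innerIndexPtr()
// are the parent's arrays. A minimal sketch (not part of this header; assumes a
// compressed, column-major matrix with default int storage indices):
//
//   #include <Eigen/SparseCore>
//
//   void block_as_compressed_view(Eigen::SparseMatrix<double>& A) {
//     A.makeCompressed();
//     auto cols = A.middleCols(2, 3);              // Block<SparseMatrix<double>, ..., true>
//     const int* outer = cols.outerIndexPtr();     // == A.outerIndexPtr() + 2
//     double*    vals  = cols.valuePtr();          // shared with A's value array
//     // Entries of the block's column j live in vals[outer[j]] .. vals[outer[j + 1] - 1].
//     for (Eigen::Index k = outer[0]; k < outer[3]; ++k) vals[k] *= 2.0;  // scale the panel
//   }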

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
class BlockImpl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true, Sparse>
    : public internal::sparse_matrix_block_impl<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows,
                                                BlockCols> {
 public:
  typedef StorageIndex_ StorageIndex;
  typedef const SparseMatrix<Scalar_, Options_, StorageIndex_> SparseMatrixType;
  typedef internal::sparse_matrix_block_impl<SparseMatrixType, BlockRows, BlockCols> Base;
  inline BlockImpl(SparseMatrixType& xpr, Index i) : Base(xpr, i) {}

  inline BlockImpl(SparseMatrixType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : Base(xpr, startRow, startCol, blockRows, blockCols) {}

  using Base::operator=;

 private:
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr, Index i);
  template <typename Derived>
  BlockImpl(const SparseMatrixBase<Derived>& xpr);
};

//----------

/** Generic implementation of sparse Block expression.
 * Real-only.
 */
template <typename XprType, int BlockRows, int BlockCols, bool InnerPanel>
class BlockImpl<XprType, BlockRows, BlockCols, InnerPanel, Sparse>
    : public SparseMatrixBase<Block<XprType, BlockRows, BlockCols, InnerPanel> >, internal::no_assignment_operator {
  typedef Block<XprType, BlockRows, BlockCols, InnerPanel> BlockType;
  typedef SparseMatrixBase<BlockType> Base;
  using Base::convert_index;

 public:
  enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
  EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)

  typedef internal::remove_all_t<typename XprType::Nested> MatrixTypeNested_;

  /** Column or Row constructor
   */
  inline BlockImpl(XprType& xpr, Index i)
      : m_matrix(xpr),
        m_startRow((BlockRows == 1) && (BlockCols == XprType::ColsAtCompileTime) ? convert_index(i) : 0),
        m_startCol((BlockRows == XprType::RowsAtCompileTime) && (BlockCols == 1) ? convert_index(i) : 0),
        m_blockRows(BlockRows == 1 ? 1 : xpr.rows()),
        m_blockCols(BlockCols == 1 ? 1 : xpr.cols()) {}

  /** Dynamic-size constructor
   */
  inline BlockImpl(XprType& xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
      : m_matrix(xpr),
        m_startRow(convert_index(startRow)),
        m_startCol(convert_index(startCol)),
        m_blockRows(convert_index(blockRows)),
        m_blockCols(convert_index(blockCols)) {}

  inline Index rows() const { return m_blockRows.value(); }
  inline Index cols() const { return m_blockCols.value(); }

  inline Scalar& coeffRef(Index row, Index col) {
    return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value());
  }

  inline const Scalar coeff(Index row, Index col) const {
    return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
  }

  inline Scalar& coeffRef(Index index) {
    return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                             m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }

  inline const Scalar coeff(Index index) const {
    return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
                          m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
  }

  inline const XprType& nestedExpression() const { return m_matrix; }
  inline XprType& nestedExpression() { return m_matrix; }
  Index startRow() const { return m_startRow.value(); }
  Index startCol() const { return m_startCol.value(); }
  Index blockRows() const { return m_blockRows.value(); }
  Index blockCols() const { return m_blockCols.value(); }

 protected:
  // friend class internal::GenericSparseBlockInnerIteratorImpl<XprType,BlockRows,BlockCols,InnerPanel>;
  friend struct internal::unary_evaluator<Block<XprType, BlockRows, BlockCols, InnerPanel>, internal::IteratorBased,
                                          Scalar>;

  Index nonZeros() const { return Dynamic; }

  typename internal::ref_selector<XprType>::non_const_type m_matrix;
  const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
  const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
  const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
  const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;

 protected:
  // Disable assignment with clear error message.
  // Note that simply removing operator= yields compilation errors with ICC+MSVC
  template <typename T>
  BlockImpl& operator=(const T&) {
    EIGEN_STATIC_ASSERT(sizeof(T) == 0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
    return *this;
  }
};
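
// Illustrative usage of the generic implementation above (a minimal sketch, not part of
// this header): an arbitrary sub-block of a sparse matrix or expression is read-only and
// is consumed either per coefficient or through the iterator-based evaluator defined below.
//
//   #include <Eigen/SparseCore>
//
//   void generic_block_example(const Eigen::SparseMatrix<double>& A) {
//     double s = A.block(1, 1, 3, 3).sum();        // reduced via the evaluator's iterators
//     double c = A.block(1, 1, 3, 3).coeff(0, 0);  // per-coefficient read
//     (void)s; (void)c;
//   }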

namespace internal {

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
struct unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>
    : public evaluator_base<Block<ArgType, BlockRows, BlockCols, InnerPanel> > {
  class InnerVectorInnerIterator;
  class OuterVectorInnerIterator;

 public:
  typedef Block<ArgType, BlockRows, BlockCols, InnerPanel> XprType;
  typedef typename XprType::StorageIndex StorageIndex;
  typedef typename XprType::Scalar Scalar;

  enum {
    IsRowMajor = XprType::IsRowMajor,
    OuterVector = (BlockCols == 1 && ArgType::IsRowMajor) || (BlockRows == 1 && !ArgType::IsRowMajor),
    CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
    Flags = XprType::Flags
  };

  typedef std::conditional_t<OuterVector, OuterVectorInnerIterator, InnerVectorInnerIterator> InnerIterator;

  explicit unary_evaluator(const XprType& op) : m_argImpl(op.nestedExpression()), m_block(op) {}

  inline Index nonZerosEstimate() const {
    const Index nnz = m_block.nonZeros();
    if (nnz < 0) {
      // Scale the non-zero estimate for the underlying expression linearly with block size.
      // Return zero if the underlying block is empty.
      const Index nested_sz = m_block.nestedExpression().size();
      return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
    }
    return nnz;
  }

 protected:
  typedef typename evaluator<ArgType>::InnerIterator EvalIterator;

  evaluator<ArgType> m_argImpl;
  const XprType& m_block;
};

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::InnerVectorInnerIterator
    : public EvalIterator {
  // NOTE MSVC fails to compile if we don't explicitly "import" IsRowMajor from unary_evaluator
  // because the base class EvalIterator has a private IsRowMajor enum too. (bug #1786)
  // NOTE We cannot call it IsRowMajor because it would shadow unary_evaluator::IsRowMajor
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const XprType& m_block;
  Index m_end;

 public:
  EIGEN_STRONG_INLINE InnerVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
        m_block(aEval.m_block),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()) {
    while ((EvalIterator::operator bool()) &&
           (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())))
      EvalIterator::operator++();
  }

  inline StorageIndex index() const {
    return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow());
  }
  inline Index outer() const {
    return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol());
  }
  inline Index row() const { return EvalIterator::row() - m_block.startRow(); }
  inline Index col() const { return EvalIterator::col() - m_block.startCol(); }

  inline operator bool() const { return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
};
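
// The iterator above runs the nested expression's iterator on the shifted outer index and
// clips it to the block's inner range [start, start + size). A minimal sketch of code that
// exercises it (not part of this header; the function name is illustrative):
//
//   #include <Eigen/SparseCore>
//   #include <Eigen/Dense>
//
//   void inner_iterator_example(const Eigen::SparseMatrix<double>& A, const Eigen::VectorXd& x) {
//     // A general sub-block is not an inner panel, so the product below walks each of the
//     // block's inner vectors with InnerVectorInnerIterator.
//     Eigen::VectorXd y = A.block(1, 0, 3, 4) * x.head(4);
//     (void)y;
//   }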

template <typename ArgType, int BlockRows, int BlockCols, bool InnerPanel>
class unary_evaluator<Block<ArgType, BlockRows, BlockCols, InnerPanel>, IteratorBased>::OuterVectorInnerIterator {
  // NOTE see above
  enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
  const unary_evaluator& m_eval;
  Index m_outerPos;
  const Index m_innerIndex;
  Index m_end;
  EvalIterator m_it;

 public:
  EIGEN_STRONG_INLINE OuterVectorInnerIterator(const unary_evaluator& aEval, Index outer)
      : m_eval(aEval),
        m_outerPos((XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow())),
        m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
        m_end(XprIsRowMajor ? aEval.m_block.startCol() + aEval.m_block.blockCols()
                            : aEval.m_block.startRow() + aEval.m_block.blockRows()),
        m_it(m_eval.m_argImpl, m_outerPos) {
    EIGEN_UNUSED_VARIABLE(outer);
    eigen_assert(outer == 0);

    while (m_it && m_it.index() < m_innerIndex) ++m_it;
    if ((!m_it) || (m_it.index() != m_innerIndex)) ++(*this);
  }

  inline StorageIndex index() const {
    return convert_index<StorageIndex>(m_outerPos -
                                       (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow()));
  }
  inline Index outer() const { return 0; }
  inline Index row() const { return XprIsRowMajor ? 0 : index(); }
  inline Index col() const { return XprIsRowMajor ? index() : 0; }

  inline Scalar value() const { return m_it.value(); }
  inline Scalar& valueRef() { return m_it.valueRef(); }

  inline OuterVectorInnerIterator& operator++() {
    // search next non-zero entry
    while (++m_outerPos < m_end) {
      // Restart iterator at the next inner-vector:
      internal::destroy_at(&m_it);
      internal::construct_at(&m_it, m_eval.m_argImpl, m_outerPos);
      // search for the key m_innerIndex in the current outer-vector
      while (m_it && m_it.index() < m_innerIndex) ++m_it;
      if (m_it && m_it.index() == m_innerIndex) break;
    }
    return *this;
  }

  inline operator bool() const { return m_outerPos < m_end; }
};
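
// The iterator above handles the "outer vector" case, e.g. a single row of a column-major
// expression: for every outer index in range it restarts an inner iterator and linearly
// searches for the fixed inner index, so traversal costs roughly the number of nonzeros in
// the spanned columns rather than the number of nonzeros in the row. A minimal sketch of
// code that hits this path (not part of this header):
//
//   #include <Eigen/SparseCore>
//
//   void outer_vector_example(const Eigen::SparseMatrix<double>& A) {  // column-major
//     double s = A.row(2).sum();  // one search per column of A
//     (void)s;
//   }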

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>, IteratorBased>
    : evaluator<
          SparseCompressedBase<Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};

template <typename Scalar_, int Options_, typename StorageIndex_, int BlockRows, int BlockCols>
struct unary_evaluator<Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true>,
                       IteratorBased>
    : evaluator<SparseCompressedBase<
          Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> > > {
  typedef Block<const SparseMatrix<Scalar_, Options_, StorageIndex_>, BlockRows, BlockCols, true> XprType;
  typedef evaluator<SparseCompressedBase<XprType> > Base;
  explicit unary_evaluator(const XprType& xpr) : Base(xpr) {}
};

}  // end namespace internal

}  // end namespace Eigen

#endif  // EIGEN_SPARSE_BLOCK_H