Make the accessors to the internal sparse storage part of the public API and remove their "_" prefix.

Gael Guennebaud 2011-12-04 12:19:26 +01:00
parent 1cdbae62db
commit 32917515df
13 changed files with 153 additions and 130 deletions
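As context for the change: the renamed accessors expose the raw compressed-storage (CSC/CSR) arrays of a sparse matrix, which is exactly what external libraries need. A minimal sketch of that use, assuming a hypothetical external C routine csc_spmv that stands in for any CSC-based API; the Eigen calls are the ones made public in this commit, and the matrix is assumed to be in compressed mode.

    #include <Eigen/Sparse>
    #include <Eigen/Dense>

    // Hypothetical external routine expecting compressed-column arrays.
    extern "C" void csc_spmv(int rows, int cols, const int* outer,
                             const int* inner, const double* values,
                             const double* x, double* y);

    void multiply(const Eigen::SparseMatrix<double>& A,
                  const Eigen::VectorXd& x, Eigen::VectorXd& y)  // y pre-sized to A.rows()
    {
      // outerIndexPtr(): column starts, innerIndexPtr(): row indices,
      // valuePtr(): nonzero values -- handed over without a copy, and
      // without the "_" prefix from now on.
      csc_spmv(A.rows(), A.cols(),
               A.outerIndexPtr(), A.innerIndexPtr(), A.valuePtr(),
               x.data(), y.data());
    }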

View File

@@ -69,9 +69,9 @@ cholmod_sparse viewAsCholmod(SparseMatrix<_Scalar,_Options,_Index>& mat)
 res.nzmax = mat.nonZeros();
 res.nrow = mat.rows();;
 res.ncol = mat.cols();
-res.p = mat._outerIndexPtr();
-res.i = mat._innerIndexPtr();
-res.x = mat._valuePtr();
+res.p = mat.outerIndexPtr();
+res.i = mat.innerIndexPtr();
+res.x = mat.valuePtr();
 res.sorted = 1;
 res.packed = 1;
 res.dtype = 0;

View File

@@ -133,8 +133,8 @@ void minimum_degree_ordering(SparseMatrix<Scalar,ColMajor,Index>& C, Permutation
 Index* last = perm.indices().data(); /* use P as workspace for last */
 /* --- Initialize quotient graph ---------------------------------------- */
-Index* Cp = C._outerIndexPtr();
-Index* Ci = C._innerIndexPtr();
+Index* Cp = C.outerIndexPtr();
+Index* Ci = C.innerIndexPtr();
 for(k = 0; k < n; k++)
 len[k] = Cp[k+1] - Cp[k];
 len[n] = 0;

View File

@@ -675,7 +675,7 @@ void SimplicialCholeskyBase<Derived>::analyzePattern(const MatrixType& a, bool d
 }
 /* construct Lp index array from m_nonZerosPerCol column counts */
-Index* Lp = m_matrix._outerIndexPtr();
+Index* Lp = m_matrix.outerIndexPtr();
 Lp[0] = 0;
 for(Index k = 0; k < size; ++k)
 Lp[k+1] = Lp[k] + m_nonZerosPerCol[k] + (doLDLt ? 0 : 1);
@@ -699,9 +699,9 @@ void SimplicialCholeskyBase<Derived>::factorize(const MatrixType& a)
 eigen_assert(m_parent.size()==size);
 eigen_assert(m_nonZerosPerCol.size()==size);
-const Index* Lp = m_matrix._outerIndexPtr();
-Index* Li = m_matrix._innerIndexPtr();
-Scalar* Lx = m_matrix._valuePtr();
+const Index* Lp = m_matrix.outerIndexPtr();
+Index* Li = m_matrix.innerIndexPtr();
+Scalar* Lx = m_matrix.valuePtr();
 ei_declare_aligned_stack_constructed_variable(Scalar, y, size, 0);
 ei_declare_aligned_stack_constructed_variable(Index, pattern, size, 0);

View File

@@ -66,14 +66,14 @@ class MappedSparseMatrix
 //----------------------------------------
 // direct access interface
-inline const Scalar* _valuePtr() const { return m_values; }
-inline Scalar* _valuePtr() { return m_values; }
-inline const Index* _innerIndexPtr() const { return m_innerIndices; }
-inline Index* _innerIndexPtr() { return m_innerIndices; }
-inline const Index* _outerIndexPtr() const { return m_outerIndex; }
-inline Index* _outerIndexPtr() { return m_outerIndex; }
+inline const Scalar* valuePtr() const { return m_values; }
+inline Scalar* valuePtr() { return m_values; }
+inline const Index* innerIndexPtr() const { return m_innerIndices; }
+inline Index* innerIndexPtr() { return m_innerIndices; }
+inline const Index* outerIndexPtr() const { return m_outerIndex; }
+inline Index* outerIndexPtr() { return m_outerIndex; }
 //----------------------------------------
 inline Scalar coeff(Index row, Index col) const
@@ -131,23 +131,23 @@ class MappedSparseMatrix<Scalar,_Flags,_Index>::InnerIterator
 InnerIterator(const MappedSparseMatrix& mat, Index outer)
 : m_matrix(mat),
 m_outer(outer),
-m_id(mat._outerIndexPtr()[outer]),
+m_id(mat.outerIndexPtr()[outer]),
 m_start(m_id),
-m_end(mat._outerIndexPtr()[outer+1])
+m_end(mat.outerIndexPtr()[outer+1])
 {}
 template<unsigned int Added, unsigned int Removed>
 InnerIterator(const Flagged<MappedSparseMatrix,Added,Removed>& mat, Index outer)
-: m_matrix(mat._expression()), m_id(m_matrix._outerIndexPtr()[outer]),
-m_start(m_id), m_end(m_matrix._outerIndexPtr()[outer+1])
+: m_matrix(mat._expression()), m_id(m_matrix.outerIndexPtr()[outer]),
+m_start(m_id), m_end(m_matrix.outerIndexPtr()[outer+1])
 {}
 inline InnerIterator& operator++() { m_id++; return *this; }
-inline Scalar value() const { return m_matrix._valuePtr()[m_id]; }
-inline Scalar& valueRef() { return const_cast<Scalar&>(m_matrix._valuePtr()[m_id]); }
-inline Index index() const { return m_matrix._innerIndexPtr()[m_id]; }
+inline Scalar value() const { return m_matrix.valuePtr()[m_id]; }
+inline Scalar& valueRef() { return const_cast<Scalar&>(m_matrix.valuePtr()[m_id]); }
+inline Index index() const { return m_matrix.innerIndexPtr()[m_id]; }
 inline Index row() const { return IsRowMajor ? m_outer : index(); }
 inline Index col() const { return IsRowMajor ? index() : m_outer; }
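MappedSparseMatrix covers the opposite direction of interoperability: viewing buffers owned by other code as an Eigen sparse matrix. A short sketch, assuming the raw-buffer constructor MappedSparseMatrix(rows, cols, nnz, outerIndexPtr, innerIndexPtr, valuePtr) of this Eigen generation; with the renamed accessors, valuePtr() and friends simply hand back the wrapped pointers.

    #include <Eigen/Sparse>

    // A 3x3 identity in compressed-column form, owned outside Eigen.
    static int    outer[4]  = { 0, 1, 2, 3 };   // column starts
    static int    inner[3]  = { 0, 1, 2 };      // row indices
    static double values[3] = { 1.0, 1.0, 1.0 };

    double traceOfWrappedMatrix()
    {
      typedef Eigen::MappedSparseMatrix<double, Eigen::ColMajor, int> Wrapped;
      Wrapped map(3, 3, 3, outer, inner, values);  // zero-copy view

      // Read the diagonal through the usual InnerIterator interface.
      double trace = 0;
      for (int j = 0; j < map.outerSize(); ++j)
        for (Wrapped::InnerIterator it(map, j); it; ++it)
          if (it.row() == it.col())
            trace += it.value();
      return trace;
    }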

View File

@@ -156,8 +156,8 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
 Index nnz = tmp.nonZeros();
 Index nnz_previous = nonZeros();
 Index free_size = matrix.data().allocatedSize() + nnz_previous;
-std::size_t nnz_head = m_outerStart==0 ? 0 : matrix._outerIndexPtr()[m_outerStart];
-std::size_t tail = m_matrix._outerIndexPtr()[m_outerStart+m_outerSize.value()];
+std::size_t nnz_head = m_outerStart==0 ? 0 : matrix.outerIndexPtr()[m_outerStart];
+std::size_t tail = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
 std::size_t nnz_tail = matrix.nonZeros() - tail;
 if(nnz>free_size)
@@ -203,13 +203,13 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
 Index p = nnz_head;
 for(Index k=0; k<m_outerSize.value(); ++k)
 {
-matrix._outerIndexPtr()[m_outerStart+k] = p;
+matrix.outerIndexPtr()[m_outerStart+k] = p;
 p += tmp.innerVector(k).nonZeros();
 }
 std::ptrdiff_t offset = nnz - nnz_previous;
 for(Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
 {
-matrix._outerIndexPtr()[k] += offset;
+matrix.outerIndexPtr()[k] += offset;
 }
 return *this;
@@ -220,30 +220,30 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
 return operator=<SparseInnerVectorSet>(other);
 }
-inline const Scalar* _valuePtr() const
-{ return m_matrix._valuePtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
-inline Scalar* _valuePtr()
-{ return m_matrix.const_cast_derived()._valuePtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
-inline const Index* _innerIndexPtr() const
-{ return m_matrix._innerIndexPtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
-inline Index* _innerIndexPtr()
-{ return m_matrix.const_cast_derived()._innerIndexPtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
-inline const Index* _outerIndexPtr() const
-{ return m_matrix._outerIndexPtr() + m_outerStart; }
-inline Index* _outerIndexPtr()
-{ return m_matrix.const_cast_derived()._outerIndexPtr() + m_outerStart; }
+inline const Scalar* valuePtr() const
+{ return m_matrix.valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+inline Scalar* valuePtr()
+{ return m_matrix.const_cast_derived().valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+inline const Index* innerIndexPtr() const
+{ return m_matrix.innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+inline Index* innerIndexPtr()
+{ return m_matrix.const_cast_derived().innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+inline const Index* outerIndexPtr() const
+{ return m_matrix.outerIndexPtr() + m_outerStart; }
+inline Index* outerIndexPtr()
+{ return m_matrix.const_cast_derived().outerIndexPtr() + m_outerStart; }
 Index nonZeros() const
 {
 if(m_matrix.compressed())
-return std::size_t(m_matrix._outerIndexPtr()[m_outerStart+m_outerSize.value()])
-- std::size_t(m_matrix._outerIndexPtr()[m_outerStart]);
+return std::size_t(m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()])
+- std::size_t(m_matrix.outerIndexPtr()[m_outerStart]);
 else if(m_outerSize.value()==0)
 return 0;
 else
-return Map<const Matrix<Index,Size,1> >(m_matrix._innerNonZeroPtr(), m_outerSize.value()).sum();
+return Map<const Matrix<Index,Size,1> >(m_matrix.innerNonZeroPtr(), m_outerSize.value()).sum();
 }
 const Scalar& lastCoeff() const
@@ -251,9 +251,9 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
 EIGEN_STATIC_ASSERT_VECTOR_ONLY(SparseInnerVectorSet);
 eigen_assert(nonZeros()>0);
 if(m_matrix.compressed())
-return m_matrix._valuePtr()[m_matrix._outerIndexPtr()[m_outerStart+1]-1];
+return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
 else
-return m_matrix._valuePtr()[m_matrix._outerIndexPtr()[m_outerStart]+m_matrix._innerNonZeroPtr()[m_outerStart]-1];
+return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
 }
 // template<typename Sparse>

View File

@@ -116,35 +116,41 @@ class SparseMatrix
 /** \returns the number of columns (resp. rows) of the matrix if the storage order column major (resp. row major) */
 inline Index outerSize() const { return m_outerSize; }
-/** \internal
-* \returns a const pointer to the array of values */
-inline const Scalar* _valuePtr() const { return &m_data.value(0); }
-/** \internal
-* \returns a non-const pointer to the array of values */
-inline Scalar* _valuePtr() { return &m_data.value(0); }
-/** \internal
-* \returns a const pointer to the array of inner indices */
-inline const Index* _innerIndexPtr() const { return &m_data.index(0); }
-/** \internal
-* \returns a non-const pointer to the array of inner indices */
-inline Index* _innerIndexPtr() { return &m_data.index(0); }
-/** \internal
-* \returns a const pointer to the array of the starting positions of the inner vectors */
-inline const Index* _outerIndexPtr() const { return m_outerIndex; }
-/** \internal
-* \returns a non-const pointer to the array of the starting positions of the inner vectors */
-inline Index* _outerIndexPtr() { return m_outerIndex; }
-/** \internal
-* \returns a const pointer to the array of the number of non zeros of the inner vectors
-* \warning it returns 0 in compressed mode */
-inline const Index* _innerNonZeroPtr() const { return m_innerNonZeros; }
-/** \internal
-* \returns a non-const pointer to the array of the number of non zeros of the inner vectors
-* \warning it returns 0 in compressed mode */
-inline Index* _innerNonZeroPtr() { return m_innerNonZeros; }
+/** \returns a const pointer to the array of values.
+* This function is aimed at interoperability with other libraries.
+* \sa innerIndexPtr(), outerIndexPtr() */
+inline const Scalar* valuePtr() const { return &m_data.value(0); }
+/** \returns a non-const pointer to the array of values.
+* This function is aimed at interoperability with other libraries.
+* \sa innerIndexPtr(), outerIndexPtr() */
+inline Scalar* valuePtr() { return &m_data.value(0); }
+/** \returns a const pointer to the array of inner indices.
+* This function is aimed at interoperability with other libraries.
+* \sa valuePtr(), outerIndexPtr() */
+inline const Index* innerIndexPtr() const { return &m_data.index(0); }
+/** \returns a non-const pointer to the array of inner indices.
+* This function is aimed at interoperability with other libraries.
+* \sa valuePtr(), outerIndexPtr() */
+inline Index* innerIndexPtr() { return &m_data.index(0); }
+/** \returns a const pointer to the array of the starting positions of the inner vectors.
+* This function is aimed at interoperability with other libraries.
+* \sa valuePtr(), innerIndexPtr() */
+inline const Index* outerIndexPtr() const { return m_outerIndex; }
+/** \returns a non-const pointer to the array of the starting positions of the inner vectors.
+* This function is aimed at interoperability with other libraries.
+* \sa valuePtr(), innerIndexPtr() */
+inline Index* outerIndexPtr() { return m_outerIndex; }
+/** \returns a const pointer to the array of the number of non zeros of the inner vectors.
+* This function is aimed at interoperability with other libraries.
+* \warning it returns the null pointer 0 in compressed mode */
+inline const Index* innerNonZeroPtr() const { return m_innerNonZeros; }
+/** \returns a non-const pointer to the array of the number of non zeros of the inner vectors.
+* This function is aimed at interoperability with other libraries.
+* \warning it returns the null pointer 0 in compressed mode */
+inline Index* innerNonZeroPtr() { return m_innerNonZeros; }
 /** \internal */
 inline Storage& data() { return m_data; }
@@ -862,7 +868,7 @@ class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
 {
 public:
 InnerIterator(const SparseMatrix& mat, Index outer)
-: m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
+: m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
 {
 if(mat.compressed())
 m_end = mat.m_outerIndex[outer+1];
@@ -895,7 +901,7 @@ class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
 {
 public:
 ReverseInnerIterator(const SparseMatrix& mat, Index outer)
-: m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
+: m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
 {
 if(mat.compressed())
 m_id = mat.m_outerIndex[outer+1];
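The new documentation above stresses interoperability, with one caveat worth illustrating: innerNonZeroPtr() returns the null pointer in compressed mode, and only in that mode do the three remaining arrays form a plain CSC/CSR layout. A minimal sketch of reading the storage through the public accessors, assuming a default column-major SparseMatrix:

    #include <Eigen/Sparse>
    #include <cstdio>

    // Print every stored coefficient by walking the raw compressed-column arrays.
    void dumpStorage(Eigen::SparseMatrix<double>& A)
    {
      A.makeCompressed();  // ensure innerNonZeroPtr() is not needed

      const int*    outer = A.outerIndexPtr();  // outerSize()+1 entries
      const int*    inner = A.innerIndexPtr();  // nonZeros() entries
      const double* vals  = A.valuePtr();       // nonZeros() entries

      for (int j = 0; j < A.outerSize(); ++j)
        for (int p = outer[j]; p < outer[j+1]; ++p)
          std::printf("A(%d,%d) = %g\n", inner[p], j, vals[p]);
    }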

View File

@@ -326,11 +326,11 @@ void permute_symm_to_fullsymm(const MatrixType& mat, SparseMatrix<typename Matri
 // reserve space
 dest.reserve(nnz);
-dest._outerIndexPtr()[0] = 0;
+dest.outerIndexPtr()[0] = 0;
 for(Index j=0; j<size; ++j)
-dest._outerIndexPtr()[j+1] = dest._outerIndexPtr()[j] + count[j];
+dest.outerIndexPtr()[j+1] = dest.outerIndexPtr()[j] + count[j];
 for(Index j=0; j<size; ++j)
-count[j] = dest._outerIndexPtr()[j];
+count[j] = dest.outerIndexPtr()[j];
 // copy data
 for(Index j = 0; j<size; ++j)
@@ -343,17 +343,17 @@ void permute_symm_to_fullsymm(const MatrixType& mat, SparseMatrix<typename Matri
 if(i==j)
 {
 int k = count[ip]++;
-dest._innerIndexPtr()[k] = ip;
-dest._valuePtr()[k] = it.value();
+dest.innerIndexPtr()[k] = ip;
+dest.valuePtr()[k] = it.value();
 }
 else if((UpLo==Lower && i>j) || (UpLo==Upper && i<j))
 {
 int k = count[jp]++;
-dest._innerIndexPtr()[k] = ip;
-dest._valuePtr()[k] = it.value();
+dest.innerIndexPtr()[k] = ip;
+dest.valuePtr()[k] = it.value();
 k = count[ip]++;
-dest._innerIndexPtr()[k] = jp;
-dest._valuePtr()[k] = internal::conj(it.value());
+dest.innerIndexPtr()[k] = jp;
+dest.valuePtr()[k] = internal::conj(it.value());
 }
 }
 }
@@ -386,12 +386,12 @@ void permute_symm_to_symm(const MatrixType& mat, SparseMatrix<typename MatrixTyp
 count[DstUpLo==Lower ? (std::min)(ip,jp) : (std::max)(ip,jp)]++;
 }
 }
-dest._outerIndexPtr()[0] = 0;
+dest.outerIndexPtr()[0] = 0;
 for(Index j=0; j<size; ++j)
-dest._outerIndexPtr()[j+1] = dest._outerIndexPtr()[j] + count[j];
-dest.resizeNonZeros(dest._outerIndexPtr()[size]);
+dest.outerIndexPtr()[j+1] = dest.outerIndexPtr()[j] + count[j];
+dest.resizeNonZeros(dest.outerIndexPtr()[size]);
 for(Index j=0; j<size; ++j)
-count[j] = dest._outerIndexPtr()[j];
+count[j] = dest.outerIndexPtr()[j];
 for(Index j = 0; j<size; ++j)
 {
@@ -404,12 +404,12 @@ void permute_symm_to_symm(const MatrixType& mat, SparseMatrix<typename MatrixTyp
 Index ip = perm? perm[i] : i;
 Index k = count[DstUpLo==Lower ? (std::min)(ip,jp) : (std::max)(ip,jp)]++;
-dest._innerIndexPtr()[k] = DstUpLo==Lower ? (std::max)(ip,jp) : (std::min)(ip,jp);
+dest.innerIndexPtr()[k] = DstUpLo==Lower ? (std::max)(ip,jp) : (std::min)(ip,jp);
 if((DstUpLo==Lower && ip<jp) || (DstUpLo==Upper && ip>jp))
-dest._valuePtr()[k] = conj(it.value());
+dest.valuePtr()[k] = conj(it.value());
 else
-dest._valuePtr()[k] = it.value();
+dest.valuePtr()[k] = it.value();
 }
 }
 }
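The permutation routines above fill dest by writing straight into its arrays: count the entries per destination column, turn the counts into outerIndexPtr(), then scatter inner indices and values. A stripped-down sketch of that two-pass pattern using only the newly public accessors (purely illustrative; it assumes at most one entry per position and that each column's row indices come out sorted, since SparseMatrix expects that, and the higher-level insertion API is usually preferable):

    #include <Eigen/Sparse>
    #include <vector>

    // Build an n x n column-major SparseMatrix from (row, col, value) triplets
    // by writing directly into its compressed arrays.
    Eigen::SparseMatrix<double> fromTriplets(int n,
        const std::vector<int>& rows, const std::vector<int>& cols,
        const std::vector<double>& vals)
    {
      Eigen::SparseMatrix<double> dest(n, n);
      std::vector<int> count(n, 0);

      // pass 1: entries per column -> cumulative column starts
      for (std::size_t k = 0; k < cols.size(); ++k)
        count[cols[k]]++;
      dest.outerIndexPtr()[0] = 0;
      for (int j = 0; j < n; ++j)
        dest.outerIndexPtr()[j+1] = dest.outerIndexPtr()[j] + count[j];
      dest.resizeNonZeros(dest.outerIndexPtr()[n]);
      for (int j = 0; j < n; ++j)
        count[j] = dest.outerIndexPtr()[j];

      // pass 2: scatter row indices and values into place
      for (std::size_t k = 0; k < vals.size(); ++k)
      {
        int p = count[cols[k]]++;
        dest.innerIndexPtr()[p] = rows[k];
        dest.valuePtr()[p]      = vals[k];
      }
      return dest;
    }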

View File

@@ -92,11 +92,11 @@ class SparseVector
 EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
 EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
-EIGEN_STRONG_INLINE const Scalar* _valuePtr() const { return &m_data.value(0); }
-EIGEN_STRONG_INLINE Scalar* _valuePtr() { return &m_data.value(0); }
-EIGEN_STRONG_INLINE const Index* _innerIndexPtr() const { return &m_data.index(0); }
-EIGEN_STRONG_INLINE Index* _innerIndexPtr() { return &m_data.index(0); }
+EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
+EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
+EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
+EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
 inline Scalar coeff(Index row, Index col) const
 {

View File

@@ -197,9 +197,9 @@ struct SluMatrix : SuperMatrix
 res.Mtype = SLU_GE;
 res.storage.nnz = mat.nonZeros();
-res.storage.values = mat.derived()._valuePtr();
-res.storage.innerInd = mat.derived()._innerIndexPtr();
-res.storage.outerInd = mat.derived()._outerIndexPtr();
+res.storage.values = mat.derived().valuePtr();
+res.storage.innerInd = mat.derived().innerIndexPtr();
+res.storage.outerInd = mat.derived().outerIndexPtr();
 res.setScalarType<typename MatrixType::Scalar>();
@@ -256,9 +256,9 @@ struct SluMatrixMapHelper<SparseMatrixBase<Derived> >
 res.Mtype = SLU_GE;
 res.storage.nnz = mat.nonZeros();
-res.storage.values = mat._valuePtr();
-res.storage.innerInd = mat._innerIndexPtr();
-res.storage.outerInd = mat._outerIndexPtr();
+res.storage.values = mat.valuePtr();
+res.storage.innerInd = mat.innerIndexPtr();
+res.storage.outerInd = mat.outerIndexPtr();
 res.setScalarType<typename MatrixType::Scalar>();
@@ -707,13 +707,13 @@ void SuperLUBase<MatrixType,Derived>::extractData() const
 m_u.resize(size,size);
 m_u.resizeNonZeros(Ustore->nnz);
-int* Lcol = m_l._outerIndexPtr();
-int* Lrow = m_l._innerIndexPtr();
-Scalar* Lval = m_l._valuePtr();
-int* Ucol = m_u._outerIndexPtr();
-int* Urow = m_u._innerIndexPtr();
-Scalar* Uval = m_u._valuePtr();
+int* Lcol = m_l.outerIndexPtr();
+int* Lrow = m_l.innerIndexPtr();
+Scalar* Lval = m_l.valuePtr();
+int* Ucol = m_u.outerIndexPtr();
+int* Urow = m_u.innerIndexPtr();
+Scalar* Uval = m_u.valuePtr();
 Ucol[0] = 0;
 Ucol[0] = 0;
@@ -785,12 +785,12 @@ typename SuperLU<MatrixType>::Scalar SuperLU<MatrixType>::determinant() const
 Scalar det = Scalar(1);
 for (int j=0; j<m_u.cols(); ++j)
 {
-if (m_u._outerIndexPtr()[j+1]-m_u._outerIndexPtr()[j] > 0)
+if (m_u.outerIndexPtr()[j+1]-m_u.outerIndexPtr()[j] > 0)
 {
-int lastId = m_u._outerIndexPtr()[j+1]-1;
-eigen_assert(m_u._innerIndexPtr()[lastId]<=j);
-if (m_u._innerIndexPtr()[lastId]==j)
-det *= m_u._valuePtr()[lastId];
+int lastId = m_u.outerIndexPtr()[j+1]-1;
+eigen_assert(m_u.innerIndexPtr()[lastId]<=j);
+if (m_u.innerIndexPtr()[lastId]==j)
+det *= m_u.valuePtr()[lastId];
 }
 }
 if(m_sluEqued!='N')

View File

@@ -247,7 +247,7 @@ class UmfPackLU
 umfpack_free_numeric(&m_numeric,Scalar());
 int errorCode = 0;
-errorCode = umfpack_symbolic(matrix.rows(), matrix.cols(), matrix._outerIndexPtr(), matrix._innerIndexPtr(), matrix._valuePtr(),
+errorCode = umfpack_symbolic(matrix.rows(), matrix.cols(), matrix.outerIndexPtr(), matrix.innerIndexPtr(), matrix.valuePtr(),
 &m_symbolic, 0, 0);
 m_isInitialized = true;
@@ -271,7 +271,7 @@ class UmfPackLU
 m_matrixRef = &matrix;
 int errorCode;
-errorCode = umfpack_numeric(matrix._outerIndexPtr(), matrix._innerIndexPtr(), matrix._valuePtr(),
+errorCode = umfpack_numeric(matrix.outerIndexPtr(), matrix.innerIndexPtr(), matrix.valuePtr(),
 m_symbolic, &m_numeric, 0, 0);
 m_info = errorCode ? NumericalIssue : Success;
@@ -337,8 +337,8 @@ void UmfPackLU<MatrixType>::extractData() const
 m_q.resize(cols);
 // extract
-umfpack_get_numeric(m_l._outerIndexPtr(), m_l._innerIndexPtr(), m_l._valuePtr(),
-m_u._outerIndexPtr(), m_u._innerIndexPtr(), m_u._valuePtr(),
+umfpack_get_numeric(m_l.outerIndexPtr(), m_l.innerIndexPtr(), m_l.valuePtr(),
+m_u.outerIndexPtr(), m_u.innerIndexPtr(), m_u.valuePtr(),
 m_p.data(), m_q.data(), 0, 0, 0, m_numeric);
 m_extractedDataAreDirty = false;
@@ -365,7 +365,7 @@ bool UmfPackLU<MatrixType>::_solve(const MatrixBase<BDerived> &b, MatrixBase<XDe
 for (int j=0; j<rhsCols; ++j)
 {
 errorCode = umfpack_solve(UMFPACK_A,
-m_matrixRef->_outerIndexPtr(), m_matrixRef->_innerIndexPtr(), m_matrixRef->_valuePtr(),
+m_matrixRef->outerIndexPtr(), m_matrixRef->innerIndexPtr(), m_matrixRef->valuePtr(),
 &x.col(j).coeffRef(0), &b.const_cast_derived().col(j).coeffRef(0), m_numeric, 0, 0);
 if (errorCode!=0)
 return false;

View File

@@ -121,6 +121,7 @@ template<typename _Scalar, int _Options, typename _Index>
 }
 class InnerIterator;
+class ReverseInnerIterator;
 void setZero()
 {
@@ -348,4 +349,20 @@ class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator : public Sparse
 const Index m_outer;
 };
+template<typename Scalar, int _Options, typename _Index>
+class DynamicSparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
+{
+typedef typename SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator Base;
+public:
+ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
+: Base(mat.m_data[outer]), m_outer(outer)
+{}
+inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
+inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
+protected:
+const Index m_outer;
+};
 #endif // EIGEN_DYNAMIC_SPARSEMATRIX_H
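The ReverseInnerIterator added here walks an inner vector of a DynamicSparseMatrix from its last stored entry backwards. A minimal usage sketch, assuming it follows the usual Eigen sparse-iterator protocol (conversion to bool while valid, prefix -- to step) inherited from SparseVector::ReverseInnerIterator:

    #include <Eigen/Sparse>  // DynamicSparseMatrix ships with the Sparse module in this Eigen generation

    // Sum the last stored coefficient (highest inner index) of every column.
    double sumOfColumnTails(const Eigen::DynamicSparseMatrix<double>& A)
    {
      double s = 0;
      for (int j = 0; j < A.outerSize(); ++j)
      {
        Eigen::DynamicSparseMatrix<double>::ReverseInnerIterator it(A, j);
        if (it)            // column j has at least one stored entry
          s += it.value(); // first dereference is the last stored coefficient
      }
      return s;
    }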

View File

@@ -268,12 +268,12 @@ class RandomSetter
 for (Index j=0; j<mp_target->outerSize(); ++j)
 {
 Index tmp = positions[j];
-mp_target->_outerIndexPtr()[j] = count;
+mp_target->outerIndexPtr()[j] = count;
 positions[j] = count;
 count += tmp;
 }
 mp_target->makeCompressed();
-mp_target->_outerIndexPtr()[mp_target->outerSize()] = count;
+mp_target->outerIndexPtr()[mp_target->outerSize()] = count;
 mp_target->resizeNonZeros(count);
 // pass 2
 for (Index k=0; k<m_outerPackets; ++k)
@@ -288,16 +288,16 @@ class RandomSetter
 // Note that we have to deal with at most 2^OuterPacketBits unsorted coefficients,
 // moreover those 2^OuterPacketBits coeffs are likely to be sparse, an so only a
 // small fraction of them have to be sorted, whence the following simple procedure:
-Index posStart = mp_target->_outerIndexPtr()[outer];
+Index posStart = mp_target->outerIndexPtr()[outer];
 Index i = (positions[outer]++) - 1;
-while ( (i >= posStart) && (mp_target->_innerIndexPtr()[i] > inner) )
+while ( (i >= posStart) && (mp_target->innerIndexPtr()[i] > inner) )
 {
-mp_target->_valuePtr()[i+1] = mp_target->_valuePtr()[i];
-mp_target->_innerIndexPtr()[i+1] = mp_target->_innerIndexPtr()[i];
+mp_target->valuePtr()[i+1] = mp_target->valuePtr()[i];
+mp_target->innerIndexPtr()[i+1] = mp_target->innerIndexPtr()[i];
 --i;
 }
-mp_target->_innerIndexPtr()[i+1] = inner;
-mp_target->_valuePtr()[i+1] = it->second.value;
+mp_target->innerIndexPtr()[i+1] = inner;
+mp_target->valuePtr()[i+1] = it->second.value;
 }
 }
 }
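For context, the two-pass code above runs when a RandomSetter is destroyed and its buffered coefficients are flushed back into the target matrix. A minimal usage sketch of that workflow; the scoping braces matter because the flush happens in the destructor, and the target must already have its final dimensions:

    #include <Eigen/Sparse>

    void fillInRandomOrder(Eigen::SparseMatrix<double>& m)  // m already sized
    {
      {
        // Coefficients are buffered in hash maps, so any access order is fine.
        Eigen::RandomSetter<Eigen::SparseMatrix<double> > setter(m);
        setter(0, 3) = 1.5;
        setter(7, 1) = -2.0;
        setter(2, 3) = 0.5;
      } // destructor rebuilds m's compressed storage as shown above
    }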

View File

@@ -250,9 +250,9 @@ void SparseLDLT<_MatrixType,Backend>::_symbolic(const _MatrixType& a)
 ei_declare_aligned_stack_constructed_variable(Index, tags, size, 0);
-const Index* Ap = a._outerIndexPtr();
-const Index* Ai = a._innerIndexPtr();
-Index* Lp = m_matrix._outerIndexPtr();
+const Index* Ap = a.outerIndexPtr();
+const Index* Ai = a.innerIndexPtr();
+Index* Lp = m_matrix.outerIndexPtr();
 const Index* P = 0;
 Index* Pinv = 0;
@@ -311,12 +311,12 @@ bool SparseLDLT<_MatrixType,Backend>::_numeric(const _MatrixType& a)
 assert(m_parent.size()==size);
 assert(m_nonZerosPerCol.size()==size);
-const Index* Ap = a._outerIndexPtr();
-const Index* Ai = a._innerIndexPtr();
-const Scalar* Ax = a._valuePtr();
-const Index* Lp = m_matrix._outerIndexPtr();
-Index* Li = m_matrix._innerIndexPtr();
-Scalar* Lx = m_matrix._valuePtr();
+const Index* Ap = a.outerIndexPtr();
+const Index* Ai = a.innerIndexPtr();
+const Scalar* Ax = a.valuePtr();
+const Index* Lp = m_matrix.outerIndexPtr();
+Index* Li = m_matrix.innerIndexPtr();
+Scalar* Lx = m_matrix.valuePtr();
 m_diag.resize(size);
 ei_declare_aligned_stack_constructed_variable(Scalar, y, size, 0);