s/compressed()/isCompressed()

Author: Gael Guennebaud
Date:   2011-12-10 23:08:10 +01:00
Parent: 594fd2d11d
Commit: f60e6f5ee8
3 changed files with 16 additions and 16 deletions

Changed file 1 of 3:

@@ -73,7 +73,7 @@ cholmod_sparse viewAsCholmod(SparseMatrix<_Scalar,_Options,_Index>& mat)
   res.i = mat.innerIndexPtr();
   res.x = mat.valuePtr();
   res.sorted = 1;
-  if(mat.compressed())
+  if(mat.isCompressed())
   {
     res.packed = 1;
   }
@@ -170,7 +170,7 @@ enum CholmodMode {
  * \tparam _UpLo the triangular part that will be used for the computations. It can be Lower
  *               or Upper. Default is Lower.
  *
- * This class supports all kind of SparseMatrix<>: row or column major; upper, lower, or both; compressed or uncompressed.
+ * This class supports all kind of SparseMatrix<>: row or column major; upper, lower, or both; isCompressed() or unisCompressed().
  *
  * \sa \ref TutorialSparseDirectSolvers
  */
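A hedged sketch (not part of the commit) of how the solver documented above is typically used. It assumes the CholmodSupport module and a SuiteSparse/CHOLMOD installation; the function and variable names are arbitrary.

    #include <Eigen/Sparse>
    #include <Eigen/CholmodSupport>

    // Solve A x = b for a self-adjoint sparse A, using only its lower triangular part.
    Eigen::VectorXd solveWithCholmod(const Eigen::SparseMatrix<double>& A,
                                     const Eigen::VectorXd& b)
    {
      Eigen::CholmodDecomposition<Eigen::SparseMatrix<double>, Eigen::Lower> solver;
      solver.compute(A);      // analyze + factorize via CHOLMOD
      return solver.solve(b); // back-substitution
    }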

Changed file 2 of 3:

@@ -237,7 +237,7 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
     Index nonZeros() const
     {
-      if(m_matrix.compressed())
+      if(m_matrix.isCompressed())
         return std::size_t(m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()])
              - std::size_t(m_matrix.outerIndexPtr()[m_outerStart]);
       else if(m_outerSize.value()==0)
@@ -250,7 +250,7 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
     {
       EIGEN_STATIC_ASSERT_VECTOR_ONLY(SparseInnerVectorSet);
       eigen_assert(nonZeros()>0);
-      if(m_matrix.compressed())
+      if(m_matrix.isCompressed())
         return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
       else
         return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
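For reference, a minimal sketch (not part of the commit) of the storage invariant the two hunks above rely on: in compressed mode the nonzero count of a range of inner vectors follows from outerIndexPtr() alone, while in uncompressed mode it must be accumulated from innerNonZeroPtr(). The helper name blockNonZeros is hypothetical.

    #include <Eigen/Sparse>

    // Count the nonzeros of inner vectors [start, start+size) of an Eigen::SparseMatrix.
    template<typename Scalar, int Options, typename Index>
    Index blockNonZeros(const Eigen::SparseMatrix<Scalar,Options,Index>& m, Index start, Index size)
    {
      if(m.isCompressed())
        // Compressed mode: outerIndexPtr()[j] is the offset of inner vector j in valuePtr().
        return m.outerIndexPtr()[start+size] - m.outerIndexPtr()[start];
      Index nnz = 0;
      for(Index j=0; j<size; ++j)
        // Uncompressed mode: each inner vector carries its own nonzero count.
        nnz += m.innerNonZeroPtr()[start+j];
      return nnz;
    }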

Changed file 3 of 3:

@@ -126,7 +126,7 @@ class SparseMatrix
   public:
     /** \returns whether \c *this is in compressed form. */
-    inline bool compressed() const { return m_innerNonZeros==0; }
+    inline bool isCompressed() const { return m_innerNonZeros==0; }
     /** \returns the number of rows of the matrix */
     inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
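A short usage sketch (not part of the commit, dimensions and values arbitrary) of the state this accessor reports, based on the behaviour visible in the hunks below: insert() and the per-vector reserve() switch the matrix to uncompressed mode, and makeCompressed() returns it to compressed form.

    #include <Eigen/Sparse>
    #include <iostream>

    int main()
    {
      Eigen::SparseMatrix<double> m(4,4);          // starts out empty and compressed
      std::cout << m.isCompressed() << "\n";       // expected: 1

      m.reserve(Eigen::VectorXi::Constant(4, 2));  // per-column reservation -> uncompressed mode
      m.insert(0,0) = 1.0;
      m.insert(2,1) = 2.0;
      std::cout << m.isCompressed() << "\n";       // expected: 0

      m.makeCompressed();                          // squeeze out the reserved holes
      std::cout << m.isCompressed() << "\n";       // expected: 1
    }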
@@ -228,7 +228,7 @@ class SparseMatrix
       */
     EIGEN_DONT_INLINE Scalar& insert(Index row, Index col)
     {
-      if(compressed())
+      if(isCompressed())
       {
         reserve(VectorXi::Constant(outerSize(), 2));
       }
@@ -262,14 +262,14 @@ class SparseMatrix
       * Precondition: the matrix must be in compressed mode. */
     inline void reserve(Index reserveSize)
     {
-      eigen_assert(compressed() && "This function does not make sense in non compressed mode.");
+      eigen_assert(isCompressed() && "This function does not make sense in non compressed mode.");
       m_data.reserve(reserveSize);
     }
 #ifdef EIGEN_PARSED_BY_DOXYGEN
     /** Preallocates \a reserveSize[\c j] non zeros for each column (resp. row) \c j.
       *
-      * This function turns the matrix in non-compressed() mode */
+      * This function turns the matrix in non-compressed mode */
     template<class SizesType>
     inline void reserve(const SizesType& reserveSizes);
 #else
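A hedged sketch (not part of the commit, names and sizes arbitrary) contrasting the two reserve() overloads documented above: the single-size overload keeps the matrix compressed and only grows the shared buffer, while the per-vector overload switches it to uncompressed mode so out-of-order insertions can use the per-column (resp. per-row) slack.

    #include <Eigen/Sparse>

    Eigen::SparseMatrix<double> reserveExample(int rows, int cols)
    {
      Eigen::SparseMatrix<double> m(rows, cols);

      // Overload 1: one total size. Precondition (see the assert above): compressed mode.
      m.reserve(100);

      // Overload 2: a per-column budget; as documented above, this turns m uncompressed.
      m.reserve(Eigen::VectorXi::Constant(m.outerSize(), 6));

      for(int j = 0; j < m.outerSize() && j < rows; ++j)
        m.insert(j, j) = 1.0;            // fits in the reserved slack

      m.makeCompressed();                // drop unused slack, back to compressed storage
      return m;
    }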
@@ -291,7 +291,7 @@ class SparseMatrix
     inline void reserveInnerVectors(const SizesType& reserveSizes)
     {
-      if(compressed())
+      if(isCompressed())
       {
         std::size_t totalReserveSize = 0;
         // turn the matrix into non-compressed mode
@@ -413,7 +413,7 @@ class SparseMatrix
       */
     inline void finalize()
     {
-      if(compressed())
+      if(isCompressed())
       {
         Index size = static_cast<Index>(m_data.size());
         Index i = m_outerSize;
@@ -444,7 +444,7 @@ class SparseMatrix
       */
     void makeCompressed()
     {
-      if(compressed())
+      if(isCompressed())
         return;
       Index oldStart = m_outerIndex[1];
@@ -593,7 +593,7 @@ class SparseMatrix
       else
       {
         initAssignment(other);
-        if(other.compressed())
+        if(other.isCompressed())
         {
           memcpy(m_outerIndex, other.m_outerIndex, (m_outerSize+1)*sizeof(Index));
           m_data = other.m_data;
@@ -729,7 +729,7 @@ protected:
       * \sa insert(Index,Index) */
     EIGEN_DONT_INLINE Scalar& insertCompressed(Index row, Index col)
     {
-      eigen_assert(compressed());
+      eigen_assert(isCompressed());
       const Index outer = IsRowMajor ? row : col;
       const Index inner = IsRowMajor ? col : row;
@@ -852,7 +852,7 @@ protected:
       * \sa insert(Index,Index) */
     EIGEN_DONT_INLINE Scalar& insertUncompressed(Index row, Index col)
     {
-      eigen_assert(!compressed());
+      eigen_assert(!isCompressed());
       const Index outer = IsRowMajor ? row : col;
       const Index inner = IsRowMajor ? col : row;
@@ -904,7 +904,7 @@ class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
     InnerIterator(const SparseMatrix& mat, Index outer)
       : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
     {
-      if(mat.compressed())
+      if(mat.isCompressed())
         m_end = mat.m_outerIndex[outer+1];
       else
         m_end = m_id + mat.m_innerNonZeros[outer];
@@ -937,7 +937,7 @@ class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
     ReverseInnerIterator(const SparseMatrix& mat, Index outer)
       : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
     {
-      if(mat.compressed())
+      if(mat.isCompressed())
         m_id = mat.m_outerIndex[outer+1];
       else
         m_id = m_start + mat.m_innerNonZeros[outer];
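The iterator changes above only affect how the end of an inner vector is located; user-side traversal is unchanged. A minimal sketch of that traversal (not part of the commit, values arbitrary):

    #include <Eigen/Sparse>
    #include <iostream>

    int main()
    {
      Eigen::SparseMatrix<double> m(3,3);
      m.insert(0,0) = 1.0;
      m.insert(2,1) = 2.0;
      // Works in both compressed and uncompressed mode thanks to the branch shown above.
      for(int k = 0; k < m.outerSize(); ++k)
        for(Eigen::SparseMatrix<double>::InnerIterator it(m,k); it; ++it)
          std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";
    }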