Mirror of https://gitlab.com/libeigen/eigen.git
Make SparseVector derive from SparseCompressedBase, thus improving compatibility between sparse vectors and matrices
parent 6100d1ae64
commit 1b43860bc1
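Commentary (not part of the diff): judging by the class names, the hunks below touch SparseCompressedBase.h first and SparseVector.h afterwards. The practical effect is that a SparseVector now exposes the same compressed-storage interface and iterators as a SparseMatrix, so generic code can treat the two uniformly. A minimal sketch of that, assuming an Eigen checkout containing this commit; sumNonZeros is just an illustrative name:

    #include <Eigen/Sparse>
    #include <iostream>

    // Sums the stored coefficients of any sparse object through its InnerIterator.
    // Works for SparseMatrix, and after this commit also for SparseVector.
    template<typename SparseType>
    double sumNonZeros(const SparseType& s)
    {
      double sum = 0;
      for (Eigen::Index j = 0; j < s.outerSize(); ++j)
        for (typename SparseType::InnerIterator it(s, j); it; ++it)
          sum += it.value();
      return sum;
    }

    int main()
    {
      Eigen::SparseMatrix<double> m(3, 3);
      m.insert(0, 1) = 2.0;
      m.insert(2, 2) = 3.0;

      Eigen::SparseVector<double> v(5);
      v.insert(1) = 1.0;
      v.insert(4) = 4.0;

      std::cout << sumNonZeros(m) << " " << sumNonZeros(v) << "\n";  // prints "5 5"
      return 0;
    }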
@@ -45,13 +45,14 @@ class SparseCompressedBase
     /** \returns the number of non zero coefficients */
     inline Index nonZeros() const
     {
-      if(isCompressed())
+      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
+        return derived().nonZeros();
+      else if(isCompressed())
         return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
       else if(derived().outerSize()==0)
         return 0;
       else
         return innerNonZeros().sum();
-
     }
 
     /** \returns a const pointer to the array of values.
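Commentary: the new first branch covers the 1D case. A SparseVector keeps no outer index array (its outerIndexPtr() returns the null pointer, as a later hunk makes explicit), so the base class forwards the count to the derived object. A small check of the observable behaviour, assuming this commit is applied:

    #include <Eigen/Sparse>
    #include <cassert>

    int main()
    {
      Eigen::SparseVector<double> v(10);
      v.insert(2) = 1.0;
      v.insert(7) = 3.0;

      // For a SparseVector there is no outer index array, so the base class
      // takes the new IsVectorAtCompileTime branch and asks the vector itself.
      assert(v.outerIndexPtr() == 0);
      assert(v.nonZeros() == 2);
      return 0;
    }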
@@ -74,10 +75,12 @@ class SparseCompressedBase
 
     /** \returns a const pointer to the array of the starting positions of the inner vectors.
       * This function is aimed at interoperability with other libraries.
+      * \warning it returns the null pointer 0 for SparseVector
       * \sa valuePtr(), innerIndexPtr() */
     inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
     /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
       * This function is aimed at interoperability with other libraries.
+      * \warning it returns the null pointer 0 for SparseVector
       * \sa valuePtr(), innerIndexPtr() */
     inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }
 
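Commentary: the added \warning matters for interoperability code that hands these raw pointers to an external CSR/CSC routine; such code should now treat a null outerIndexPtr() as "this expression is a vector". A defensive sketch using a hypothetical helper (exportToCsc is not an Eigen function), assuming this commit:

    #include <Eigen/Sparse>
    #include <cstdio>

    // Hypothetical export helper: only 2D objects carry a real outer index array.
    template<typename Derived>
    void exportToCsc(const Eigen::SparseCompressedBase<Derived>& s)
    {
      if (s.outerIndexPtr() == 0)
      {
        std::printf("1D expression: nothing to export as CSC\n");
        return;
      }
      // Safe to read outerIndexPtr()[0..outerSize()] here.
      std::printf("first outer offset: %ld\n", static_cast<long>(s.outerIndexPtr()[0]));
    }

    int main()
    {
      Eigen::SparseMatrix<double> m(3, 3);
      m.insert(1, 1) = 2.0;
      m.makeCompressed();
      exportToCsc(m);   // prints the first outer offset (0)

      Eigen::SparseVector<double> v(3);
      v.insert(0) = 1.0;
      exportToCsc(v);   // takes the early return
      return 0;
    }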
@@ -100,12 +103,27 @@ class SparseCompressedBase<Derived>::InnerIterator
 {
   public:
     InnerIterator(const SparseCompressedBase& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.outerIndexPtr()[outer])
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
     {
-      if(mat.isCompressed())
-        m_end = mat.outerIndexPtr()[outer+1];
+      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
+      {
+        m_id = 0;
+        m_end = mat.nonZeros();
+      }
       else
-        m_end = m_id + mat.innerNonZeroPtr()[outer];
+      {
+        m_id = mat.outerIndexPtr()[outer];
+        if(mat.isCompressed())
+          m_end = mat.outerIndexPtr()[outer+1];
+        else
+          m_end = m_id + mat.innerNonZeroPtr()[outer];
+      }
+    }
+
+    InnerIterator(const SparseCompressedBase& mat)
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
+    {
+      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
     }
 
     inline InnerIterator& operator++() { m_id++; return *this; }
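Commentary: the new one-argument constructor is restricted to vector expressions by EIGEN_STATIC_ASSERT_VECTOR_ONLY and simply spans all stored entries; the usual (mat, outer) form keeps working. A usage sketch, assuming this commit:

    #include <Eigen/Sparse>
    #include <iostream>

    int main()
    {
      Eigen::SparseVector<double> v(100);
      v.insert(3)  = 1.5;
      v.insert(42) = -2.0;

      // One-argument form: spans every stored entry of the vector.
      for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
        std::cout << it.index() << " -> " << it.value() << "\n";
      return 0;
    }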
@@ -114,16 +132,16 @@ class SparseCompressedBase<Derived>::InnerIterator
     inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }
 
     inline StorageIndex index() const { return m_indices[m_id]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
+    inline Index outer() const { return m_outer.value(); }
+    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
+    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }
 
     inline operator bool() const { return (m_id < m_end); }
 
   protected:
     const Scalar* m_values;
     const StorageIndex* m_indices;
-    const Index m_outer;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
     Index m_id;
     Index m_end;
   private:
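Commentary: storing the outer index in internal::variable_if_dynamic<Index, IsVectorAtCompileTime ? 0 : Dynamic> means that for vector expressions the outer index becomes the compile-time constant 0 and costs no per-iterator storage, while matrices keep a runtime value. A simplified, self-contained sketch of that idea (not Eigen's actual implementation):

    #include <cassert>

    const int Dynamic = -1;  // stand-in for Eigen::Dynamic

    // Compile-time case: the value is baked into the type, no member is stored.
    template<typename T, int Value>
    struct variable_if_dynamic
    {
      explicit variable_if_dynamic(T) {}     // the runtime argument is ignored
      T value() const { return T(Value); }
    };

    // Runtime case: fall back to an ordinary member.
    template<typename T>
    struct variable_if_dynamic<T, Dynamic>
    {
      explicit variable_if_dynamic(T v) : m_value(v) {}
      T value() const { return m_value; }
      T m_value;
    };

    int main()
    {
      variable_if_dynamic<long, 0>       vec_outer(7);   // vector case: always 0
      variable_if_dynamic<long, Dynamic> mat_outer(7);   // matrix case: keeps 7
      assert(vec_outer.value() == 0);
      assert(mat_outer.value() == 7);
      return 0;
    }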
@@ -138,32 +156,45 @@ class SparseCompressedBase<Derived>::ReverseInnerIterator
 {
   public:
     ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.outerIndexPtr()[outer])
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
     {
-      if(mat.isCompressed())
-        m_id = mat.outerIndexPtr()[outer+1];
+      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
+      {
+        m_start = 0;
+        m_id = mat.nonZeros();
+      }
       else
-        m_id = m_start + mat.innerNonZeroPtr()[outer];
+      {
+        m_start.value() = mat.outerIndexPtr()[outer];
+        if(mat.isCompressed())
+          m_id = mat.outerIndexPtr()[outer+1];
+        else
+          m_id = m_start.value() + mat.innerNonZeroPtr()[outer];
+      }
     }
 
+    ReverseInnerIterator(const SparseCompressedBase& mat)
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
+    {}
+
     inline ReverseInnerIterator& operator--() { --m_id; return *this; }
 
     inline const Scalar& value() const { return m_values[m_id-1]; }
     inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }
 
     inline StorageIndex index() const { return m_indices[m_id-1]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
+    inline Index outer() const { return m_outer.value(); }
+    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
+    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }
 
-    inline operator bool() const { return (m_id > m_start); }
+    inline operator bool() const { return (m_id > m_start.value()); }
 
   protected:
     const Scalar* m_values;
     const StorageIndex* m_indices;
-    const Index m_outer;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
     Index m_id;
-    const Index m_start;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_start;
 };
 
 namespace internal {
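Commentary: ReverseInnerIterator receives the mirror-image treatment, including a one-argument constructor for vectors. A short usage sketch, assuming this commit:

    #include <Eigen/Sparse>
    #include <iostream>

    int main()
    {
      Eigen::SparseVector<double> v(10);
      v.insert(1) = 1.0;
      v.insert(8) = 8.0;

      // Walks the stored entries from last to first: prints index 8, then index 1.
      for (Eigen::SparseVector<double>::ReverseInnerIterator it(v); it; --it)
        std::cout << it.index() << " -> " << it.value() << "\n";
      return 0;
    }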
@@ -40,7 +40,7 @@ struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
     ColsAtCompileTime = IsColVector ? 1 : Dynamic,
     MaxRowsAtCompileTime = RowsAtCompileTime,
     MaxColsAtCompileTime = ColsAtCompileTime,
-    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
+    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
     CoeffReadCost = NumTraits<Scalar>::ReadCost,
     SupportedAccessPatterns = InnerRandomAccessPattern
   };
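Commentary: adding CompressedAccessBit to the traits advertises that SparseVector's storage is reachable through the compressed interface (valuePtr()/innerIndexPtr()), which is what allows it to sit under SparseCompressedBase. One way to check this at compile time (a sketch using C++11 static_assert; the trait and constant names are Eigen's):

    #include <Eigen/Sparse>

    // Holds after this commit: SparseVector advertises compressed (CSC/CSR-like) access.
    static_assert(Eigen::internal::traits<Eigen::SparseVector<double> >::Flags
                    & Eigen::CompressedAccessBit,
                  "SparseVector should expose compressed storage");

    int main() { return 0; }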
@@ -63,12 +63,12 @@ struct sparse_vector_assign_selector;
 
 template<typename _Scalar, int _Options, typename _StorageIndex>
 class SparseVector
-  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _StorageIndex> >
+  : public SparseCompressedBase<SparseVector<_Scalar, _Options, _StorageIndex> >
 {
-    typedef SparseMatrixBase<SparseVector> SparseBase;
+    typedef SparseCompressedBase<SparseVector> Base;
 
   public:
-    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
+    _EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
 
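Commentary: this is the heart of the commit. SparseVector now derives from SparseCompressedBase, so a function template written against that base accepts both matrices and vectors. A minimal sketch, assuming this commit; printFirstStored is an illustrative name:

    #include <Eigen/Sparse>
    #include <iostream>

    // Accepts anything deriving from SparseCompressedBase; before this commit a
    // SparseVector argument would not have matched this signature.
    template<typename Derived>
    void printFirstStored(const Eigen::SparseCompressedBase<Derived>& s)
    {
      if (s.nonZeros() > 0)
        std::cout << s.valuePtr()[0] << "\n";
    }

    int main()
    {
      Eigen::SparseVector<double> v(4);
      v.insert(2) = 5.0;
      printFirstStored(v);   // prints 5
      return 0;
    }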
@@ -89,6 +89,11 @@ class SparseVector
 
     EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return &m_data.index(0); }
     EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return &m_data.index(0); }
 
+    inline const StorageIndex* outerIndexPtr() const { return 0; }
+    inline StorageIndex* outerIndexPtr() { return 0; }
+    inline const StorageIndex* innerNonZeroPtr() const { return 0; }
+    inline StorageIndex* innerNonZeroPtr() { return 0; }
+
     /** \internal */
     inline Storage& data() { return m_data; }
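Commentary: returning null pointers here is what triggers the vector branches added above. It also means the base-class isCompressed() test, which amounts to checking innerNonZeroPtr()==0, always reports a SparseVector as compressed, consistent with its single contiguous storage. A small check, assuming this commit:

    #include <Eigen/Sparse>
    #include <cassert>

    int main()
    {
      Eigen::SparseVector<double> v(6);
      v.insert(0) = 1.0;
      v.insert(5) = 2.0;

      // No outer index array and no per-column nonzero counts for a vector...
      assert(v.outerIndexPtr() == 0);
      assert(v.innerNonZeroPtr() == 0);
      // ...so it is always reported as compressed.
      assert(v.isCompressed());
      return 0;
    }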
@@ -126,8 +131,8 @@ class SparseVector
 
   public:
 
-    class InnerIterator;
-    class ReverseInnerIterator;
+    typedef typename Base::InnerIterator InnerIterator;
+    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;
 
     inline void setZero() { m_data.clear(); }
 
@@ -235,7 +240,7 @@ class SparseVector
     }
 
     inline SparseVector(const SparseVector& other)
-      : SparseBase(other), m_size(0)
+      : Base(other), m_size(0)
     {
       check_template_parameters();
      *this = other.derived();
@@ -357,75 +362,6 @@ protected:
     Index m_size;
 };
 
-template<typename Scalar, int _Options, typename _StorageIndex>
-class SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
-{
-  public:
-    explicit InnerIterator(const SparseVector& vec, Index outer=0)
-      : m_data(vec.m_data), m_id(0), m_end(m_data.size())
-    {
-      EIGEN_UNUSED_VARIABLE(outer);
-      eigen_assert(outer==0);
-    }
-
-    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_data(data), m_id(0), m_end(m_data.size())
-    {}
-
-    inline InnerIterator& operator++() { m_id++; return *this; }
-
-    inline Scalar value() const { return m_data.value(m_id); }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }
-
-    inline StorageIndex index() const { return m_data.index(m_id); }
-    inline Index row() const { return IsColVector ? index() : 0; }
-    inline Index col() const { return IsColVector ? 0 : index(); }
-
-    inline operator bool() const { return (m_id < m_end); }
-
-  protected:
-    const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
-    Index m_id;
-    const Index m_end;
-  private:
-    // If you get here, then you're not using the right InnerIterator type, e.g.:
-    // SparseMatrix<double,RowMajor> A;
-    // SparseMatrix<double>::InnerIterator it(A,0);
-    template<typename T> InnerIterator(const SparseMatrixBase<T>&,Index outer=0);
-};
-
-template<typename Scalar, int _Options, typename _StorageIndex>
-class SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
-{
-  public:
-    explicit ReverseInnerIterator(const SparseVector& vec, Index outer=0)
-      : m_data(vec.m_data), m_id(m_data.size()), m_start(0)
-    {
-      EIGEN_UNUSED_VARIABLE(outer);
-      eigen_assert(outer==0);
-    }
-
-    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_data(data), m_id(m_data.size()), m_start(0)
-    {}
-
-    inline ReverseInnerIterator& operator--() { m_id--; return *this; }
-
-    inline Scalar value() const { return m_data.value(m_id-1); }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
-
-    inline StorageIndex index() const { return m_data.index(m_id-1); }
-    inline Index row() const { return IsColVector ? index() : 0; }
-    inline Index col() const { return IsColVector ? 0 : index(); }
-
-    inline operator bool() const { return (m_id > m_start); }
-
-  protected:
-    const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
-    Index m_id;
-    const Index m_start;
-};
-
 namespace internal {
 
 template<typename _Scalar, int _Options, typename _Index>