From 44fe539150419c933d0c661a39c91fa796b8a912 Mon Sep 17 00:00:00 2001
From: Charles Schlosser
Date: Thu, 1 Dec 2022 19:28:56 +0000
Subject: [PATCH] add sparse sort inner vectors function

---
 Eigen/src/SparseCore/SparseCompressedBase.h | 167 ++++++++++++++++++++
 test/sparse_basic.cpp                       | 109 ++++++++++---
 test/sparse_vector.cpp                      |  27 ++++
 3 files changed, 285 insertions(+), 18 deletions(-)

diff --git a/Eigen/src/SparseCore/SparseCompressedBase.h b/Eigen/src/SparseCore/SparseCompressedBase.h
index ede576681..e034ee07b 100644
--- a/Eigen/src/SparseCore/SparseCompressedBase.h
+++ b/Eigen/src/SparseCore/SparseCompressedBase.h
@@ -22,6 +22,9 @@
 
 template<typename Derived> struct traits<SparseCompressedBase<Derived> > : traits<Derived> {};
 
+template<typename Derived, class Comp, bool IsVector>
+struct inner_sort_impl;
+
 } // end namespace internal
 
 /** \ingroup SparseCore_Module
@@ -126,6 +129,40 @@ class SparseCompressedBase
       *
       * \sa valuePtr(), isCompressed() */
     Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }
+
+    /** sorts the inner vectors in the range [begin,end) with respect to `Comp`
+      * \sa innerIndicesAreSorted() */
+    template <class Comp = std::less<>>
+    inline void sortInnerIndices(Index begin, Index end) {
+      eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
+      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
+    }
+
+    /** \returns the index of the first inner vector in the range [begin,end) that is not sorted with respect to `Comp`, or `end` if the range is fully sorted
+      * \sa sortInnerIndices() */
+    template <class Comp = std::less<>>
+    inline Index innerIndicesAreSorted(Index begin, Index end) const {
+      eigen_assert(begin >= 0 && end <= derived().outerSize() && end >= begin);
+      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
+    }
+
+    /** sorts the inner vectors in the range [0,outerSize) with respect to `Comp`
+      * \sa innerIndicesAreSorted() */
+    template <class Comp = std::less<>>
+    inline void sortInnerIndices() {
+      Index begin = 0;
+      Index end = derived().outerSize();
+      internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::run(*this, begin, end);
+    }
+
+    /** \returns the index of the first inner vector in the range [0,outerSize) that is not sorted with respect to `Comp`, or `outerSize` if the range is fully sorted
+      * \sa sortInnerIndices() */
+    template<class Comp = std::less<>>
+    inline Index innerIndicesAreSorted() const {
+      Index begin = 0;
+      Index end = derived().outerSize();
+      return internal::inner_sort_impl<Derived, Comp, IsVectorAtCompileTime>::check(*this, begin, end);
+    }
 
   protected:
     /** Default constructor. Do nothing. */
@@ -306,6 +343,136 @@ class SparseCompressedBase<Derived>::ReverseInnerIterator
 
 namespace internal {
 
+// modified from https://artificial-mind.net/blog/2020/11/28/std-sort-multiple-ranges
+template <typename Scalar, typename StorageIndex>
+class CompressedStorageIterator;
+
+// wrapper class analogous to std::pair<StorageIndex, Scalar>
+// used to define assignment, swap, and comparison operators for CompressedStorageIterator
+template <typename Scalar, typename StorageIndex>
+class StorageRef
+{
+public:
+  using value_type = std::pair<StorageIndex, Scalar>;
+
+  inline StorageRef& operator=(const StorageRef& other) {
+    *m_innerIndexIterator = *other.m_innerIndexIterator;
+    *m_valueIterator = *other.m_valueIterator;
+    return *this;
+  }
+  inline StorageRef& operator=(const value_type& other) {
+    std::tie(*m_innerIndexIterator, *m_valueIterator) = other;
+    return *this;
+  }
+  inline operator value_type() const { return std::make_pair(*m_innerIndexIterator, *m_valueIterator); }
+  inline friend void swap(const StorageRef& a, const StorageRef& b) {
+    std::iter_swap(a.m_innerIndexIterator, b.m_innerIndexIterator);
+    std::iter_swap(a.m_valueIterator, b.m_valueIterator);
+  }
+
+  inline static const StorageIndex& key(const StorageRef& a) { return *a.m_innerIndexIterator; }
+  inline static const StorageIndex& key(const value_type& a) { return a.first; }
+  #define REF_COMP_REF(OP) inline friend bool operator OP(const StorageRef& a, const StorageRef& b) { return key(a) OP key(b); };
+  #define REF_COMP_VAL(OP) inline friend bool operator OP(const StorageRef& a, const value_type& b) { return key(a) OP key(b); };
+  #define VAL_COMP_REF(OP) inline friend bool operator OP(const value_type& a, const StorageRef& b) { return key(a) OP key(b); };
+  #define MAKE_COMPS(OP) REF_COMP_REF(OP) REF_COMP_VAL(OP) VAL_COMP_REF(OP)
+  MAKE_COMPS(<) MAKE_COMPS(>) MAKE_COMPS(<=) MAKE_COMPS(>=) MAKE_COMPS(==) MAKE_COMPS(!=)
+
+protected:
+  StorageIndex* m_innerIndexIterator;
+  Scalar* m_valueIterator;
+private:
+  StorageRef() = delete;
+  // these constructors are only called by the CompressedStorageIterator constructors for convenience only
+  StorageRef(StorageIndex* innerIndexIterator, Scalar* valueIterator) : m_innerIndexIterator(innerIndexIterator), m_valueIterator(valueIterator) {}
+  StorageRef(const StorageRef& other) : m_innerIndexIterator(other.m_innerIndexIterator), m_valueIterator(other.m_valueIterator) {}
+
+  friend class CompressedStorageIterator<Scalar, StorageIndex>;
+};
+
+// STL-compatible iterator class that operates on inner indices and values
+template <typename Scalar, typename StorageIndex>
+class CompressedStorageIterator
+{
+public:
+  using iterator_category = std::random_access_iterator_tag;
+  using reference = StorageRef<Scalar, StorageIndex>;
+  using difference_type = Index;
+  using value_type = typename reference::value_type;
+  using pointer = value_type*;
+
+  CompressedStorageIterator() = delete;
+  CompressedStorageIterator(difference_type index, StorageIndex* innerIndexPtr, Scalar* valuePtr) : m_index(index), m_data(innerIndexPtr, valuePtr) {}
+  CompressedStorageIterator(difference_type index, reference data) : m_index(index), m_data(data) {}
+  CompressedStorageIterator(const CompressedStorageIterator& other) : m_index(other.m_index), m_data(other.m_data) {}
+  inline CompressedStorageIterator& operator=(const CompressedStorageIterator& other) {
+    m_index = other.m_index;
+    m_data = other.m_data;
+    return *this;
+  }
+
+  inline bool operator==(const CompressedStorageIterator& other) const { return m_index == other.m_index; }
+  inline bool operator!=(const CompressedStorageIterator& other) const { return m_index != other.m_index; }
+  inline bool operator< (const CompressedStorageIterator& other) const { return m_index < other.m_index; }
+  inline CompressedStorageIterator operator+(difference_type offset) const { return CompressedStorageIterator(m_index + offset, m_data); }
+  inline CompressedStorageIterator operator-(difference_type offset) const { return CompressedStorageIterator(m_index - offset, m_data); }
+  inline difference_type operator-(const CompressedStorageIterator& other) const { return m_index - other.m_index; }
+  inline CompressedStorageIterator& operator++() { ++m_index; return *this; }
+  inline CompressedStorageIterator& operator--() { --m_index; return *this; }
+  inline reference operator*() const { return reference(m_data.m_innerIndexIterator + m_index, m_data.m_valueIterator + m_index); }
+
+protected:
+  difference_type m_index;
+  reference m_data;
+};
+
+template <typename Derived, class Comp, bool IsVector>
+struct inner_sort_impl {
+  typedef typename Derived::Scalar Scalar;
+  typedef typename Derived::StorageIndex StorageIndex;
+  static inline void run(SparseCompressedBase<Derived>& obj, Index begin, Index end) {
+    const bool is_compressed = obj.isCompressed();
+    for (Index outer = begin; outer < end; outer++) {
+      Index begin_offset = obj.outerIndexPtr()[outer];
+      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
+      CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
+      CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
+      std::sort(begin_it, end_it, Comp());
+    }
+  }
+  static inline Index check(const SparseCompressedBase<Derived>& obj, Index begin, Index end) {
+    const bool is_compressed = obj.isCompressed();
+    for (Index outer = begin; outer < end; outer++) {
+      Index begin_offset = obj.outerIndexPtr()[outer];
+      Index end_offset = is_compressed ? obj.outerIndexPtr()[outer + 1] : (begin_offset + obj.innerNonZeroPtr()[outer]);
+      const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
+      const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
+      bool is_sorted = std::is_sorted(begin_it, end_it, Comp());
+      if (!is_sorted) return outer;
+    }
+    return end;
+  }
+};
+template <typename Derived, class Comp>
+struct inner_sort_impl<Derived, Comp, true> {
+  typedef typename Derived::Scalar Scalar;
+  typedef typename Derived::StorageIndex StorageIndex;
+  static inline void run(SparseCompressedBase<Derived>& obj, Index, Index) {
+    Index begin_offset = 0;
+    Index end_offset = obj.nonZeros();
+    CompressedStorageIterator<Scalar, StorageIndex> begin_it(begin_offset, obj.innerIndexPtr(), obj.valuePtr());
+    CompressedStorageIterator<Scalar, StorageIndex> end_it(end_offset, obj.innerIndexPtr(), obj.valuePtr());
+    std::sort(begin_it, end_it, Comp());
+  }
+  static inline Index check(const SparseCompressedBase<Derived>& obj, Index, Index) {
+    Index begin_offset = 0;
+    Index end_offset = obj.nonZeros();
+    const StorageIndex* begin_it = obj.innerIndexPtr() + begin_offset;
+    const StorageIndex* end_it = obj.innerIndexPtr() + end_offset;
+    return std::is_sorted(begin_it, end_it, Comp()) ? 1 : 0;
+  }
+};
+
 template<typename Derived>
 struct evaluator<SparseCompressedBase<Derived> >
   : evaluator_base<Derived>
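
Usage sketch (illustrative only, not part of the patch): with the methods added above, sorting and querying the inner indices of a SparseMatrix could look as follows. The matrix contents are arbitrary; transparent comparator types such as std::less<> and std::greater<> are used because the sort compares the stored index/value data through the StorageRef proxy rather than through plain StorageIndex values.

  #include <Eigen/SparseCore>
  #include <functional>
  #include <iostream>

  int main() {
    // 4x4 column-major sparse matrix; give the first two columns two entries each.
    Eigen::SparseMatrix<double> A(4, 4);   // StorageIndex defaults to int
    A.insert(2, 0) = 1.0;
    A.insert(0, 0) = 2.0;
    A.insert(3, 1) = 3.0;
    A.insert(1, 1) = 4.0;
    A.makeCompressed();

    // Sort every inner vector (here: every column) by decreasing row index.
    A.sortInnerIndices<std::greater<>>();

    // Index of the first inner vector that is NOT sorted wrt std::less,
    // or outerSize() if all of them are; after the descending sort this prints 0.
    std::cout << A.innerIndicesAreSorted<std::less<>>() << "\n";
    // Every inner vector is now sorted wrt std::greater, so this prints outerSize() == 4.
    std::cout << A.innerIndicesAreSorted<std::greater<>>() << "\n";

    // Restore the ascending order assumed by most of Eigen's sparse kernels
    // (default comparator), then sort only a sub-range of inner vectors:
    // the [begin,end) overload below touches columns 1 and 2 only.
    A.sortInnerIndices();
    A.sortInnerIndices<std::less<>>(1, 3);
    return 0;
  }

Sorting permutes each inner vector's indices and values together, so the matrix evaluates to the same coefficients before and after; this is what the tests below check with VERIFY_IS_APPROX(m2, m1).
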
diff --git a/test/sparse_basic.cpp b/test/sparse_basic.cpp
index 86944900b..52ff7b70d 100644
--- a/test/sparse_basic.cpp
+++ b/test/sparse_basic.cpp
@@ -28,8 +28,8 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& re
 
   const Index rows = ref.rows();
   const Index cols = ref.cols();
-  //const Index inner = ref.innerSize();
-  //const Index outer = ref.outerSize();
+  const Index inner = ref.innerSize();
+  const Index outer = ref.outerSize();
 
   typedef typename SparseMatrixType::Scalar Scalar;
   typedef typename SparseMatrixType::RealScalar RealScalar;
@@ -91,8 +91,12 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& re
       for (Index k=0; k<nnz; ++k)
       {
         Index i = internal::random<Index>(0,rows-1);
-        if (m1.coeff(i,j)==Scalar(0))
-          m2.insert(i,j) = m1(i,j) = internal::random<Scalar>();
+        if (m1.coeff(i, j) == Scalar(0)) {
+          Scalar v = internal::random<Scalar>();
+          if (v == Scalar(0)) v = Scalar(1);
+          m1(i, j) = v;
+          m2.insert(i, j) = v;
+        }
       }
     }
 
@@ -116,13 +120,18 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& re
     {
       Index i = internal::random<Index>(0,rows-1);
      Index j = internal::random<Index>(0,cols-1);
-      if ((m1.coeff(i,j)==Scalar(0)) && (internal::random<int>()%2))
-        m2.insert(i,j) = m1(i,j) = internal::random<Scalar>();
+      if ((m1.coeff(i, j) == Scalar(0)) && (internal::random<int>() % 2)) {
+        Scalar v = internal::random<Scalar>();
+        if (v == Scalar(0)) v = Scalar(1);
+        m1(i, j) = v;
+        m2.insert(i, j) = v;
+      }
       else
       {
         Scalar v = internal::random<Scalar>();
-        m2.coeffRef(i,j) += v;
-        m1(i,j) += v;
+        if (v == Scalar(0)) v = Scalar(1);
+        m1(i, j) = v;
+        m2.coeffRef(i, j) = v;
       }
     }
     VERIFY_IS_APPROX(m2,m1);
@@ -140,8 +149,12 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& re
     {
       Index i = internal::random<Index>(0,rows-1);
       Index j = internal::random<Index>(0,cols-1);
-      if (m1.coeff(i,j)==Scalar(0))
-        m2.insert(i,j) = m1(i,j) = internal::random<Scalar>();
+      if (m1.coeff(i, j) == Scalar(0)) {
+        Scalar v = internal::random<Scalar>();
+        if (v == Scalar(0)) v = Scalar(1);
+        m1(i, j) = v;
+        m2.insert(i, j) = v;
+      }
       if(mode==3) m2.reserve(r);
     }
 
@@ -150,6 +163,63 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& re
     VERIFY_IS_APPROX(m2,m1);
   }
 
+  // test sort
+  if (inner > 1) {
+    bool StorageOrdersMatch = DenseMatrix::IsRowMajor == SparseMatrixType::IsRowMajor;
+    DenseMatrix m1(rows, cols);
+    m1.setZero();
+    SparseMatrixType m2(rows, cols);
+    // generate random inner indices with no repeats
+    Vector<StorageIndex, Dynamic> innerIndices(inner);
+    innerIndices.setLinSpaced(inner, 0, inner - 1);
+    for (Index j = 0; j < outer; j++) {
+      std::random_shuffle(innerIndices.begin(), innerIndices.end());
+      Index nzj = internal::random<Index>(2, inner / 2);
+      for (Index k = 0; k < nzj; k++) {
+        Index i = innerIndices[k];
+        Scalar val = internal::random<Scalar>();
+        m1.coeffRefByOuterInner(StorageOrdersMatch ? j : i, StorageOrdersMatch ? i : j) = val;
+        m2.insertByOuterInner(j, i) = val;
+      }
+    }
+
+    VERIFY_IS_APPROX(m2, m1);
+    // sort wrt greater
+    m2.template sortInnerIndices<std::greater<>>();
+    // verify that all inner vectors are not sorted wrt less
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::less<>>(), 0);
+    // verify that all inner vectors are sorted wrt greater
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::greater<>>(), m2.outerSize());
+    // verify that sort does not change evaluation
+    VERIFY_IS_APPROX(m2, m1);
+    // sort wrt less
+    m2.template sortInnerIndices<std::less<>>();
+    // verify that all inner vectors are sorted wrt less
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::less<>>(), m2.outerSize());
+    // verify that all inner vectors are not sorted wrt greater
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::greater<>>(), 0);
+    // verify that sort does not change evaluation
+    VERIFY_IS_APPROX(m2, m1);
+
+    m2.makeCompressed();
+    // sort wrt greater
+    m2.template sortInnerIndices<std::greater<>>();
+    // verify that all inner vectors are not sorted wrt less
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::less<>>(), 0);
+    // verify that all inner vectors are sorted wrt greater
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::greater<>>(), m2.outerSize());
+    // verify that sort does not change evaluation
+    VERIFY_IS_APPROX(m2, m1);
+    // sort wrt less
+    m2.template sortInnerIndices<std::less<>>();
+    // verify that all inner vectors are sorted wrt less
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::less<>>(), m2.outerSize());
+    // verify that all inner vectors are not sorted wrt greater
+    VERIFY_IS_EQUAL(m2.template innerIndicesAreSorted<std::greater<>>(), 0);
+    // verify that sort does not change evaluation
+    VERIFY_IS_APPROX(m2, m1);
+  }
+
   // test basic computations
   {
     DenseMatrix refM1 = DenseMatrix::Zero(rows, cols);
@@ -729,9 +799,12 @@ EIGEN_DECLARE_TEST(sparse_basic)
     CALL_SUBTEST_1(( sparse_basic(SparseMatrix<double>(8, 8)) ));
     CALL_SUBTEST_2(( sparse_basic(SparseMatrix<std::complex<double>, ColMajor>(r, c)) ));
     CALL_SUBTEST_2(( sparse_basic(SparseMatrix<std::complex<double>, RowMajor>(r, c)) ));
-    CALL_SUBTEST_1(( sparse_basic(SparseMatrix<double>(r, c)) ));
-    CALL_SUBTEST_5(( sparse_basic(SparseMatrix<double,ColMajor,long int>(r, c)) ));
-    CALL_SUBTEST_5(( sparse_basic(SparseMatrix<double,RowMajor,long int>(r, c)) ));
+    CALL_SUBTEST_2(( sparse_basic(SparseMatrix<double>(r, c))));
+    CALL_SUBTEST_2(( sparse_basic(SparseMatrix<double, RowMajor>(r, c))));
+    CALL_SUBTEST_3(( sparse_basic(SparseMatrix<float, ColMajor>(r, c))));
+    CALL_SUBTEST_3(( sparse_basic(SparseMatrix<float, RowMajor>(r, c))));
+    CALL_SUBTEST_4(( sparse_basic(SparseMatrix<double, ColMajor, long int>(r, c)) ));
+    CALL_SUBTEST_4(( sparse_basic(SparseMatrix<double, RowMajor, long int>(r, c)) ));
 
     r = Eigen::internal::random<int>(1,100);
     c = Eigen::internal::random<int>(1,100);
@@ -739,14 +812,14 @@ EIGEN_DECLARE_TEST(sparse_basic)
       r = c; // check square matrices in 25% of tries
     }
 
-    CALL_SUBTEST_6(( sparse_basic(SparseMatrix<double,ColMajor,short int>(short(r), short(c))) ));
-    CALL_SUBTEST_6(( sparse_basic(SparseMatrix<double,RowMajor,short int>(short(r), short(c))) ));
+    CALL_SUBTEST_5(( sparse_basic(SparseMatrix<double, ColMajor, short int>(short(r), short(c))) ));
+    CALL_SUBTEST_5(( sparse_basic(SparseMatrix<double, RowMajor, short int>(short(r), short(c))) ));
   }
 
   // Regression test for bug 900: (manually insert higher values here, if you have enough RAM):
-  CALL_SUBTEST_3((big_sparse_triplet<SparseMatrix<float, RowMajor, int> >(10000, 10000, 0.125)));
-  CALL_SUBTEST_4((big_sparse_triplet<SparseMatrix<double, ColMajor, long int> >(10000, 10000, 0.125)));
+  CALL_SUBTEST_5(( big_sparse_triplet<SparseMatrix<float, RowMajor, int>>(10000, 10000, 0.125)));
+  CALL_SUBTEST_5(( big_sparse_triplet<SparseMatrix<double, ColMajor, long int>>(10000, 10000, 0.125)));
 
-  CALL_SUBTEST_7( bug1105<0>() );
+  CALL_SUBTEST_5(bug1105<0>());
 }
 #endif
diff --git a/test/sparse_vector.cpp b/test/sparse_vector.cpp
index e1e74f340..43de50d81 100644
--- a/test/sparse_vector.cpp
+++ b/test/sparse_vector.cpp
@@ -143,6 +143,33 @@ template<typename Scalar,typename StorageIndex> void sparse_vector(int rows, int
     }
   }
 
+  // test sort
+  if(rows > 1)
+  {
+    SparseVectorType vec1(rows);
+    DenseVector refVec1 = DenseVector::Zero(rows);
+    DenseVector innerIndices(rows);
+    innerIndices.setLinSpaced(0, rows - 1);
+    std::random_shuffle(innerIndices.begin(), innerIndices.end());
+    Index nz = internal::random<Index>(2, rows / 2);
+    for (Index k = 0; k < nz; k++)
+    {
+      Index i = innerIndices[k];
+      Scalar val = internal::random<Scalar>();
+      refVec1.coeffRef(i) = val;
+      vec1.insert(i) = val;
+    }
+
+    vec1.template sortInnerIndices<std::greater<>>();
+    VERIFY_IS_APPROX(vec1, refVec1);
+    VERIFY_IS_EQUAL(vec1.template innerIndicesAreSorted<std::greater<>>(), 1);
+    VERIFY_IS_EQUAL(vec1.template innerIndicesAreSorted<std::less<>>(), 0);
+    vec1.template sortInnerIndices<std::less<>>();
+    VERIFY_IS_APPROX(vec1, refVec1);
+    VERIFY_IS_EQUAL(vec1.template innerIndicesAreSorted<std::greater<>>(), 0);
+    VERIFY_IS_EQUAL(vec1.template innerIndicesAreSorted<std::less<>>(), 1);
+  }
+
 }
 
 void test_pruning() {
   using SparseVectorType = SparseVector;
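
A second illustrative sketch (not taken from the patch; the CSR buffers below are made up): inner indices of matrices mapped from external memory are not guaranteed to be sorted, which is the kind of input these routines are meant to repair in place.

  #include <Eigen/SparseCore>
  #include <iostream>

  int main() {
    // 2x3 CSR matrix with 4 nonzeros; row 0 stores its column indices out of order.
    int    outer[]  = {0, 2, 4};            // row pointers
    int    inner[]  = {2, 0, 1, 2};         // column indices (row 0: {2, 0})
    double values[] = {1.0, 2.0, 3.0, 4.0};

    Eigen::Map<Eigen::SparseMatrix<double, Eigen::RowMajor, int>> M(2, 3, 4, outer, inner, values);

    // Index of the first row whose column indices are not increasing: prints 0.
    std::cout << M.innerIndicesAreSorted() << "\n";

    // Sort indices and values together, directly inside the user-provided buffers.
    M.sortInnerIndices();

    // Now every row is sorted, so this prints outerSize() == 2.
    std::cout << M.innerIndicesAreSorted() << "\n";
    return 0;
  }

Because Map<SparseMatrix<...>> also derives from SparseCompressedBase, the new members are available on mapped as well as owning sparse objects, which is presumably why the sort lives in SparseCompressedBase rather than in SparseMatrix itself.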