bug #1172: make valuePtr and innerIndexPtr properly return null for empty matrices.

parent ac2e6e0d03
commit 8e6faab51e
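
The patch replaces the old &m_data.value(0) / &m_data.index(0) idiom, which subscripts a buffer that may still be null, with direct pointer accessors. A minimal sketch of the user-visible behaviour this targets (assuming an empty matrix has allocated no storage yet):

#include <Eigen/SparseCore>
#include <cassert>

int main()
{
  // An empty sparse matrix owns no value/index buffers yet, so the raw
  // pointer accessors should return null rather than &buffer[0].
  Eigen::SparseMatrix<double> empty(0, 0);
  assert(empty.valuePtr() == 0);
  assert(empty.innerIndexPtr() == 0);

  // Once coefficients exist, the pointers refer to the compressed arrays.
  Eigen::SparseMatrix<double> m(3, 3);
  m.insert(0, 0) = 1.0;
  m.makeCompressed();
  assert(m.valuePtr() != 0 && m.innerIndexPtr() != 0);
  return 0;
}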
@@ -110,11 +110,16 @@ class CompressedStorage
     inline Index allocatedSize() const { return m_allocatedSize; }
     inline void clear() { m_size = 0; }
 
-    inline Scalar& value(Index i) { return m_values[i]; }
-    inline const Scalar& value(Index i) const { return m_values[i]; }
+    const Scalar* valuePtr() const { return m_values; }
+    Scalar* valuePtr() { return m_values; }
+    const StorageIndex* indexPtr() const { return m_indices; }
+    StorageIndex* indexPtr() { return m_indices; }
+
+    inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
+    inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }
 
-    inline StorageIndex& index(Index i) { return m_indices[i]; }
-    inline const StorageIndex& index(Index i) const { return m_indices[i]; }
+    inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
+    inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
 
     /** \returns the largest \c k such that for all \c j in [0,k) index[\c j]\<\a key */
     inline Index searchLowerIndex(Index key) const
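
The new valuePtr()/indexPtr() accessors return the raw members, which are simply null while the storage is empty; the old idiom of writing &value(0) had to subscript that null pointer first. An illustrative sketch of the distinction (not Eigen code; the class and member names are hypothetical):

// TinyStorage mimics only the relevant part of the interface.
struct TinyStorage
{
  double* m_values;                              // null while the storage is empty

  TinyStorage() : m_values(0) {}

  double& value(int i) { return m_values[i]; }   // &value(0) on empty storage: undefined behaviour
  double* valuePtr()   { return m_values; }      // empty storage: a well-defined null pointer
};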
@@ -145,14 +145,14 @@ public:
         // realloc manually to reduce copies
         typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);
 
-        internal::smart_copy(&m_matrix.data().value(0), &m_matrix.data().value(0) + start, &newdata.value(0));
-        internal::smart_copy(&m_matrix.data().index(0), &m_matrix.data().index(0) + start, &newdata.index(0));
+        internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
+        internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());
 
-        internal::smart_copy(tmp.valuePtr(), tmp.valuePtr() + nnz, &newdata.value(start));
-        internal::smart_copy(tmp.innerIndexPtr(), tmp.innerIndexPtr() + nnz, &newdata.index(start));
+        internal::smart_copy(tmp.valuePtr(), tmp.valuePtr() + nnz, newdata.valuePtr() + start);
+        internal::smart_copy(tmp.innerIndexPtr(), tmp.innerIndexPtr() + nnz, newdata.indexPtr() + start);
 
-        internal::smart_copy(&matrix.data().value(end), &matrix.data().value(end) + tail_size, &newdata.value(start+nnz));
-        internal::smart_copy(&matrix.data().index(end), &matrix.data().index(end) + tail_size, &newdata.index(start+nnz));
+        internal::smart_copy(matrix.valuePtr()+end, matrix.valuePtr()+end + tail_size, newdata.valuePtr()+start+nnz);
+        internal::smart_copy(matrix.innerIndexPtr()+end, matrix.innerIndexPtr()+end + tail_size, newdata.indexPtr()+start+nnz);
 
         newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
 
@@ -167,14 +167,14 @@ public:
         // no need to realloc, simply copy the tail at its respective position and insert tmp
         matrix.data().resize(start + nnz + tail_size);
 
-        internal::smart_memmove(&matrix.data().value(end), &matrix.data().value(end) + tail_size, &matrix.data().value(start + nnz));
-        internal::smart_memmove(&matrix.data().index(end), &matrix.data().index(end) + tail_size, &matrix.data().index(start + nnz));
+        internal::smart_memmove(matrix.valuePtr()+end, matrix.valuePtr() + end+tail_size, matrix.valuePtr() + start+nnz);
+        internal::smart_memmove(matrix.innerIndexPtr()+end, matrix.innerIndexPtr() + end+tail_size, matrix.innerIndexPtr() + start+nnz);
 
         update_trailing_pointers = true;
       }
 
-      internal::smart_copy(tmp.valuePtr(), tmp.valuePtr() + nnz, &matrix.data().value(start));
-      internal::smart_copy(tmp.innerIndexPtr(), tmp.innerIndexPtr() + nnz, &matrix.data().index(start));
+      internal::smart_copy(tmp.valuePtr(), tmp.valuePtr() + nnz, matrix.valuePtr() + start);
+      internal::smart_copy(tmp.innerIndexPtr(), tmp.innerIndexPtr() + nnz, matrix.innerIndexPtr() + start);
     }
 
     // update outer index pointers and innerNonZeros
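
Both branches above splice a rewritten block into the compressed arrays by copying three ranges: the head [0, start), the nnz new entries held in tmp, and the old tail [end, end + tail_size). A rough sketch of that splice on a plain value array, using std::copy in place of Eigen's internal smart_copy and with hypothetical names:

#include <algorithm>
#include <cstddef>
#include <vector>

void splice_block(const std::vector<double>& old_values,  // existing compressed values
                  const std::vector<double>& tmp_values,  // nnz entries of the new block
                  std::ptrdiff_t start, std::ptrdiff_t end,
                  std::vector<double>& new_values)
{
  const std::ptrdiff_t nnz       = static_cast<std::ptrdiff_t>(tmp_values.size());
  const std::ptrdiff_t tail_size = static_cast<std::ptrdiff_t>(old_values.size()) - end;

  new_values.resize(start + nnz + tail_size);
  std::copy(old_values.data(),       old_values.data() + start,           new_values.data());                // head [0, start)
  std::copy(tmp_values.data(),       tmp_values.data() + nnz,             new_values.data() + start);        // new block
  std::copy(old_values.data() + end, old_values.data() + end + tail_size, new_values.data() + start + nnz);  // tail
}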
@@ -160,7 +160,7 @@ class SparseCompressedBase<Derived>::InnerIterator
     }
 
     explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_id(0), m_end(data.size())
+      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
     {
       EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
     }
@@ -220,7 +220,7 @@ class SparseCompressedBase<Derived>::ReverseInnerIterator
     }
 
     explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_values(&data.value(0)), m_indices(&data.index(0)), m_outer(0), m_start(0), m_id(data.size())
+      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
     {
       EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
     }
@@ -140,20 +140,20 @@ class SparseMatrix
     /** \returns a const pointer to the array of values.
       * This function is aimed at interoperability with other libraries.
       * \sa innerIndexPtr(), outerIndexPtr() */
-    inline const Scalar* valuePtr() const { return &m_data.value(0); }
+    inline const Scalar* valuePtr() const { return m_data.valuePtr(); }
     /** \returns a non-const pointer to the array of values.
       * This function is aimed at interoperability with other libraries.
       * \sa innerIndexPtr(), outerIndexPtr() */
-    inline Scalar* valuePtr() { return &m_data.value(0); }
+    inline Scalar* valuePtr() { return m_data.valuePtr(); }
 
     /** \returns a const pointer to the array of inner indices.
       * This function is aimed at interoperability with other libraries.
       * \sa valuePtr(), outerIndexPtr() */
-    inline const StorageIndex* innerIndexPtr() const { return &m_data.index(0); }
+    inline const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
     /** \returns a non-const pointer to the array of inner indices.
       * This function is aimed at interoperability with other libraries.
       * \sa valuePtr(), outerIndexPtr() */
-    inline StorageIndex* innerIndexPtr() { return &m_data.index(0); }
+    inline StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }
 
     /** \returns a const pointer to the array of the starting positions of the inner vectors.
       * This function is aimed at interoperability with other libraries.
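
As the documentation strings note, these accessors exist for interoperability: together with outerIndexPtr() they expose the standard compressed-column (or compressed-row) arrays. A small usage sketch, assuming a compressed column-major matrix with Eigen's default int storage index:

#include <Eigen/SparseCore>
#include <iostream>

int main()
{
  Eigen::SparseMatrix<double> A(4, 4);        // column-major (CSC) by default
  A.insert(0, 0) = 1.0;
  A.insert(2, 1) = 2.0;
  A.insert(3, 3) = 3.0;
  A.makeCompressed();                         // the raw pointers below assume compressed mode

  const double* values = A.valuePtr();        // nonzero values
  const int*    inner  = A.innerIndexPtr();   // row index of each value
  const int*    outer  = A.outerIndexPtr();   // start of each column, size cols()+1

  for (Eigen::Index j = 0; j < A.cols(); ++j)
    for (int p = outer[j]; p < outer[j + 1]; ++p)
      std::cout << "(" << inner[p] << "," << j << ") = " << values[p] << "\n";
  return 0;
}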
@@ -740,8 +740,8 @@ class SparseMatrix
     {
       eigen_assert(rows() == cols() && "ONLY FOR SQUARED MATRICES");
       this->m_data.resize(rows());
-      Eigen::Map<IndexVector>(&this->m_data.index(0), rows()).setLinSpaced(0, StorageIndex(rows()-1));
-      Eigen::Map<ScalarVector>(&this->m_data.value(0), rows()).setOnes();
+      Eigen::Map<IndexVector>(this->m_data.indexPtr(), rows()).setLinSpaced(0, StorageIndex(rows()-1));
+      Eigen::Map<ScalarVector>(this->m_data.valuePtr(), rows()).setOnes();
       Eigen::Map<IndexVector>(this->m_outerIndex, rows()+1).setLinSpaced(0, StorageIndex(rows()));
       std::free(m_innerNonZeros);
       m_innerNonZeros = 0;
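
This hunk belongs to setIdentity(): it maps the freshly resized index and value arrays and fills them in place, hence the switch to the pointer accessors. A short usage example:

#include <Eigen/SparseCore>
#include <cassert>

int main()
{
  Eigen::SparseMatrix<double> M(5, 5);   // must be square, as the assert above enforces
  M.setIdentity();                       // one stored entry per column, value 1
  assert(M.nonZeros() == 5);
  assert(M.coeff(2, 2) == 1.0);
  return 0;
}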
@@ -30,7 +30,7 @@ typename internal::traits<SparseMatrix<_Scalar,_Options,_Index> >::Scalar
 SparseMatrix<_Scalar,_Options,_Index>::sum() const
 {
   eigen_assert(rows()>0 && cols()>0 && "you are using a non initialized matrix");
-  return Matrix<Scalar,1,Dynamic>::Map(&m_data.value(0), m_data.size()).sum();
+  return Matrix<Scalar,1,Dynamic>::Map(m_data.valuePtr(), m_data.size()).sum();
 }
 
 template<typename _Scalar, int _Options, typename _Index>
@@ -38,7 +38,7 @@ typename internal::traits<SparseVector<_Scalar,_Options, _Index> >::Scalar
 SparseVector<_Scalar,_Options,_Index>::sum() const
 {
   eigen_assert(rows()>0 && cols()>0 && "you are using a non initialized matrix");
-  return Matrix<Scalar,1,Dynamic>::Map(&m_data.value(0), m_data.size()).sum();
+  return Matrix<Scalar,1,Dynamic>::Map(m_data.valuePtr(), m_data.size()).sum();
 }
 
 } // end namespace Eigen
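
Both sum() overloads map the stored values as a dense row vector and reduce it, which is why they now go through valuePtr(). For example:

#include <Eigen/SparseCore>
#include <cassert>

int main()
{
  Eigen::SparseVector<double> v(10);
  v.coeffRef(2) = 1.5;
  v.coeffRef(7) = 2.5;
  assert(v.sum() == 4.0);   // the assert above rejects 0-sized (uninitialized) operands
  return 0;
}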
@@ -83,11 +83,11 @@ class SparseVector
     EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
     EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
 
-    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
-    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
+    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return m_data.valuePtr(); }
+    EIGEN_STRONG_INLINE Scalar* valuePtr() { return m_data.valuePtr(); }
 
-    EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return &m_data.index(0); }
-    EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return &m_data.index(0); }
+    EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
+    EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }
 
     inline const StorageIndex* outerIndexPtr() const { return 0; }
     inline StorageIndex* outerIndexPtr() { return 0; }
@@ -125,6 +125,7 @@ class SparseVector
     inline Scalar& coeffRef(Index i)
     {
       eigen_assert(i>=0 && i<m_size);
+
       return m_data.atWithInsertion(StorageIndex(i));
     }
 
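
coeffRef() on a SparseVector goes through atWithInsertion(), so a coefficient that is not stored yet is inserted (as zero) before the reference is returned. Usage sketch:

#include <Eigen/SparseCore>
#include <cassert>

int main()
{
  Eigen::SparseVector<double> v(8);
  v.coeffRef(3) = 1.0;    // entry absent: atWithInsertion() creates it first
  v.coeffRef(3) += 2.0;   // entry present: the stored value is updated in place
  assert(v.nonZeros() == 1 && v.coeff(3) == 3.0);
  return 0;
}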