Fix ambiguous instantiation

Gael Guennebaud 2015-10-27 11:01:37 +01:00
parent e6f8c5c325
commit 73f692d16b
3 changed files with 62 additions and 19 deletions

@@ -39,6 +39,34 @@ struct generic_product_impl<Lhs, Rhs, SparseShape, SparseShape, ProductType>
 {
   template<typename Dest>
   static void evalTo(Dest& dst, const Lhs& lhs, const Rhs& rhs)
+  {
+    evalTo(dst, lhs, rhs, typename evaluator_traits<Dest>::Shape());
+  }
+
+  // dense += sparse * sparse
+  template<typename Dest,typename ActualLhs>
+  static void addTo(Dest& dst, const ActualLhs& lhs, const Rhs& rhs, int* = typename enable_if<is_same<typename evaluator_traits<Dest>::Shape,DenseShape>::value,int*>::type(0) )
+  {
+    typedef typename nested_eval<ActualLhs,Dynamic>::type LhsNested;
+    typedef typename nested_eval<Rhs,Dynamic>::type RhsNested;
+    LhsNested lhsNested(lhs);
+    RhsNested rhsNested(rhs);
+    internal::sparse_sparse_to_dense_product_selector<typename remove_all<LhsNested>::type,
+                                                      typename remove_all<RhsNested>::type, Dest>::run(lhsNested,rhsNested,dst);
+  }
+
+  // dense -= sparse * sparse
+  template<typename Dest>
+  static void subTo(Dest& dst, const Lhs& lhs, const Rhs& rhs, int* = typename enable_if<is_same<typename evaluator_traits<Dest>::Shape,DenseShape>::value,int*>::type(0) )
+  {
+    addTo(dst, -lhs, rhs);
+  }
+
+protected:
+
+  // sparse = sparse * sparse
+  template<typename Dest>
+  static void evalTo(Dest& dst, const Lhs& lhs, const Rhs& rhs, SparseShape)
   {
     typedef typename nested_eval<Lhs,Dynamic>::type LhsNested;
     typedef typename nested_eval<Rhs,Dynamic>::type RhsNested;
@@ -47,6 +75,14 @@ struct generic_product_impl<Lhs, Rhs, SparseShape, SparseShape, ProductType>
     internal::conservative_sparse_sparse_product_selector<typename remove_all<LhsNested>::type,
                                                           typename remove_all<RhsNested>::type, Dest>::run(lhsNested,rhsNested,dst);
   }
+
+  // dense = sparse * sparse
+  template<typename Dest>
+  static void evalTo(Dest& dst, const Lhs& lhs, const Rhs& rhs, DenseShape)
+  {
+    dst.setZero();
+    addTo(dst, lhs, rhs);
+  }
 };
 
 // sparse * sparse-triangular
@@ -61,33 +97,36 @@ struct generic_product_impl<Lhs, Rhs, SparseTriangularShape, SparseShape, ProductType>
   : public generic_product_impl<Lhs, Rhs, SparseShape, SparseShape, ProductType>
 {};
 
-// Dense = sparse * sparse
-template< typename DstXprType, typename Lhs, typename Rhs, int Options/*, typename Scalar*/>
-struct Assignment<DstXprType, Product<Lhs,Rhs,Options>, internal::assign_op<typename DstXprType::Scalar>, Sparse2Dense/*,
-                  typename enable_if<(Options==DefaultProduct || Options==AliasFreeProduct),Scalar>::type*/>
+// dense = sparse-product (can be sparse*sparse, sparse*perm, etc.)
+template< typename DstXprType, typename Lhs, typename Rhs>
+struct Assignment<DstXprType, Product<Lhs,Rhs,AliasFreeProduct>, internal::assign_op<typename DstXprType::Scalar>, Sparse2Dense>
 {
-  typedef Product<Lhs,Rhs,Options> SrcXprType;
+  typedef Product<Lhs,Rhs,AliasFreeProduct> SrcXprType;
   static void run(DstXprType &dst, const SrcXprType &src, const internal::assign_op<typename DstXprType::Scalar> &)
   {
-    dst.setZero();
-    dst += src;
+    generic_product_impl<Lhs, Rhs>::evalTo(dst,src.lhs(),src.rhs());
   }
 };
 
-// Dense += sparse * sparse
-template< typename DstXprType, typename Lhs, typename Rhs, int Options>
-struct Assignment<DstXprType, Product<Lhs,Rhs,Options>, internal::add_assign_op<typename DstXprType::Scalar>, Sparse2Dense/*,
-                  typename enable_if<(Options==DefaultProduct || Options==AliasFreeProduct),Scalar>::type*/>
+// dense += sparse-product (can be sparse*sparse, sparse*perm, etc.)
+template< typename DstXprType, typename Lhs, typename Rhs>
+struct Assignment<DstXprType, Product<Lhs,Rhs,AliasFreeProduct>, internal::add_assign_op<typename DstXprType::Scalar>, Sparse2Dense>
 {
-  typedef Product<Lhs,Rhs,Options> SrcXprType;
+  typedef Product<Lhs,Rhs,AliasFreeProduct> SrcXprType;
   static void run(DstXprType &dst, const SrcXprType &src, const internal::add_assign_op<typename DstXprType::Scalar> &)
   {
-    typedef typename nested_eval<Lhs,Dynamic>::type LhsNested;
-    typedef typename nested_eval<Rhs,Dynamic>::type RhsNested;
-    LhsNested lhsNested(src.lhs());
-    RhsNested rhsNested(src.rhs());
-    internal::sparse_sparse_to_dense_product_selector<typename remove_all<LhsNested>::type,
-                                                      typename remove_all<RhsNested>::type, DstXprType>::run(lhsNested,rhsNested,dst);
+    generic_product_impl<Lhs, Rhs>::addTo(dst,src.lhs(),src.rhs());
+  }
+};
+
+// dense -= sparse-product (can be sparse*sparse, sparse*perm, etc.)
+template< typename DstXprType, typename Lhs, typename Rhs>
+struct Assignment<DstXprType, Product<Lhs,Rhs,AliasFreeProduct>, internal::sub_assign_op<typename DstXprType::Scalar>, Sparse2Dense>
+{
+  typedef Product<Lhs,Rhs,AliasFreeProduct> SrcXprType;
+  static void run(DstXprType &dst, const SrcXprType &src, const internal::sub_assign_op<typename DstXprType::Scalar> &)
+  {
+    generic_product_impl<Lhs, Rhs>::subTo(dst,src.lhs(),src.rhs());
   }
 };
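What resolves the ambiguous instantiation is this restructuring: the dense-destination Assignment specializations no longer expand the product themselves but forward to generic_product_impl, whose public evalTo dispatches once on evaluator_traits<Dest>::Shape, while addTo/subTo are restricted to dense destinations through an enable_if'd default argument. Below is a minimal, self-contained sketch of that tag-dispatch idiom; the names product_impl, shape_of, DenseDest and SparseDest are made up for illustration and are not Eigen types.

// Illustrative sketch only: one public entry point forwards to protected
// overloads selected by a shape tag, so dense and sparse destinations never
// compete in overload resolution.
#include <iostream>

struct DenseShape  {};
struct SparseShape {};

struct DenseDest  {};
struct SparseDest {};

// Hypothetical stand-in for evaluator_traits<Dest>::Shape.
template<typename Dest> struct shape_of             { typedef SparseShape type; };
template<>              struct shape_of<DenseDest>  { typedef DenseShape  type; };

struct product_impl
{
  template<typename Dest>
  static void evalTo(Dest& dst)
  {
    // Single unambiguous entry point: dispatch on the destination's shape tag.
    evalTo(dst, typename shape_of<Dest>::type());
  }

protected:
  template<typename Dest>
  static void evalTo(Dest&, SparseShape) { std::cout << "sparse destination path\n"; }

  template<typename Dest>
  static void evalTo(Dest&, DenseShape)  { std::cout << "dense destination path\n"; }
};

int main()
{
  DenseDest  d;
  SparseDest s;
  product_impl::evalTo(d);  // selects the DenseShape overload
  product_impl::evalTo(s);  // selects the SparseShape overload
  return 0;
}

Because there is a single public entry point and the shape tag picks exactly one protected overload, the compiler never has to rank competing specializations for dense versus sparse destinations.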

@@ -438,7 +438,7 @@ template<typename SparseMatrixType> void sparse_basic(const SparseMatrixType& ref)
 {
   Index i = internal::random<Index>(0,rows-1);
   Index j = internal::random<Index>(0,rows-1);
-  Index v = internal::random<Scalar>();
+  Scalar v = internal::random<Scalar>();
   m1.coeffRef(i,j) = v;
   refMat1.coeffRef(i,j) = v;
   VERIFY_IS_APPROX(m1, refMat1);
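The one-line fix above gives the randomly drawn coefficient its proper type: storing internal::random<Scalar>() into an Index truncates the value for real Scalar types and has no valid conversion at all for complex ones. A small sketch of the corrected pattern using only public Eigen API; the sizes and random values are placeholders, not those of the test:

#include <Eigen/Sparse>
#include <Eigen/Dense>
#include <cstdlib>

int main()
{
  typedef Eigen::SparseMatrix<double> SpMat;
  typedef SpMat::Scalar Scalar;                 // the matrix's coefficient type

  const int rows = 10;
  SpMat m1(rows, rows);
  Eigen::MatrixXd refMat1 = Eigen::MatrixXd::Zero(rows, rows);

  int i = std::rand() % rows;
  int j = std::rand() % rows;
  Scalar v = Scalar(std::rand()) / RAND_MAX;    // keep the coefficient as a Scalar, not an Index

  m1.coeffRef(i, j) = v;                        // coeffRef inserts the entry if it is not stored yet
  refMat1(i, j)     = v;

  return (m1.toDense() - refMat1).norm() < 1e-12 ? 0 : 1;
}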

@@ -79,12 +79,16 @@ template<typename SparseMatrixType> void sparse_product()
     // dense ?= sparse * sparse
     VERIFY_IS_APPROX(dm4 =m2*m3, refMat4 =refMat2*refMat3);
     VERIFY_IS_APPROX(dm4+=m2*m3, refMat4+=refMat2*refMat3);
+    VERIFY_IS_APPROX(dm4-=m2*m3, refMat4-=refMat2*refMat3);
     VERIFY_IS_APPROX(dm4 =m2t.transpose()*m3, refMat4 =refMat2t.transpose()*refMat3);
     VERIFY_IS_APPROX(dm4+=m2t.transpose()*m3, refMat4+=refMat2t.transpose()*refMat3);
+    VERIFY_IS_APPROX(dm4-=m2t.transpose()*m3, refMat4-=refMat2t.transpose()*refMat3);
     VERIFY_IS_APPROX(dm4 =m2t.transpose()*m3t.transpose(), refMat4 =refMat2t.transpose()*refMat3t.transpose());
     VERIFY_IS_APPROX(dm4+=m2t.transpose()*m3t.transpose(), refMat4+=refMat2t.transpose()*refMat3t.transpose());
+    VERIFY_IS_APPROX(dm4-=m2t.transpose()*m3t.transpose(), refMat4-=refMat2t.transpose()*refMat3t.transpose());
     VERIFY_IS_APPROX(dm4 =m2*m3t.transpose(), refMat4 =refMat2*refMat3t.transpose());
     VERIFY_IS_APPROX(dm4+=m2*m3t.transpose(), refMat4+=refMat2*refMat3t.transpose());
+    VERIFY_IS_APPROX(dm4-=m2*m3t.transpose(), refMat4-=refMat2*refMat3t.transpose());
     VERIFY_IS_APPROX(dm4 = m2*m3*s1, refMat4 = refMat2*refMat3*s1);
 
     // test aliasing
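The added -= checks exercise the new subTo path end to end. As a usage sketch (not part of the commit), the dense ?= sparse * sparse forms now covered look like this with public Eigen API; matrix sizes and the inserted nonzeros are arbitrary placeholders:

#include <Eigen/Sparse>
#include <Eigen/Dense>

int main()
{
  const int n = 8;
  Eigen::SparseMatrix<double> m2(n, n), m3(n, n);
  m2.insert(0, 1) = 1.0;               // a couple of nonzeros so the products are nontrivial
  m3.insert(1, 2) = 2.0;
  m2.makeCompressed();
  m3.makeCompressed();

  Eigen::MatrixXd dm4 = Eigen::MatrixXd::Random(n, n);
  dm4  = m2 * m3;                      // dense  = sparse * sparse  (evalTo, DenseShape overload)
  dm4 += m2 * m3;                      // dense += sparse * sparse  (addTo)
  dm4 -= m2.transpose() * m3;          // dense -= sparse * sparse  (subTo, newly exercised)
  return 0;
}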