Fix transform iterator for non-copy-constructible types (#3542)
Fixes: #3541

Co-authored-by: Michael Schellenberger Costa <[email protected]>
bernhardmgruber and miscco authored Jan 28, 2025
1 parent 0b5844f commit 4567491
Showing 4 changed files with 55 additions and 1 deletion.
12 changes: 12 additions & 0 deletions thrust/testing/cuda/transform_iterator.cmake
@@ -0,0 +1,12 @@
target_compile_options(${test_target} PRIVATE $<$<COMPILE_LANG_AND_ID:CUDA,NVIDIA>: --extended-lambda>)

# This check is not strictly correct: we would have to check the host compiler, not the CXX compiler.
# We rely on the fact that they are usually the same ;)
if ("Clang" STREQUAL "${CMAKE_CXX_COMPILER_ID}" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13)
  # When clang >= 13 is used as host compiler, we get the following warning:
  #   nvcc_internal_extended_lambda_implementation:312:22: error: definition of implicit copy constructor for '__nv_hdl_wrapper_t<false, true, false, __nv_dl_tag<void (*)(), &TestAddressStabilityLambda, 2>, int (const int &)>' is deprecated because it has a user-declared copy assignment operator [-Werror,-Wdeprecated-copy]
  #     312 | __nv_hdl_wrapper_t & operator=(const __nv_hdl_wrapper_t &in) = delete;
  #         |                      ^
  # Let's suppress it until NVBug 4980157 is resolved.
  target_compile_options(${test_target} PRIVATE $<$<COMPILE_LANG_AND_ID:CUDA,NVIDIA>: -Wno-deprecated-copy>)
endif ()
19 changes: 19 additions & 0 deletions thrust/testing/cuda/transform_iterator.cu
@@ -0,0 +1,19 @@
#include <thrust/device_vector.h>
#include <thrust/host_vector.h>
#include <thrust/iterator/transform_iterator.h>
#include <thrust/logical.h>

#include <unittest/unittest.h>

// see also: https://github.com/NVIDIA/cccl/issues/3541
void TestTransformWithLambda()
{
  auto l = [] __host__ __device__(int v) { return v < 4; };
  thrust::host_vector<int> A{1, 2, 3, 4, 5, 6, 7};
  ASSERT_EQUAL(thrust::any_of(A.begin(), A.end(), l), true);

  thrust::device_vector<int> B{1, 2, 3, 4, 5, 6, 7};
  ASSERT_EQUAL(thrust::any_of(B.begin(), B.end(), l), true);
}

DECLARE_UNITTEST(TestTransformWithLambda);
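
For context on the regression this test guards against: nvcc wraps extended __host__ __device__ lambdas in a type (the __nv_hdl_wrapper_t mentioned in the CMake snippet above) that is copy-constructible but has a deleted copy assignment operator. With the previously defaulted operator=, a transform_iterator holding such a functor was itself not copy-assignable, which breaks algorithms that assign iterators internally. The following host-only sketch is illustrative and not part of the commit (the names not_assignable and iterator_like are made up); it reproduces the same trait situation in plain C++:

// Illustrative sketch only: a functor type that is copy-constructible but not
// copy-assignable, like nvcc's lambda wrapper, makes a defaulted copy
// assignment operator implicitly deleted.
#include <type_traits>

struct not_assignable
{
  not_assignable() = default;
  not_assignable(const not_assignable&) = default;            // copy-constructible ...
  not_assignable& operator=(const not_assignable&) = delete;  // ... but not copy-assignable
  bool operator()(int v) const { return v < 4; }
};

template <class F>
struct iterator_like // stands in for the old transform_iterator
{
  F m_f;
  iterator_like& operator=(const iterator_like&) = default; // implicitly deleted: F is not assignable
};

static_assert(std::is_copy_constructible<iterator_like<not_assignable>>::value, "copy construction is fine");
static_assert(!std::is_copy_assignable<iterator_like<not_assignable>>::value, "copy assignment was the problem");

int main() {}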
1 change: 1 addition & 0 deletions thrust/testing/transform_iterator.cu
@@ -2,6 +2,7 @@
#include <thrust/functional.h>
#include <thrust/iterator/counting_iterator.h>
#include <thrust/iterator/transform_iterator.h>
#include <thrust/logical.h>
#include <thrust/reduce.h>
#include <thrust/sequence.h>

24 changes: 23 additions & 1 deletion thrust/thrust/iterator/transform_iterator.h
@@ -47,6 +47,9 @@
#include <thrust/iterator/iterator_facade.h>
#include <thrust/iterator/iterator_traits.h>

#include <cuda/std/__memory/construct_at.h>
#include <cuda/std/type_traits>

THRUST_NAMESPACE_BEGIN

/*! \addtogroup iterators
@@ -238,7 +241,26 @@ class transform_iterator
, m_f(other.functor())
{}

transform_iterator& operator=(const transform_iterator&) = default;
_CCCL_HOST_DEVICE transform_iterator& operator=(transform_iterator const& other)
{
  super_t::operator=(other);
  if constexpr (_CCCL_TRAIT(::cuda::std::is_copy_assignable, AdaptableUnaryFunction))
  {
    m_f = other.m_f;
  }
  else if constexpr (_CCCL_TRAIT(::cuda::std::is_copy_constructible, AdaptableUnaryFunction))
  {
    ::cuda::std::__destroy_at(&m_f);
    ::cuda::std::__construct_at(&m_f, other.m_f);
  }
  else
  {
    static_assert(_CCCL_TRAIT(::cuda::std::is_copy_constructible, AdaptableUnaryFunction),
                  "Cannot use thrust::transform_iterator with a functor that is neither copy constructible nor "
                  "copy assignable");
  }
  return *this;
}

/*! This method returns a copy of this \p transform_iterator's \c AdaptableUnaryFunction.
* \return A copy of this \p transform_iterator's \c AdaptableUnaryFunction.
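
The replacement assignment operator above picks one of three paths at compile time: copy-assign the functor when it supports that, otherwise, if it is at least copy-constructible, end the old functor's lifetime and copy-construct the new one in place, and otherwise fail with a static_assert. Below is a standalone sketch of that destroy-and-reconstruct fallback using the public C++20 std::destroy_at/std::construct_at rather than the internal libcu++ helpers; the functor_box name and the lack of exception-safety handling are my simplifications, not thrust's code:

// Sketch of the assign-via-reconstruct idea in plain C++20 (illustrative only).
#include <memory>
#include <type_traits>
#include <utility>

template <class F>
struct functor_box
{
  F f;

  explicit functor_box(F fn) : f(std::move(fn)) {}
  functor_box(const functor_box&) = default;

  functor_box& operator=(const functor_box& other)
  {
    if constexpr (std::is_copy_assignable_v<F>)
    {
      f = other.f; // easy path: the functor can be assigned directly
    }
    else
    {
      static_assert(std::is_copy_constructible_v<F>,
                    "functor must be copy-assignable or copy-constructible");
      std::destroy_at(&f);            // end the lifetime of the old functor
      std::construct_at(&f, other.f); // copy-construct the new one in place
                                      // (exception safety is ignored in this sketch)
    }
    return *this;
  }
};

int main()
{
  auto l = [](int v) { return v < 4; }; // a lambda's copy assignment is deleted
  functor_box a{l};
  functor_box b{l};
  a = b; // works via destroy_at + construct_at
}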
