const Scalar* src = &src_data[src_offset];
Scalar* dst = &dst_data[dst_offset];
// Scalar fallback when packet operations cannot be used.
if (!Vectorizable) {
  for (Index i = 0; i < count; ++i) {
    dst[i * dst_stride] = src[i * src_stride];
  }
  return;
}
const IndexType vectorized_size = count - PacketSize;
const IndexType vectorized_half_size = count - HalfPacketSize;
IndexType i = 0;

if (kind == StridedLinearBufferCopy::Kind::Linear) {
  // Linear copy from `src` to `dst`.
  eigen_assert(src_stride == 1 && dst_stride == 1);
  const IndexType unrolled_size = count - 4 * PacketSize;
  for (; i <= unrolled_size; i += 4 * PacketSize) {
    for (int j = 0; j < 4; ++j) {
      Packet p = ploadu<Packet>(src + i + j * PacketSize);
      pstoreu<Scalar, Packet>(dst + i + j * PacketSize, p);
    }
  }
  for (; i <= vectorized_size; i += PacketSize) {
    Packet p = ploadu<Packet>(src + i);
    pstoreu<Scalar, Packet>(dst + i, p);
  }
  if (HasHalfPacket) {
    if (i < vectorized_half_size) {
      HalfPacket p = ploadu<HalfPacket>(src + i);
      pstoreu<Scalar, HalfPacket>(dst + i, p);
      i += HalfPacketSize;
    }
  }
  for (; i < count; ++i) {
    dst[i] = src[i];
  }
} else if (kind == StridedLinearBufferCopy::Kind::Scatter) {
  // Scatter from contiguous `src` into strided `dst`.
  eigen_assert(src_stride == 1 && dst_stride != 1);
  for (; i <= vectorized_size; i += PacketSize) {
    Packet p = ploadu<Packet>(src + i);
    pscatter<Scalar, Packet>(dst + i * dst_stride, p, dst_stride);
  }
  if (HasHalfPacket) {
    if (i < vectorized_half_size) {
      HalfPacket p = ploadu<HalfPacket>(src + i);
      pscatter<Scalar, HalfPacket>(dst + i * dst_stride, p, dst_stride);
      i += HalfPacketSize;
    }
  }
  for (; i < count; ++i) {
    dst[i * dst_stride] = src[i];
  }
} else if (kind == StridedLinearBufferCopy::Kind::FillLinear) {
  // Fill contiguous `dst` with the value at `*src`.
  eigen_assert(src_stride == 0 && dst_stride == 1);
  const IndexType unrolled_size = count - 4 * PacketSize;
  Scalar s = *src;
  Packet p = pset1<Packet>(s);
  for (; i <= unrolled_size; i += 4 * PacketSize) {
    for (int j = 0; j < 4; ++j) {
      pstoreu<Scalar, Packet>(dst + i + j * PacketSize, p);
    }
  }
  for (; i <= vectorized_size; i += PacketSize) {
    pstoreu<Scalar, Packet>(dst + i, p);
  }
  if (HasHalfPacket) {
    HalfPacket hp = pset1<HalfPacket>(s);
    if (i < vectorized_half_size) {
      pstoreu<Scalar, HalfPacket>(dst + i, hp);
      i += HalfPacketSize;
    }
  }
  for (; i < count; ++i) {
    dst[i] = s;
  }
} else if (kind == StridedLinearBufferCopy::Kind::FillScatter) {
  // Fill strided `dst` with the value at `*src`, using scatter stores.
  eigen_assert(src_stride == 0 && dst_stride != 1);
  Scalar s = *src;
  Packet p = pset1<Packet>(s);
  for (; i <= vectorized_size; i += PacketSize) {
    pscatter<Scalar, Packet>(dst + i * dst_stride, p, dst_stride);
  }
  if (HasHalfPacket) {
    HalfPacket hp = pset1<HalfPacket>(s);
    if (i < vectorized_half_size) {
      pscatter<Scalar, HalfPacket>(dst + i * dst_stride, hp, dst_stride);
      i += HalfPacketSize;
    }
  }
  for (; i < count; ++i) {
    dst[i * dst_stride] = s;
  }
} else if (kind == StridedLinearBufferCopy::Kind::Gather) {
  // Gather from strided `src` into contiguous `dst`.
  eigen_assert(dst_stride == 1);
  for (; i <= vectorized_size; i += PacketSize) {
    Packet p = pgather<Scalar, Packet>(src + i * src_stride, src_stride);
    pstoreu<Scalar, Packet>(dst + i, p);
  }
  if (HasHalfPacket) {
    if (i < vectorized_half_size) {
      HalfPacket p = pgather<Scalar, HalfPacket>(src + i * src_stride, src_stride);
      pstoreu<Scalar, HalfPacket>(dst + i, p);
      i += HalfPacketSize;
    }
  }
  for (; i < count; ++i) {
    dst[i] = src[i * src_stride];
  }
} else if (kind == StridedLinearBufferCopy::Kind::Random) {
  // Random-access copy: both source and destination are strided.
  for (; i < count; ++i) {
    dst[i * dst_stride] = src[i * src_stride];
  }
} else {
  eigen_assert(false);
}
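
The branches above differ only in which side is contiguous and whether the source is a single broadcast value. Below is a minimal scalar reference model of the same six copy kinds, useful for reasoning about what each vectorized path must compute; the Kind enum, strided_copy_reference, and its parameter order are hypothetical illustration only, not Eigen's API.

// ---- illustrative reference model (not part of TensorBlock.h) ----
#include <cstddef>
#include <iostream>
#include <vector>

enum class Kind { Linear, Scatter, FillLinear, FillScatter, Gather, Random };

template <typename Scalar>
void strided_copy_reference(Kind kind, std::ptrdiff_t count,
                            Scalar* dst, std::ptrdiff_t dst_stride,
                            const Scalar* src, std::ptrdiff_t src_stride) {
  switch (kind) {
    case Kind::Linear:       // contiguous src, contiguous dst
      for (std::ptrdiff_t i = 0; i < count; ++i) dst[i] = src[i];
      break;
    case Kind::Scatter:      // contiguous src, strided dst
      for (std::ptrdiff_t i = 0; i < count; ++i) dst[i * dst_stride] = src[i];
      break;
    case Kind::FillLinear:   // broadcast *src into contiguous dst
      for (std::ptrdiff_t i = 0; i < count; ++i) dst[i] = *src;
      break;
    case Kind::FillScatter:  // broadcast *src into strided dst
      for (std::ptrdiff_t i = 0; i < count; ++i) dst[i * dst_stride] = *src;
      break;
    case Kind::Gather:       // strided src, contiguous dst
      for (std::ptrdiff_t i = 0; i < count; ++i) dst[i] = src[i * src_stride];
      break;
    case Kind::Random:       // strided src, strided dst
      for (std::ptrdiff_t i = 0; i < count; ++i)
        dst[i * dst_stride] = src[i * src_stride];
      break;
  }
}

int main() {
  std::vector<float> src = {1, 2, 3, 4};
  std::vector<float> dst(8, 0.0f);
  // Scatter the 4 source values into every second slot of dst.
  strided_copy_reference(Kind::Scatter, 4, dst.data(), 2, src.data(), 1);
  for (float v : dst) std::cout << v << ' ';  // prints: 1 0 2 0 3 0 4 0
  std::cout << '\n';
}

The vectorized kernel reaches the same result by covering most of count with full Packet loads/stores (unrolled by four in the linear and fill-linear cases), issuing at most one HalfPacket operation, and finishing the remainder with the scalar tail loop.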