#ifndef _GLIBCXX_EXPERIMENTAL_SIMD_FIXED_SIZE_H_
#define _GLIBCXX_EXPERIMENTAL_SIMD_FIXED_SIZE_H_

#if __cplusplus >= 201703L

_GLIBCXX_SIMD_BEGIN_NAMESPACE
// __simd_tuple_element
template <size_t _I, typename _Tp>
  struct __simd_tuple_element;

template <typename _Tp, typename _A0, typename... _As>
  struct __simd_tuple_element<0, _SimdTuple<_Tp, _A0, _As...>>
  { using type = simd<_Tp, _A0>; };

template <size_t _I, typename _Tp, typename _A0, typename... _As>
  struct __simd_tuple_element<_I, _SimdTuple<_Tp, _A0, _As...>>
  {
    using type =
      typename __simd_tuple_element<_I - 1, _SimdTuple<_Tp, _As...>>::type;
  };

template <size_t _I, typename _Tp>
  using __simd_tuple_element_t = typename __simd_tuple_element<_I, _Tp>::type;
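// Illustrative sketch (comment only, not part of the header): the
// metafunction peels one ABI tag off the pack per recursion level, so e.g.
//   __simd_tuple_element_t<1, _SimdTuple<float, _A0, _A1>>
// is simd<float, _A1>. A minimal compile-time check, assuming two ABI tags
// _A0 and _A1 are in scope:
//   static_assert(
//     is_same_v<__simd_tuple_element_t<0, _SimdTuple<float, _A0, _A1>>,
//               simd<float, _A0>>);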
// __simd_tuple_concat
template <typename _Tp, typename... _A0s, typename... _A1s>
  _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple<_Tp, _A0s..., _A1s...>
  __simd_tuple_concat(const _SimdTuple<_Tp, _A0s...>& __left,
                      const _SimdTuple<_Tp, _A1s...>& __right)
  {
    if constexpr (sizeof...(_A0s) == 0)
      return __right;
    else if constexpr (sizeof...(_A1s) == 0)
      return __left;
    else
      return {__left.first, __simd_tuple_concat(__left.second, __right)};
  }
template <typename _Tp, typename _A10, typename... _A1s>
  _GLIBCXX_SIMD_INTRINSIC constexpr
  _SimdTuple<_Tp, simd_abi::scalar, _A10, _A1s...>
  __simd_tuple_concat(const _Tp& __left,
                      const _SimdTuple<_Tp, _A10, _A1s...>& __right)
  { return {__left, __right}; }
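// Usage sketch (comment only): concatenation appends the ABI packs, so
// joining a one-chunk and a two-chunk tuple of the same value_type yields a
// three-chunk tuple. Hypothetical example, assuming ABI tags _A0 and _A1:
//   _SimdTuple<float, _A0> __a{/*...*/};
//   _SimdTuple<float, _A0, _A1> __b{/*...*/};
//   auto __c = __simd_tuple_concat(__a, __b);
//   // decltype(__c) is _SimdTuple<float, _A0, _A0, _A1>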
// __simd_tuple_pop_front: drop the first _Np values (whole chunks only)
template <size_t _Np, typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC constexpr decltype(auto)
  __simd_tuple_pop_front(_Tp&& __x)
  {
    if constexpr (_Np == 0)
      return static_cast<_Tp&&>(__x);
    else
      {
        using _Up = __remove_cvref_t<_Tp>;
        static_assert(_Np >= _Up::_S_first_size);
        return __simd_tuple_pop_front<_Np - _Up::_S_first_size>(__x.second);
      }
  }
// __get_simd_at and __get_tuple_at
struct __as_simd {};

struct __as_simd_tuple {};
template <typename _Tp, typename _A0, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr simd<_Tp, _A0>
  __simd_tuple_get_impl(__as_simd, const _SimdTuple<_Tp, _A0, _Abis...>& __t,
                        _SizeConstant<0>)
  { return {__private_init, __t.first}; }

template <typename _Tp, typename _A0, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr const auto&
  __simd_tuple_get_impl(__as_simd_tuple,
                        const _SimdTuple<_Tp, _A0, _Abis...>& __t,
                        _SizeConstant<0>)
  { return __t.first; }

template <typename _Tp, typename _A0, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto&
  __simd_tuple_get_impl(__as_simd_tuple, _SimdTuple<_Tp, _A0, _Abis...>& __t,
                        _SizeConstant<0>)
  { return __t.first; }

template <typename _R, size_t _Np, typename _Tp, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto
  __simd_tuple_get_impl(_R, const _SimdTuple<_Tp, _Abis...>& __t,
                        _SizeConstant<_Np>)
  { return __simd_tuple_get_impl(_R(), __t.second, _SizeConstant<_Np - 1>()); }

template <size_t _Np, typename _Tp, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto&
  __simd_tuple_get_impl(__as_simd_tuple, _SimdTuple<_Tp, _Abis...>& __t,
                        _SizeConstant<_Np>)
  {
    return __simd_tuple_get_impl(__as_simd_tuple(), __t.second,
                                 _SizeConstant<_Np - 1>());
  }

template <size_t _Np, typename _Tp, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto
  __get_simd_at(const _SimdTuple<_Tp, _Abis...>& __t)
  { return __simd_tuple_get_impl(__as_simd(), __t, _SizeConstant<_Np>()); }
template <size_t _Np, typename _Tp, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto
  __get_tuple_at(const _SimdTuple<_Tp, _Abis...>& __t)
  { return __simd_tuple_get_impl(__as_simd_tuple(), __t, _SizeConstant<_Np>()); }

template <size_t _Np, typename _Tp, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC constexpr auto&
  __get_tuple_at(_SimdTuple<_Tp, _Abis...>& __t)
  { return __simd_tuple_get_impl(__as_simd_tuple(), __t, _SizeConstant<_Np>()); }
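// Usage sketch (comment only): __get_simd_at<_Np> wraps the _Np-th chunk in
// its simd type, while __get_tuple_at<_Np> returns the raw chunk storage.
// Hypothetical example for a two-chunk tuple __t:
//   auto __v = __get_simd_at<0>(__t);      // simd<_Tp, first ABI>
//   auto& __raw = __get_tuple_at<1>(__t);  // mutable reference into __t.second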
// __tuple_element_meta
template <typename _Tp, typename _Abi, size_t _Offset>
  struct __tuple_element_meta : public _Abi::_SimdImpl
  {
    static_assert(is_same_v<typename _Abi::_SimdImpl::abi_type, _Abi>);

    using value_type = _Tp;
    using abi_type = _Abi;
    using _Traits = _SimdTraits<_Tp, _Abi>;
    using _MaskImpl = typename _Abi::_MaskImpl;
    using _MaskMember = typename _Traits::_MaskMember;
    using simd_type = simd<_Tp, _Abi>;
    static constexpr size_t _S_offset = _Offset;
    static constexpr size_t _S_size() { return simd_size<_Tp, _Abi>::value; }
    static constexpr _MaskImpl _S_mask_impl = {};

    template <size_t _Np, bool _Sanitized>
      _GLIBCXX_SIMD_INTRINSIC static auto
      _S_submask(_BitMask<_Np, _Sanitized> __bits)
      { return __bits.template _M_extract<_Offset, _S_size()>(); }

    template <size_t _Np, bool _Sanitized>
      _GLIBCXX_SIMD_INTRINSIC static _MaskMember
      _S_make_mask(_BitMask<_Np, _Sanitized> __bits)
      {
        return _MaskImpl::template _S_convert<_Tp>(
          __bits.template _M_extract<_Offset, _S_size()>()._M_sanitized());
      }

    _GLIBCXX_SIMD_INTRINSIC static _ULLong
    _S_mask_to_shifted_ullong(_MaskMember __k)
    { return _MaskImpl::_S_to_bits(__k).to_ullong() << _Offset; }
  };
template <size_t _Offset, typename _Tp, typename _Abi, typename... _As>
  _GLIBCXX_SIMD_INTRINSIC __tuple_element_meta<_Tp, _Abi, _Offset>
  __make_meta(const _SimdTuple<_Tp, _Abi, _As...>&)
  { return {}; }
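// Illustrative sketch (comment only): a meta object bundles the chunk's
// _SimdImpl with its element offset inside the fixed_size tuple, so per-chunk
// loops can translate between the whole-tuple bitmask and the chunk's native
// mask type. E.g., for a chunk at offset 4 with 4 lanes:
//   __meta._S_submask(__bits)              // extracts bits [4, 8) of __bits
//   __meta._S_mask_to_shifted_ullong(__k)  // places the chunk mask at bit 4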
// _WithOffset wrapper class
template <size_t _Offset, typename _Base>
  struct _WithOffset : public _Base
  {
    static inline constexpr size_t _S_offset = _Offset;

    _GLIBCXX_SIMD_INTRINSIC char*
    _M_as_charptr()
    {
      return reinterpret_cast<char*>(this)
               + _S_offset * sizeof(typename _Base::value_type);
    }

    _GLIBCXX_SIMD_INTRINSIC const char*
    _M_as_charptr() const
    {
      return reinterpret_cast<const char*>(this)
               + _S_offset * sizeof(typename _Base::value_type);
    }
  };

// make _WithOffset<_WithOffset> ill-formed
template <size_t _O0, size_t _O1, typename _Base>
  struct _WithOffset<_O0, _WithOffset<_O1, _Base>> {};
template <size_t _Offset, typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC decltype(auto)
  __add_offset(_Tp& __base)
  { return static_cast<_WithOffset<_Offset, __remove_cvref_t<_Tp>>&>(__base); }

template <size_t _Offset, typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC decltype(auto)
  __add_offset(const _Tp& __base)
  {
    return static_cast<const _WithOffset<_Offset, __remove_cvref_t<_Tp>>&>(
      __base);
  }

template <size_t _Offset, size_t _ExistingOffset, typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC decltype(auto)
  __add_offset(_WithOffset<_ExistingOffset, _Tp>& __base)
  {
    return static_cast<_WithOffset<_Offset + _ExistingOffset, _Tp>&>(
      static_cast<_Tp&>(__base));
  }

template <size_t _Offset, size_t _ExistingOffset, typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC decltype(auto)
  __add_offset(const _WithOffset<_ExistingOffset, _Tp>& __base)
  {
    return static_cast<const _WithOffset<_Offset + _ExistingOffset, _Tp>&>(
      static_cast<const _Tp&>(__base));
  }
template <typename _Tp>
  constexpr inline size_t __offset = 0;

template <size_t _Offset, typename _Tp>
  constexpr inline size_t __offset<_WithOffset<_Offset, _Tp>>
    = _WithOffset<_Offset, _Tp>::_S_offset;

template <typename _Tp>
  constexpr inline size_t __offset<const _Tp> = __offset<_Tp>;

template <typename _Tp>
  constexpr inline size_t __offset<_Tp&> = __offset<_Tp>;

template <typename _Tp>
  constexpr inline size_t __offset<_Tp&&> = __offset<_Tp>;
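// Illustrative sketch (comment only): _WithOffset never changes the object,
// only the static offset recorded in its type; __offset<_Tp> reads it back
// regardless of cv-ref qualification. Hypothetical example:
//   _SimdTuple<float, _A0, _A1> __t;
//   auto& __shifted = __add_offset<4>(__t);
//   static_assert(__offset<decltype(__shifted)> == 4);
//   // __shifted._M_as_charptr() == __t._M_as_charptr() + 4 * sizeof(float)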
// _SimdTuple specializations
// empty:
template <typename _Tp>
  struct _SimdTuple<_Tp>
  {
    using value_type = _Tp;
    static constexpr size_t _S_tuple_size = 0;
    static constexpr size_t _S_size() { return 0; }
  };
// _SimdTupleData
template <typename _FirstType, typename _SecondType>
  struct _SimdTupleData
  {
    _FirstType first;
    _SecondType second;

    _GLIBCXX_SIMD_INTRINSIC constexpr bool
    _M_is_constprop() const
    {
      if constexpr (is_class_v<_FirstType>)
        return first._M_is_constprop() && second._M_is_constprop();
      else
        return __builtin_constant_p(first) && second._M_is_constprop();
    }
  };
template <typename _FirstType, typename _Tp>
  struct _SimdTupleData<_FirstType, _SimdTuple<_Tp>>
  {
    _FirstType first;
    static constexpr _SimdTuple<_Tp> second = {};

    _GLIBCXX_SIMD_INTRINSIC constexpr bool
    _M_is_constprop() const
    {
      if constexpr (is_class_v<_FirstType>)
        return first._M_is_constprop();
      else
        return __builtin_constant_p(first);
    }
  };
// _SimdTuple with 1 or more ABI tags
template <typename _Tp, typename _Abi0, typename... _Abis>
  struct _SimdTuple<_Tp, _Abi0, _Abis...>
    : _SimdTupleData<typename _SimdTraits<_Tp, _Abi0>::_SimdMember,
                     _SimdTuple<_Tp, _Abis...>>
  {
    static_assert(!__is_fixed_size_abi_v<_Abi0>);

    using value_type = _Tp;
    using _FirstType = typename _SimdTraits<_Tp, _Abi0>::_SimdMember;
    using _FirstAbi = _Abi0;
    using _SecondType = _SimdTuple<_Tp, _Abis...>;
    static constexpr size_t _S_tuple_size = sizeof...(_Abis) + 1;

    static constexpr size_t _S_size()
    { return simd_size_v<_Tp, _Abi0> + _SecondType::_S_size(); }

    static constexpr size_t _S_first_size = simd_size_v<_Tp, _Abi0>;
    static constexpr bool _S_is_homogeneous = (is_same_v<_Abi0, _Abis> && ...);

    using _Base = _SimdTupleData<typename _SimdTraits<_Tp, _Abi0>::_SimdMember,
                                 _SimdTuple<_Tp, _Abis...>>;
    using _Base::first;
    using _Base::second;

    _GLIBCXX_SIMD_INTRINSIC constexpr
    _SimdTuple() = default;

    _GLIBCXX_SIMD_INTRINSIC constexpr
    _SimdTuple(const _SimdTuple&) = default;

    _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple&
    operator=(const _SimdTuple&) = default;

    template <typename _Up>
      _GLIBCXX_SIMD_INTRINSIC constexpr
      _SimdTuple(_Up&& __x)
      : _Base{static_cast<_Up&&>(__x)} {}

    template <typename _Up, typename _Up2>
      _GLIBCXX_SIMD_INTRINSIC constexpr
      _SimdTuple(_Up&& __x, _Up2&& __y)
      : _Base{static_cast<_Up&&>(__x), static_cast<_Up2&&>(__y)} {}

    template <typename _Up>
      _GLIBCXX_SIMD_INTRINSIC constexpr
      _SimdTuple(_Up&& __x, _SimdTuple<_Tp>)
      : _Base{static_cast<_Up&&>(__x)} {}
    _GLIBCXX_SIMD_INTRINSIC char*
    _M_as_charptr()
    { return reinterpret_cast<char*>(this); }

    _GLIBCXX_SIMD_INTRINSIC const char*
    _M_as_charptr() const
    { return reinterpret_cast<const char*>(this); }
    template <size_t _Np>
      _GLIBCXX_SIMD_INTRINSIC constexpr auto&
      _M_at()
      {
        if constexpr (_Np == 0)
          return first;
        else
          return second.template _M_at<_Np - 1>();
      }

    template <size_t _Np>
      _GLIBCXX_SIMD_INTRINSIC constexpr const auto&
      _M_at() const
      {
        if constexpr (_Np == 0)
          return first;
        else
          return second.template _M_at<_Np - 1>();
      }

    template <size_t _Np>
      _GLIBCXX_SIMD_INTRINSIC constexpr auto
      _M_simd_at() const
      {
        if constexpr (_Np == 0)
          return simd<_Tp, _Abi0>(__private_init, first);
        else
          return second.template _M_simd_at<_Np - 1>();
      }
    template <size_t _Offset = 0, typename _Fp>
      _GLIBCXX_SIMD_INTRINSIC static constexpr _SimdTuple
      _S_generate(_Fp&& __gen, _SizeConstant<_Offset> = {})
      {
        auto&& __first = __gen(__tuple_element_meta<_Tp, _Abi0, _Offset>());
        if constexpr (_S_tuple_size == 1)
          return {__first};
        else
          return {__first,
                  _SecondType::_S_generate(
                    static_cast<_Fp&&>(__gen),
                    _SizeConstant<_Offset + simd_size_v<_Tp, _Abi0>>())};
      }
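    // Usage sketch (comment only): _S_generate builds the tuple chunk by
    // chunk; the generator receives the meta object for each chunk and can
    // use its _S_offset. Hypothetical broadcast of 1:
    //   auto __t = _SimdTuple<float, _A0, _A1>::_S_generate(
    //     [](auto __meta) { return __meta._S_broadcast(1.f); });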
    template <size_t _Offset = 0, typename _Fp, typename... _More>
      _GLIBCXX_SIMD_INTRINSIC _SimdTuple
      _M_apply_wrapped(_Fp&& __fun, const _More&... __more) const
      {
        auto&& __first
          = __fun(__make_meta<_Offset>(*this), first, __more.first...);
        if constexpr (_S_tuple_size == 1)
          return {__first};
        else
          return {__first,
                  second.template _M_apply_wrapped<
                    _Offset + simd_size_v<_Tp, _Abi0>>(
                    static_cast<_Fp&&>(__fun), __more.second...)};
      }
    template <typename _Tup>
      _GLIBCXX_SIMD_INTRINSIC constexpr decltype(auto)
      _M_extract_argument(_Tup&& __tup) const
      {
        using _TupT = typename __remove_cvref_t<_Tup>::value_type;
        if constexpr (is_same_v<_SimdTuple, __remove_cvref_t<_Tup>>)
          return __tup.first;
        else if (__builtin_is_constant_evaluated())
          return __fixed_size_storage_t<_TupT, _S_first_size>::_S_generate(
                   [&](auto __meta) constexpr {
                     return __meta._S_generator(
                              [&](auto __i) constexpr { return __tup[__i]; },
                              static_cast<_TupT*>(nullptr));
                   });
        else
          {
            __fixed_size_storage_t<_TupT, _S_first_size> __r;
            __builtin_memcpy(__r._M_as_charptr(), __tup._M_as_charptr(),
                             sizeof(__r));
            return __r;
          }
      }
    template <typename _Tup>
      _GLIBCXX_SIMD_INTRINSIC constexpr auto&
      _M_skip_argument(_Tup&& __tup) const
      {
        static_assert(_S_tuple_size > 1);
        using _Up = __remove_cvref_t<_Tup>;
        constexpr size_t __off = __offset<_Up>;
        if constexpr (_S_first_size == _Up::_S_first_size && __off == 0)
          return __tup.second;
        else if constexpr (_S_first_size > _Up::_S_first_size
                             && _S_first_size % _Up::_S_first_size == 0
                             && __off == 0)
          return __simd_tuple_pop_front<_S_first_size>(__tup);
        else if constexpr (_S_first_size + __off < _Up::_S_first_size)
          return __add_offset<_S_first_size>(__tup);
        else if constexpr (_S_first_size + __off == _Up::_S_first_size)
          return __tup.second;
        else
          __assert_unreachable<_Tup>();
      }
    template <size_t _Offset, typename... _More>
      _GLIBCXX_SIMD_INTRINSIC constexpr void
      _M_assign_front(const _SimdTuple<_Tp, _Abi0, _More...>& __x) &
      {
        static_assert(_Offset == 0);
        first = __x.first;
        if constexpr (sizeof...(_More) > 0)
          {
            static_assert(sizeof...(_Abis) >= sizeof...(_More));
            second.template _M_assign_front<0>(__x.second);
          }
      }

    template <size_t _Offset>
      _GLIBCXX_SIMD_INTRINSIC constexpr void
      _M_assign_front(const _FirstType& __x) &
      {
        static_assert(_Offset == 0);
        first = __x;
      }

    template <size_t _Offset, typename... _As>
      _GLIBCXX_SIMD_INTRINSIC constexpr void
      _M_assign_front(const _SimdTuple<_Tp, _As...>& __x) &
      {
        __builtin_memcpy(_M_as_charptr() + _Offset * sizeof(value_type),
                         __x._M_as_charptr(),
                         sizeof(_Tp) * _SimdTuple<_Tp, _As...>::_S_size());
      }
    // Iterate over the first members of this _SimdTuple and call __fun for
    // each of them. Additional arguments in __more are chunked into pieces
    // covering the same number of values.
    template <typename _Fp, typename... _More>
      _GLIBCXX_SIMD_INTRINSIC constexpr _SimdTuple
      _M_apply_per_chunk(_Fp&& __fun, _More&&... __more) const
      {
        if constexpr ((...
                         || conjunction_v<
                              is_lvalue_reference<_More>,
                              negation<is_const<remove_reference_t<_More>>>>) )
          {
            // need to write back at least one of __more after calling __fun
            auto&& __first = [&](auto... __args) constexpr {
              auto __r = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
                               __args...);
              [[maybe_unused]] auto&& __ignore_me = {(
                [](auto&& __dst, const auto& __src) {
                  if constexpr (is_assignable_v<decltype(__dst),
                                                decltype(__src)>)
                    __dst.template _M_assign_front<__offset<decltype(__dst)>>(
                      __src);
                }(static_cast<_More&&>(__more), __args),
                0)...};
              return __r;
            }(_M_extract_argument(__more)...);
            if constexpr (_S_tuple_size == 1)
              return {__first};
            else
              return {__first,
                      second._M_apply_per_chunk(static_cast<_Fp&&>(__fun),
                                                _M_skip_argument(__more)...)};
          }
        else
          {
            auto&& __first = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
                                   _M_extract_argument(__more)...);
            if constexpr (_S_tuple_size == 1)
              return {__first};
            else
              return {__first,
                      second._M_apply_per_chunk(static_cast<_Fp&&>(__fun),
                                                _M_skip_argument(__more)...)};
          }
      }
    template <typename _R = _Tp, typename _Fp, typename... _More>
      _GLIBCXX_SIMD_INTRINSIC auto
      _M_apply_r(_Fp&& __fun, const _More&... __more) const
      {
        auto&& __first = __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), first,
                               __more.first...);
        if constexpr (_S_tuple_size == 1)
          return __first;
        else
          return __simd_tuple_concat<_R>(
                   __first,
                   second.template _M_apply_r<_R>(static_cast<_Fp&&>(__fun),
                                                  __more.second...));
      }
    template <typename _Fp, typename... _More>
      _GLIBCXX_SIMD_INTRINSIC constexpr friend _SanitizedBitMask<_S_size()>
      _M_test(const _Fp& __fun, const _SimdTuple& __x, const _More&... __more)
      {
        const _SanitizedBitMask<_S_first_size> __first
          = _Abi0::_MaskImpl::_S_to_bits(
              __fun(__tuple_element_meta<_Tp, _Abi0, 0>(), __x.first,
                    __more.first...));
        if constexpr (_S_tuple_size == 1)
          return __first;
        else
          return _M_test(__fun, __x.second, __more.second...)
                   ._M_prepend(__first);
      }
    template <typename _Up, _Up _I>
      _GLIBCXX_SIMD_INTRINSIC constexpr _Tp
      operator[](integral_constant<_Up, _I>) const noexcept
      {
        if constexpr (_I < simd_size_v<_Tp, _Abi0>)
          return _M_subscript_read(_I);
        else
          return second[integral_constant<_Up, _I - simd_size_v<_Tp, _Abi0>>()];
      }

    _GLIBCXX_SIMD_INTRINSIC
    _Tp operator[](size_t __i) const noexcept
    {
      if constexpr (_S_tuple_size == 1)
        return _M_subscript_read(__i);
      else
        {
#ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
          return reinterpret_cast<const __may_alias<_Tp>*>(this)[__i];
#else
          if constexpr (__is_scalar_abi<_Abi0>())
            {
              const _Tp* ptr = &first;
              return ptr[__i];
            }
          else
            return __i < simd_size_v<_Tp, _Abi0>
                     ? _M_subscript_read(__i)
                     : second[__i - simd_size_v<_Tp, _Abi0>];
#endif
        }
    }
    _GLIBCXX_SIMD_INTRINSIC
    void _M_set(size_t __i, _Tp __val) noexcept
    {
      if constexpr (_S_tuple_size == 1)
        return _M_subscript_write(__i, __val);
      else
        {
#ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
          reinterpret_cast<__may_alias<_Tp>*>(this)[__i] = __val;
#else
          if (__i < simd_size_v<_Tp, _Abi0>)
            _M_subscript_write(__i, __val);
          else
            second._M_set(__i - simd_size_v<_Tp, _Abi0>, __val);
#endif
        }
    }

  private:
    // helpers to read/write a single element, dispatching on whether the
    // first member is a scalar or a vector
    _GLIBCXX_SIMD_INTRINSIC
    _Tp _M_subscript_read([[maybe_unused]] size_t __i) const noexcept
    {
      if constexpr (__is_vectorizable_v<_FirstType>)
        return first;
      else
        return first[__i];
    }

    _GLIBCXX_SIMD_INTRINSIC
    void _M_subscript_write([[maybe_unused]] size_t __i, _Tp __y) noexcept
    {
      if constexpr (__is_vectorizable_v<_FirstType>)
        first = __y;
      else
        first._M_set(__i, __y);
    }
  };
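// Illustrative sketch (comment only): a fixed_size simd stores its values as
// a recursive pair-like structure of native chunks. For example, on a target
// whose widest native float vector holds 4 lanes, the storage for 7 floats
// could be a 4-lane chunk in `first` with the remaining 3 lanes in `second`
// (the exact chunking is whatever _AllNativeAbis::_BestAbi picks). Element
// access walks this structure: __t[5] reads lane 1 of the second chunk.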
// __make_simd_tuple
template <typename _Tp, typename _A0>
  _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0>
  __make_simd_tuple(simd<_Tp, _A0> __x0)
  { return {__data(__x0)}; }

template <typename _Tp, typename _A0, typename... _As>
  _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0, _As...>
  __make_simd_tuple(const simd<_Tp, _A0>& __x0, const simd<_Tp, _As>&... __xs)
  { return {__data(__x0), __make_simd_tuple(__xs...)}; }

template <typename _Tp, typename _A0>
  _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0>
  __make_simd_tuple(const typename _SimdTraits<_Tp, _A0>::_SimdMember& __arg0)
  { return {__arg0}; }

template <typename _Tp, typename _A0, typename _A1, typename... _Abis>
  _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp, _A0, _A1, _Abis...>
  __make_simd_tuple(
    const typename _SimdTraits<_Tp, _A0>::_SimdMember& __arg0,
    const typename _SimdTraits<_Tp, _A1>::_SimdMember& __arg1,
    const typename _SimdTraits<_Tp, _Abis>::_SimdMember&... __args)
  { return {__arg0, __make_simd_tuple<_Tp, _A1, _Abis...>(__arg1, __args...)}; }
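// Usage sketch (comment only): the overloads accept either simd objects or
// raw _SimdMember chunks and nest them right-to-left. Hypothetical example:
//   simd<float, _A0> __a;
//   simd<float, _A1> __b;
//   auto __t = __make_simd_tuple(__a, __b);
//   // decltype(__t) is _SimdTuple<float, _A0, _A1>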
// __to_simd_tuple
template <typename _Tp, size_t _Np, typename _V, size_t _NV, typename... _VX>
  _GLIBCXX_SIMD_INTRINSIC constexpr __fixed_size_storage_t<_Tp, _Np>
  __to_simd_tuple(const array<_V, _NV>& __from, const _VX... __fromX);
template <typename _Tp, size_t _Np,
          size_t _Offset = 0, // skip this many elements in __from0
          typename _R = __fixed_size_storage_t<_Tp, _Np>, typename _V0,
          typename _V0VT = _VectorTraits<_V0>, typename... _VX>
  _GLIBCXX_SIMD_INTRINSIC _R
  constexpr __to_simd_tuple(const _V0 __from0, const _VX... __fromX)
  {
    static_assert(is_same_v<typename _V0VT::value_type, _Tp>);
    static_assert(_Offset < _V0VT::_S_full_size);
    using _R0 = __vector_type_t<_Tp, _R::_S_first_size>;
    if constexpr (_R::_S_tuple_size == 1)
      {
        if constexpr (_Np == 1)
          return _R{__from0[_Offset]};
        else if constexpr (_Offset == 0 && _V0VT::_S_full_size >= _Np)
          return _R{__intrin_bitcast<_R0>(__from0)};
        else if constexpr (_Offset * 2 == _V0VT::_S_full_size
                             && _V0VT::_S_full_size / 2 >= _Np)
          return _R{__intrin_bitcast<_R0>(__extract_part<1, 2>(__from0))};
        else if constexpr (_Offset * 4 == _V0VT::_S_full_size
                             && _V0VT::_S_full_size / 4 >= _Np)
          return _R{__intrin_bitcast<_R0>(__extract_part<1, 4>(__from0))};
        else
          __assert_unreachable<_Tp>();
      }
    else
      {
        if constexpr (1 == _R::_S_first_size)
          { // split __from0 into single scalars
            if constexpr (_Offset + 1 < _V0VT::_S_full_size)
              // more scalars to be extracted from __from0
              return _R{__from0[_Offset],
                        __to_simd_tuple<_Tp, _Np - 1, _Offset + 1>(__from0,
                                                                   __fromX...)};
            else // at the end of __from0, continue with __fromX
              return _R{__from0[_Offset],
                        __to_simd_tuple<_Tp, _Np - 1, 0>(__fromX...)};
          }

        // all of __from0 fits the first member exactly
        else if constexpr (_V0VT::_S_full_size == _R::_S_first_size
                             && _Offset == 0)
          return _R{__from0,
                    __to_simd_tuple<_Tp, _Np - _R::_S_first_size>(__fromX...)};

        // the lower part of __from0 fills the first member; recurse with the
        // same __from0 at an adjusted _Offset
        else if constexpr (_V0VT::_S_full_size > _R::_S_first_size
                             && _Offset == 0)
          return _R{__intrin_bitcast<_R0>(__from0),
                    __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
                                    _R::_S_first_size>(__from0, __fromX...)};

        // a quarter of __from0 fills the first member
        else if constexpr (_Offset * 4 == _V0VT::_S_full_size
                             && _V0VT::_S_full_size >= 4 * _R::_S_first_size)
          return _R{__intrin_bitcast<_R0>(__extract_part<2, 4>(__from0)),
                    __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
                                    _Offset + _R::_S_first_size>(__from0,
                                                                 __fromX...)};

        else if constexpr (_Offset * 2 == _V0VT::_S_full_size
                             && _V0VT::_S_full_size >= 4 * _R::_S_first_size)
          return _R{__intrin_bitcast<_R0>(__extract_part<2, 4>(__from0)),
                    __to_simd_tuple<_Tp, _Np - _R::_S_first_size,
                                    _Offset + _R::_S_first_size>(__from0,
                                                                 __fromX...)};

        // the upper half of __from0 fills the first member; continue with
        // __fromX
        else if constexpr (_Offset * 2 == _V0VT::_S_full_size
                             && _V0VT::_S_full_size / 2 >= _R::_S_first_size)
          return _R{__intrin_bitcast<_R0>(__extract_part<1, 2>(__from0)),
                    __to_simd_tuple<_Tp, _Np - _R::_S_first_size, 0>(
                      __fromX...)};

        else
          __assert_unreachable<_Tp>();
      }
  }
template <typename _Tp, size_t _Np, typename _V, size_t _NV, typename... _VX>
  _GLIBCXX_SIMD_INTRINSIC constexpr __fixed_size_storage_t<_Tp, _Np>
  __to_simd_tuple(const array<_V, _NV>& __from, const _VX... __fromX)
  {
    if constexpr (is_same_v<_Tp, _V>)
      {
        static_assert(
          sizeof...(_VX) == 0,
          "An array of scalars must be the last argument to __to_simd_tuple");
        return __call_with_subscripts(
                 __from, make_index_sequence<_NV>(),
                 [&](const auto... __args) constexpr {
                   return __simd_tuple_concat(
                            _SimdTuple<_Tp, simd_abi::scalar>{__args}...,
                            _SimdTuple<_Tp>());
                 });
      }
    else
      return __call_with_subscripts(
               __from, make_index_sequence<_NV>(),
               [&](const auto... __args) constexpr {
                 return __to_simd_tuple<_Tp, _Np>(__args..., __fromX...);
               });
  }
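// Illustrative sketch (comment only): __to_simd_tuple repartitions one or
// more native vectors (or an array of them) into the chunk sizes that
// __fixed_size_storage_t<_Tp, _Np> requires, using subscripts, bitcasts, and
// __extract_part as appropriate. E.g., two 4-lane float vectors can become
// the storage for fixed_size_simd<float, 8> without a round-trip through
// memory.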
template <size_t, typename _Tp>
  using __to_tuple_helper = _Tp;

template <typename _Tp, typename _A0, size_t _NOut, size_t _Np,
          size_t... _Indexes>
  _GLIBCXX_SIMD_INTRINSIC __fixed_size_storage_t<_Tp, _NOut>
  __to_simd_tuple_impl(index_sequence<_Indexes...>,
                       const array<__vector_type_t<_Tp, simd_size_v<_Tp, _A0>>,
                                   _Np>& __args)
  {
    return __make_simd_tuple<_Tp, __to_tuple_helper<_Indexes, _A0>...>(
             __args[_Indexes]...);
  }

template <typename _Tp, typename _A0, size_t _NOut, size_t _Np,
          typename _R = __fixed_size_storage_t<_Tp, _NOut>>
  _GLIBCXX_SIMD_INTRINSIC _R
  __to_simd_tuple_sized(
    const array<__vector_type_t<_Tp, simd_size_v<_Tp, _A0>>, _Np>& __args)
  {
    static_assert(_Np * simd_size_v<_Tp, _A0> >= _NOut);
    return __to_simd_tuple_impl<_Tp, _A0, _NOut>(
             make_index_sequence<_R::_S_tuple_size>(), __args);
  }
// __optimize_simd_tuple
template <typename _Tp>
  _GLIBCXX_SIMD_INTRINSIC _SimdTuple<_Tp>
  __optimize_simd_tuple(const _SimdTuple<_Tp>)
  { return {}; }

template <typename _Tp, typename _Ap>
  _GLIBCXX_SIMD_INTRINSIC const _SimdTuple<_Tp, _Ap>&
  __optimize_simd_tuple(const _SimdTuple<_Tp, _Ap>& __x)
  { return __x; }

template <typename _Tp, typename _A0, typename _A1, typename... _Abis,
          typename _R = __fixed_size_storage_t<
            _Tp, _SimdTuple<_Tp, _A0, _A1, _Abis...>::_S_size()>>
  _GLIBCXX_SIMD_INTRINSIC _R
  __optimize_simd_tuple(const _SimdTuple<_Tp, _A0, _A1, _Abis...>& __x)
  {
    using _Tup = _SimdTuple<_Tp, _A0, _A1, _Abis...>;
    if constexpr (is_same_v<_R, _Tup>)
      return __x;
    else if constexpr (is_same_v<typename _R::_FirstType,
                                 typename _Tup::_FirstType>)
      return {__x.first, __optimize_simd_tuple(__x.second)};
    else if constexpr (__is_scalar_abi<_A0>()
                         || _A0::template _S_is_partial<_Tp>)
      return {__generate_from_n_evaluations<_R::_S_first_size,
                                            typename _R::_FirstType>(
                [&](auto __i) { return __x[__i]; }),
              __optimize_simd_tuple(
                __simd_tuple_pop_front<_R::_S_first_size>(__x))};
    else if constexpr (is_same_v<_A0, _A1>
                         && _R::_S_first_size
                              == simd_size_v<_Tp, _A0> + simd_size_v<_Tp, _A1>)
      return {__concat(__x.template _M_at<0>(), __x.template _M_at<1>()),
              __optimize_simd_tuple(__x.second.second)};
    else if constexpr (sizeof...(_Abis) >= 2
                         && _R::_S_first_size == (4 * simd_size_v<_Tp, _A0>)
                         && simd_size_v<_Tp, _A0> == __simd_tuple_element_t<
                              (sizeof...(_Abis) >= 2 ? 3 : 0), _Tup>::size())
      return {__concat(__concat(__x.template _M_at<0>(),
                                __x.template _M_at<1>()),
                       __concat(__x.template _M_at<2>(),
                                __x.template _M_at<3>())),
              __optimize_simd_tuple(__x.second.second.second.second)};
    else
      {
        static_assert(sizeof(_R) == sizeof(__x));
        _R __r;
        __builtin_memcpy(__r._M_as_charptr(), __x._M_as_charptr(),
                         sizeof(_Tp) * _R::_S_size());
        return __r;
      }
  }
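// Illustrative sketch (comment only): __optimize_simd_tuple re-chunks a
// tuple into the canonical __fixed_size_storage_t layout for its size,
// merging adjacent chunks with __concat where possible. E.g., a tuple of
// four 4-lane chunks whose canonical layout is one 16-lane chunk is rebuilt
// with two levels of __concat instead of a per-element copy.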
// __for_each(const _SimdTuple&, Fun)
template <size_t _Offset = 0, typename _Tp, typename _A0, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(const _SimdTuple<_Tp, _A0>& __t, _Fp&& __fun)
  { static_cast<_Fp&&>(__fun)(__make_meta<_Offset>(__t), __t.first); }

template <size_t _Offset = 0, typename _Tp, typename _A0, typename _A1,
          typename... _As, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(const _SimdTuple<_Tp, _A0, _A1, _As...>& __t, _Fp&& __fun)
  {
    __fun(__make_meta<_Offset>(__t), __t.first);
    __for_each<_Offset + simd_size<_Tp, _A0>::value>(__t.second,
                                                     static_cast<_Fp&&>(__fun));
  }
// __for_each(_SimdTuple&, Fun)
template <size_t _Offset = 0, typename _Tp, typename _A0, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(_SimdTuple<_Tp, _A0>& __t, _Fp&& __fun)
  { static_cast<_Fp&&>(__fun)(__make_meta<_Offset>(__t), __t.first); }

template <size_t _Offset = 0, typename _Tp, typename _A0, typename _A1,
          typename... _As, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(_SimdTuple<_Tp, _A0, _A1, _As...>& __t, _Fp&& __fun)
  {
    __fun(__make_meta<_Offset>(__t), __t.first);
    __for_each<_Offset + simd_size<_Tp, _A0>::value>(__t.second,
                                                     static_cast<_Fp&&>(__fun));
  }
// __for_each(_SimdTuple&, const _SimdTuple&, Fun)
template <size_t _Offset = 0, typename _Tp, typename _A0, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(_SimdTuple<_Tp, _A0>& __a, const _SimdTuple<_Tp, _A0>& __b,
             _Fp&& __fun)
  {
    static_cast<_Fp&&>(__fun)(__make_meta<_Offset>(__a), __a.first, __b.first);
  }

template <size_t _Offset = 0, typename _Tp, typename _A0, typename _A1,
          typename... _As, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(_SimdTuple<_Tp, _A0, _A1, _As...>& __a,
             const _SimdTuple<_Tp, _A0, _A1, _As...>& __b, _Fp&& __fun)
  {
    __fun(__make_meta<_Offset>(__a), __a.first, __b.first);
    __for_each<_Offset + simd_size<_Tp, _A0>::value>(__a.second, __b.second,
                                                     static_cast<_Fp&&>(__fun));
  }
// __for_each(const _SimdTuple&, const _SimdTuple&, Fun)
template <size_t _Offset = 0, typename _Tp, typename _A0, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(const _SimdTuple<_Tp, _A0>& __a, const _SimdTuple<_Tp, _A0>& __b,
             _Fp&& __fun)
  {
    static_cast<_Fp&&>(__fun)(__make_meta<_Offset>(__a), __a.first, __b.first);
  }

template <size_t _Offset = 0, typename _Tp, typename _A0, typename _A1,
          typename... _As, typename _Fp>
  _GLIBCXX_SIMD_INTRINSIC constexpr void
  __for_each(const _SimdTuple<_Tp, _A0, _A1, _As...>& __a,
             const _SimdTuple<_Tp, _A0, _A1, _As...>& __b, _Fp&& __fun)
  {
    __fun(__make_meta<_Offset>(__a), __a.first, __b.first);
    __for_each<_Offset + simd_size<_Tp, _A0>::value>(__a.second, __b.second,
                                                     static_cast<_Fp&&>(__fun));
  }
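// Usage sketch (comment only): __for_each visits every chunk together with
// its meta object, which carries the running element offset. Hypothetical
// example zeroing the storage of a float tuple __t:
//   __for_each(__t, [](auto __meta, auto& __chunk) {
//     __chunk = __meta._S_broadcast(0.f);
//   });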
// __extract_part(_SimdTuple)
template <int _Index, int _Total, int _Combine, typename _Tp, typename _A0,
          typename... _As>
  _GLIBCXX_SIMD_INTRINSIC auto // __vector_type_t or _SimdTuple
  __extract_part(const _SimdTuple<_Tp, _A0, _As...>& __x)
  {
    using _Tuple = _SimdTuple<_Tp, _A0, _As...>;
    static_assert(_Index + _Combine <= _Total && _Index >= 0 && _Total >= 1);
    constexpr size_t _Np = _Tuple::_S_size();
    static_assert(_Np >= _Total && _Np % _Total == 0);
    constexpr size_t __values_per_part = _Np / _Total;
    [[maybe_unused]] constexpr size_t __values_to_skip
      = _Index * __values_per_part;
    constexpr size_t __return_size = __values_per_part * _Combine;
    using _RetAbi = simd_abi::deduce_t<_Tp, __return_size>;

    // handle (optimize) the simple cases
    if constexpr (_Index == 0 && _Tuple::_S_first_size == __return_size)
      return __x.first._M_data;
    else if constexpr (_Index == 0 && _Total == _Combine)
      return __x;
    else if constexpr (_Index == 0 && _Tuple::_S_first_size >= __return_size)
      return __intrin_bitcast<__vector_type_t<_Tp, __return_size>>(
               __as_vector(__x.first));

    // recurse to skip unused data members at the beginning of the _SimdTuple
    else if constexpr (__values_to_skip >= _Tuple::_S_first_size)
      {
        if constexpr (_Tuple::_S_first_size % __values_per_part == 0)
          {
            constexpr int __parts_in_first
              = _Tuple::_S_first_size / __values_per_part;
            return __extract_part<_Index - __parts_in_first,
                                  _Total - __parts_in_first, _Combine>(
                     __x.second);
          }
        else
          return __extract_part<__values_to_skip - _Tuple::_S_first_size,
                                _Np - _Tuple::_S_first_size, __return_size>(
                   __x.second);
      }

    // the values to return straddle the first and subsequent members
    else if constexpr (__return_size > _Tuple::_S_first_size - __values_to_skip)
      {
#ifdef _GLIBCXX_SIMD_USE_ALIASING_LOADS
        const __may_alias<_Tp>* const element_ptr
          = reinterpret_cast<const __may_alias<_Tp>*>(&__x) + __values_to_skip;
        return __as_vector(simd<_Tp, _RetAbi>(element_ptr, element_aligned));
#else
        [[maybe_unused]] constexpr size_t __offset = __values_to_skip;
        return __as_vector(simd<_Tp, _RetAbi>([&](auto __i) constexpr {
                 constexpr _SizeConstant<__i + __offset> __k;
                 return __x[__k];
               }));
#endif
      }

    // all of the return values are in __x.first
    else if constexpr (_Tuple::_S_first_size % __values_per_part == 0)
      return __extract_part<_Index, _Tuple::_S_first_size / __values_per_part,
                            _Combine>(__x.first);
    else
      return __extract_part<__values_to_skip, _Tuple::_S_first_size,
                            _Combine * __values_per_part>(__x.first);
  }
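// Illustrative sketch (comment only): __extract_part<_Index, _Total,
// _Combine> views the _Np tuple elements as _Total equal parts and returns
// parts [_Index, _Index + _Combine). E.g., with _Np == 8, _Total == 4, and
// _Combine == 2, __extract_part<1, 4, 2>(__x) yields values [2, 6) as one
// 4-lane native vector (or _SimdTuple).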
// __fixed_size_storage_t<_Tp, _Np>
template <typename _Tp, int _Np, typename _Tuple,
          typename _Next = simd<_Tp, _AllNativeAbis::_BestAbi<_Tp, _Np>>,
          int _Remain = _Np - int(_Next::size())>
  struct __fixed_size_storage_builder;

template <typename _Tp, int _Np>
  struct __fixed_size_storage
  : public __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp>> {};

template <typename _Tp, int _Np, typename... _As, typename _Next>
  struct __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp, _As...>, _Next,
                                      0>
  { using type = _SimdTuple<_Tp, _As..., typename _Next::abi_type>; };

template <typename _Tp, int _Np, typename... _As, typename _Next, int _Remain>
  struct __fixed_size_storage_builder<_Tp, _Np, _SimdTuple<_Tp, _As...>, _Next,
                                      _Remain>
  {
    using type = typename __fixed_size_storage_builder<
      _Tp, _Remain, _SimdTuple<_Tp, _As..., typename _Next::abi_type>>::type;
  };
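// Illustrative sketch (comment only): the builder greedily appends the best
// native ABI for the remaining element count until none remain. Assuming a
// target whose widest native float vector holds 4 lanes,
//   __fixed_size_storage_t<float, 6>
// might resolve to a 4-lane chunk followed by a 2-lane chunk; the exact
// split is whatever _AllNativeAbis::_BestAbi picks at each step.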
// __autocvt_to_simd
template <typename _Tp, bool = is_arithmetic_v<__remove_cvref_t<_Tp>>>
  struct __autocvt_to_simd
  {
    _Tp _M_data;

    using _TT = __remove_cvref_t<_Tp>;

    _GLIBCXX_SIMD_INTRINSIC constexpr
    operator _TT()
    { return _M_data; }

    _GLIBCXX_SIMD_INTRINSIC constexpr
    operator _TT&()
    {
      static_assert(is_lvalue_reference<_Tp>::value, "");
      static_assert(!is_const<_Tp>::value, "");
      return _M_data;
    }

    _GLIBCXX_SIMD_INTRINSIC constexpr
    operator _TT*()
    {
      static_assert(is_lvalue_reference<_Tp>::value, "");
      static_assert(!is_const<_Tp>::value, "");
      return &_M_data;
    }

    _GLIBCXX_SIMD_INTRINSIC
    constexpr __autocvt_to_simd(_Tp dd) : _M_data(dd) {}

    template <typename _Abi>
      _GLIBCXX_SIMD_INTRINSIC
      operator simd<typename _TT::value_type, _Abi>()
      { return {__private_init, _M_data}; }

    template <typename _Abi>
      _GLIBCXX_SIMD_INTRINSIC
      operator simd<typename _TT::value_type, _Abi>&()
      {
        return *reinterpret_cast<simd<typename _TT::value_type, _Abi>*>(
                  &_M_data);
      }

    template <typename _Abi>
      _GLIBCXX_SIMD_INTRINSIC
      operator simd<typename _TT::value_type, _Abi>*()
      {
        return reinterpret_cast<simd<typename _TT::value_type, _Abi>*>(
                 &_M_data);
      }
  };

template <typename _Tp>
  __autocvt_to_simd(_Tp&&) -> __autocvt_to_simd<_Tp>;
template <typename _Tp>
  struct __autocvt_to_simd<_Tp, true>
  {
    using _TT = __remove_cvref_t<_Tp>;

    _Tp _M_data;

    fixed_size_simd<_TT, 1> _M_fd;

    _GLIBCXX_SIMD_INTRINSIC
    constexpr __autocvt_to_simd(_Tp dd) : _M_data(dd), _M_fd(_M_data) {}

    _GLIBCXX_SIMD_INTRINSIC
    ~__autocvt_to_simd()
    { _M_data = __data(_M_fd).first; }

    _GLIBCXX_SIMD_INTRINSIC
    operator fixed_size_simd<_TT, 1>()
    { return _M_fd; }

    _GLIBCXX_SIMD_INTRINSIC
    operator fixed_size_simd<_TT, 1>&()
    {
      static_assert(is_lvalue_reference<_Tp>::value, "");
      static_assert(!is_const<_Tp>::value, "");
      return _M_fd;
    }

    _GLIBCXX_SIMD_INTRINSIC
    operator fixed_size_simd<_TT, 1>*()
    {
      static_assert(is_lvalue_reference<_Tp>::value, "");
      static_assert(!is_const<_Tp>::value, "");
      return &_M_fd;
    }
  };
struct _CommonImplFixedSize;

template <int _Np, typename = __detail::__odr_helper>
  struct _SimdImplFixedSize;

template <int _Np, typename = __detail::__odr_helper>
  struct _MaskImplFixedSize;
// simd_abi::_Fixed
template <int _Np>
  struct simd_abi::_Fixed
  {
    template <typename _Tp> static constexpr size_t _S_size = _Np;

    template <typename _Tp> static constexpr size_t _S_full_size = _Np;

    // validity traits
    struct _IsValidAbiTag : public __bool_constant<(_Np > 0)> {};

    template <typename _Tp>
      struct _IsValidSizeFor
      : __bool_constant<(_Np <= simd_abi::max_fixed_size<_Tp>)> {};

    template <typename _Tp>
      struct _IsValid : conjunction<_IsValidAbiTag, __is_vectorizable<_Tp>,
                                    _IsValidSizeFor<_Tp>> {};

    template <typename _Tp>
      static constexpr bool _S_is_valid_v = _IsValid<_Tp>::value;

    // _S_masked
    _GLIBCXX_SIMD_INTRINSIC static constexpr _SanitizedBitMask<_Np>
    _S_masked(_BitMask<_Np> __x)
    { return __x._M_sanitized(); }

    _GLIBCXX_SIMD_INTRINSIC static constexpr _SanitizedBitMask<_Np>
    _S_masked(_SanitizedBitMask<_Np> __x)
    { return __x; }

    using _CommonImpl = _CommonImplFixedSize;
    using _SimdImpl = _SimdImplFixedSize<_Np>;
    using _MaskImpl = _MaskImplFixedSize<_Np>;

    // __traits
    template <typename _Tp, bool = _S_is_valid_v<_Tp>>
      struct __traits : _InvalidTraits {};

    template <typename _Tp>
      struct __traits<_Tp, true>
      {
        using _IsValid = true_type;
        using _SimdImpl = _SimdImplFixedSize<_Np>;
        using _MaskImpl = _MaskImplFixedSize<_Np>;

        // simd and simd_mask member types
        using _SimdMember = __fixed_size_storage_t<_Tp, _Np>;
        using _MaskMember = _SanitizedBitMask<_Np>;

        static constexpr size_t _S_simd_align
          = std::__bit_ceil(_Np * sizeof(_Tp));

        static constexpr size_t _S_mask_align = alignof(_MaskMember);

        // base class for simd, providing extra conversions
        struct _SimdBase
        {
          // The copy constructor ensures function arguments are passed via
          // the stack, which is important for ABI compatibility across TU
          // boundaries.
          _GLIBCXX_SIMD_ALWAYS_INLINE
          _SimdBase(const _SimdBase&) {}

          _SimdBase() = default;

          _GLIBCXX_SIMD_ALWAYS_INLINE
          explicit operator const _SimdMember &() const
          { return static_cast<const simd<_Tp, _Fixed>*>(this)->_M_data; }

          _GLIBCXX_SIMD_ALWAYS_INLINE
          explicit operator array<_Tp, _Np>() const
          {
            array<_Tp, _Np> __r;
            // _SimdMember can be larger because of higher alignment
            static_assert(sizeof(__r) <= sizeof(_SimdMember), "");
            __builtin_memcpy(__r.data(),
                             &static_cast<const _SimdMember&>(*this),
                             sizeof(__r));
            return __r;
          }
        };

        // empty. The bitset interface suffices
        struct _MaskBase {};

        struct _SimdCastType
        {
          _GLIBCXX_SIMD_ALWAYS_INLINE
          _SimdCastType(const array<_Tp, _Np>&);

          _GLIBCXX_SIMD_ALWAYS_INLINE
          _SimdCastType(const _SimdMember& dd) : _M_data(dd) {}

          _GLIBCXX_SIMD_ALWAYS_INLINE
          explicit operator const _SimdMember &() const { return _M_data; }

        private:
          const _SimdMember& _M_data;
        };

        class _MaskCastType
        {
          _MaskCastType() = delete;
        };
      };
  };
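// Illustrative sketch (comment only): simd_abi::fixed_size<_Np> names this
// _Fixed<_Np> tag. A fixed_size_simd<float, 8> therefore stores a
// __fixed_size_storage_t<float, 8> and uses a _SanitizedBitMask<8> as its
// mask, regardless of which native vector widths back the storage:
//   fixed_size_simd<float, 8> __v = /*...*/;
//   auto __m = __v > 0.f;  // mask held as 8 bits, not as vector registers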
// _CommonImplFixedSize
struct _CommonImplFixedSize
{
  // _S_store
  template <typename _Tp, typename... _As>
    _GLIBCXX_SIMD_INTRINSIC static void
    _S_store(const _SimdTuple<_Tp, _As...>& __x, void* __addr)
    {
      constexpr size_t _Np = _SimdTuple<_Tp, _As...>::_S_size();
      __builtin_memcpy(__addr, &__x, _Np * sizeof(_Tp));
    }
};
// _SimdImplFixedSize
template <int _Np, typename>
  struct _SimdImplFixedSize
  {
    // member types
    using _MaskMember = _SanitizedBitMask<_Np>;

    template <typename _Tp>
      using _SimdMember = __fixed_size_storage_t<_Tp, _Np>;

    template <typename _Tp>
      static constexpr size_t _S_tuple_size = _SimdMember<_Tp>::_S_tuple_size;

    template <typename _Tp>
      using _Simd = simd<_Tp, simd_abi::fixed_size<_Np>>;

    template <typename _Tp>
      using _TypeTag = _Tp*;
    // broadcast
    template <typename _Tp>
      static constexpr inline _SimdMember<_Tp>
      _S_broadcast(_Tp __x) noexcept
      {
        return _SimdMember<_Tp>::_S_generate(
                 [&](auto __meta) constexpr
                 { return __meta._S_broadcast(__x); });
      }
    // _S_generator
    template <typename _Fp, typename _Tp>
      static constexpr inline _SimdMember<_Tp>
      _S_generator(_Fp&& __gen, _TypeTag<_Tp>)
      {
        return _SimdMember<_Tp>::_S_generate([&__gen](auto __meta) constexpr {
                 return __meta._S_generator(
                          [&](auto __i) constexpr {
                            return __i < _Np
                                     ? __gen(_SizeConstant<__meta._S_offset
                                                             + __i>())
                                     : 0;
                          },
                          _TypeTag<_Tp>());
               });
      }
    // _S_load
    template <typename _Tp, typename _Up>
      static inline _SimdMember<_Tp>
      _S_load(const _Up* __mem, _TypeTag<_Tp>) noexcept
      {
        return _SimdMember<_Tp>::_S_generate([&](auto __meta) {
                 return __meta._S_load(&__mem[__meta._S_offset],
                                       _TypeTag<_Tp>());
               });
      }
    // _S_masked_load
    template <typename _Tp, typename... _As, typename _Up>
      static inline _SimdTuple<_Tp, _As...>
      _S_masked_load(const _SimdTuple<_Tp, _As...>& __old,
                     const _MaskMember __bits, const _Up* __mem) noexcept
      {
        auto __merge = __old;
        __for_each(__merge, [&](auto __meta, auto& __native) {
          if (__meta._S_submask(__bits).any())
#pragma GCC diagnostic push
            // __mem + __meta._S_offset could be past the end of the accessed
            // array; the mask's value guarantees the branch is not taken in
            // that case
#pragma GCC diagnostic ignored "-Warray-bounds"
            __native
              = __meta._S_masked_load(__native, __meta._S_make_mask(__bits),
                                      __mem + __meta._S_offset);
#pragma GCC diagnostic pop
        });
        return __merge;
      }
    // _S_store
    template <typename _Tp, typename _Up>
      static inline void
      _S_store(const _SimdMember<_Tp>& __v, _Up* __mem, _TypeTag<_Tp>) noexcept
      {
        __for_each(__v, [&](auto __meta, auto __native) {
          __meta._S_store(__native, &__mem[__meta._S_offset], _TypeTag<_Tp>());
        });
      }
    // _S_masked_store
    template <typename _Tp, typename... _As, typename _Up>
      static inline void
      _S_masked_store(const _SimdTuple<_Tp, _As...>& __v, _Up* __mem,
                      const _MaskMember __bits) noexcept
      {
        __for_each(__v, [&](auto __meta, auto __native) {
          if (__meta._S_submask(__bits).any())
#pragma GCC diagnostic push
            // __mem + __meta._S_offset could be past the end; see
            // _S_masked_load above
#pragma GCC diagnostic ignored "-Warray-bounds"
            __meta._S_masked_store(__native, __mem + __meta._S_offset,
                                   __meta._S_make_mask(__bits));
#pragma GCC diagnostic pop
        });
      }
    // negation
    template <typename _Tp, typename... _As>
      static inline _MaskMember
      _S_negate(const _SimdTuple<_Tp, _As...>& __x) noexcept
      {
        _MaskMember __bits = 0;
        __for_each(
          __x, [&__bits](auto __meta, auto __native) constexpr {
            __bits
              |= __meta._S_mask_to_shifted_ullong(__meta._S_negate(__native));
          });
        return __bits;
      }
    // reductions
    template <typename _Tp, typename _BinaryOperation>
      static constexpr inline _Tp
      _S_reduce(const _Simd<_Tp>& __x, const _BinaryOperation& __binary_op)
      {
        using _Tup = _SimdMember<_Tp>;
        const _Tup& __tup = __data(__x);
        if constexpr (_Tup::_S_tuple_size == 1)
          return _Tup::_FirstAbi::_SimdImpl::_S_reduce(
                   __tup.template _M_simd_at<0>(), __binary_op);
        else if constexpr (_Tup::_S_tuple_size == 2 && _Tup::_S_size() > 2
                             && _Tup::_SecondType::_S_size() == 1)
          {
            return __binary_op(simd<_Tp, simd_abi::scalar>(
                                 reduce(__tup.template _M_simd_at<0>(),
                                        __binary_op)),
                               __tup.template _M_simd_at<1>())[0];
          }
        else if constexpr (_Tup::_S_tuple_size == 2 && _Tup::_S_size() > 4
                             && _Tup::_SecondType::_S_size() == 2)
          {
            return __binary_op(
                     simd<_Tp, simd_abi::scalar>(
                       reduce(__tup.template _M_simd_at<0>(), __binary_op)),
                     simd<_Tp, simd_abi::scalar>(
                       reduce(__tup.template _M_simd_at<1>(), __binary_op)))[0];
          }
        else
          {
            const auto& __x2 = __call_with_n_evaluations<
              __div_roundup(_Tup::_S_tuple_size, 2)>(
              [](auto __first_simd, auto... __remaining) {
                if constexpr (sizeof...(__remaining) == 0)
                  return __first_simd;
                else
                  {
                    using _Tup2
                      = _SimdTuple<_Tp,
                                   typename decltype(__first_simd)::abi_type,
                                   typename decltype(__remaining)::abi_type...>;
                    return fixed_size_simd<_Tp, _Tup2::_S_size()>(
                             __private_init,
                             __make_simd_tuple(__first_simd, __remaining...));
                  }
              },
              [&](auto __i) {
                auto __left = __tup.template _M_simd_at<2 * __i>();
                if constexpr (2 * __i + 1 == _Tup::_S_tuple_size)
                  return __left;
                else
                  {
                    auto __right = __tup.template _M_simd_at<2 * __i + 1>();
                    using _LT = decltype(__left);
                    using _RT = decltype(__right);
                    if constexpr (_LT::size() == _RT::size())
                      return __binary_op(__left, __right);
                    else
                      {
                        _GLIBCXX_SIMD_USE_CONSTEXPR_API
                        typename _LT::mask_type __k(
                          __private_init,
                          [](auto __j) constexpr { return __j < _RT::size(); });
                        _LT __ext_right = __left;
                        where(__k, __ext_right)
                          = __proposed::resizing_simd_cast<_LT>(__right);
                        where(__k, __left) = __binary_op(__left, __ext_right);
                        return __left;
                      }
                  }
              });
            return reduce(__x2, __binary_op);
          }
      }
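    // Illustrative sketch (comment only): the general case reduces pairwise
    // over chunks. For chunk sizes {4, 4, 2}, one step combines chunks 0 and
    // 1 directly (same size) and carries chunk 2 through, producing {4, 2};
    // the next step pads the 2-lane chunk to 4 lanes via the masked path,
    // until a single chunk remains and its own _S_reduce finishes the job.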
    // _S_min, _S_max
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_min(const _SimdTuple<_Tp, _As...>& __a,
             const _SimdTuple<_Tp, _As...>& __b)
      {
        return __a._M_apply_per_chunk(
                 [](auto __impl, auto __aa, auto __bb) constexpr
                 { return __impl._S_min(__aa, __bb); },
                 __b);
      }
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_max(const _SimdTuple<_Tp, _As...>& __a,
             const _SimdTuple<_Tp, _As...>& __b)
      {
        return __a._M_apply_per_chunk(
                 [](auto __impl, auto __aa, auto __bb) constexpr
                 { return __impl._S_max(__aa, __bb); },
                 __b);
      }
    // _S_complement
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_complement(const _SimdTuple<_Tp, _As...>& __x) noexcept
      {
        return __x._M_apply_per_chunk(
                 [](auto __impl, auto __xx) constexpr
                 { return __impl._S_complement(__xx); });
      }
    // _S_unary_minus
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_unary_minus(const _SimdTuple<_Tp, _As...>& __x) noexcept
      {
        return __x._M_apply_per_chunk(
                 [](auto __impl, auto __xx) constexpr
                 { return __impl._S_unary_minus(__xx); });
      }
    // arithmetic operators
#define _GLIBCXX_SIMD_FIXED_OP(name_, op_)                                    \
    template <typename _Tp, typename... _As>                                  \
      static inline constexpr _SimdTuple<_Tp, _As...> name_(                  \
        const _SimdTuple<_Tp, _As...>& __x,                                   \
        const _SimdTuple<_Tp, _As...>& __y)                                   \
      {                                                                       \
        return __x._M_apply_per_chunk(                                        \
                 [](auto __impl, auto __xx, auto __yy) constexpr              \
                 { return __impl.name_(__xx, __yy); },                        \
                 __y);                                                        \
      }

    _GLIBCXX_SIMD_FIXED_OP(_S_plus, +)
    _GLIBCXX_SIMD_FIXED_OP(_S_minus, -)
    _GLIBCXX_SIMD_FIXED_OP(_S_multiplies, *)
    _GLIBCXX_SIMD_FIXED_OP(_S_divides, /)
    _GLIBCXX_SIMD_FIXED_OP(_S_modulus, %)
    _GLIBCXX_SIMD_FIXED_OP(_S_bit_and, &)
    _GLIBCXX_SIMD_FIXED_OP(_S_bit_or, |)
    _GLIBCXX_SIMD_FIXED_OP(_S_bit_xor, ^)
    _GLIBCXX_SIMD_FIXED_OP(_S_bit_shift_left, <<)
    _GLIBCXX_SIMD_FIXED_OP(_S_bit_shift_right, >>)
#undef _GLIBCXX_SIMD_FIXED_OP
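    // Usage sketch (comment only): every binary operator on fixed_size simd
    // objects expands to one chunk-wise call per tuple member, e.g. for
    //   fixed_size_simd<float, 8> __a, __b;
    //   auto __c = __a + __b;  // dispatches to _S_plus
    // _S_plus applies the first chunk's native _S_plus to __a.first and
    // __b.first, then recurses into .second for the rest.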
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_bit_shift_left(const _SimdTuple<_Tp, _As...>& __x, int __y)
      {
        return __x._M_apply_per_chunk(
                 [__y](auto __impl, auto __xx) constexpr
                 { return __impl._S_bit_shift_left(__xx, __y); });
      }
    template <typename _Tp, typename... _As>
      static inline constexpr _SimdTuple<_Tp, _As...>
      _S_bit_shift_right(const _SimdTuple<_Tp, _As...>& __x, int __y)
      {
        return __x._M_apply_per_chunk(
                 [__y](auto __impl, auto __xx) constexpr
                 { return __impl._S_bit_shift_right(__xx, __y); });
      }
    // math
#define _GLIBCXX_SIMD_APPLY_ON_TUPLE(_RetTp, __name)                          \
    template <typename _Tp, typename... _As, typename... _More>               \
      static inline __fixed_size_storage_t<_RetTp, _Np>                       \
      _S_##__name(const _SimdTuple<_Tp, _As...>& __x,                         \
                  const _More&... __more)                                     \
      {                                                                       \
        if constexpr (sizeof...(_More) == 0)                                  \
          {                                                                   \
            if constexpr (is_same_v<_Tp, _RetTp>)                             \
              return __x._M_apply_per_chunk(                                  \
                       [](auto __impl, auto __xx) constexpr {                 \
                         using _V = typename decltype(__impl)::simd_type;     \
                         return __data(__name(_V(__private_init, __xx)));     \
                       });                                                    \
            else                                                              \
              return __optimize_simd_tuple(                                   \
                       __x.template _M_apply_r<_RetTp>(                       \
                         [](auto __impl, auto __xx)                           \
                         { return __impl._S_##__name(__xx); }));              \
          }                                                                   \
        else if constexpr (                                                   \
          is_same_v<                                                          \
            _Tp,                                                              \
            _RetTp> && (... && is_same_v<_SimdTuple<_Tp, _As...>, _More>) )   \
          return __x._M_apply_per_chunk(                                      \
                   [](auto __impl, auto __xx, auto... __pack) constexpr {     \
                     using _V = typename decltype(__impl)::simd_type;         \
                     return __data(__name(_V(__private_init, __xx),           \
                                          _V(__private_init, __pack)...));    \
                   }, __more...);                                             \
        else if constexpr (is_same_v<_Tp, _RetTp>)                            \
          return __x._M_apply_per_chunk(                                      \
                   [](auto __impl, auto __xx, auto... __pack) constexpr {     \
                     using _V = typename decltype(__impl)::simd_type;         \
                     return __data(__name(_V(__private_init, __xx),           \
                                          __autocvt_to_simd(__pack)...));     \
                   }, __more...);                                             \
        else                                                                  \
          __assert_unreachable<_Tp>();                                        \
      }
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, acos)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, asin)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, atan)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, atan2)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, cos)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, sin)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, tan)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, acosh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, asinh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, atanh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, cosh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, sinh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, tanh)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, exp)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, exp2)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, expm1)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(int, ilogb)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log10)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log1p)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, log2)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, logb)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, scalbn)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, scalbln)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, cbrt)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, abs)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fabs)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, pow)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, sqrt)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, erf)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, erfc)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, lgamma)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, tgamma)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, trunc)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, ceil)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, floor)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, nearbyint)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, rint)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(long, lrint)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(long long, llrint)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, round)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(long, lround)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(long long, llround)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, ldexp)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmod)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, remainder)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, copysign)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, nextafter)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fdim)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmax)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fmin)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(_Tp, fma)
    _GLIBCXX_SIMD_APPLY_ON_TUPLE(int, fpclassify)
#undef _GLIBCXX_SIMD_APPLY_ON_TUPLE
    template <typename _Tp, typename... _Abis>
      static _SimdTuple<_Tp, _Abis...>
      _S_remquo(const _SimdTuple<_Tp, _Abis...>& __x,
                const _SimdTuple<_Tp, _Abis...>& __y,
                __fixed_size_storage_t<
                  int, _SimdTuple<_Tp, _Abis...>::_S_size()>* __z)
      {
        return __x._M_apply_per_chunk(
                 [](auto __impl, const auto __xx, const auto __yy, auto& __zz)
                 { return __impl._S_remquo(__xx, __yy, &__zz); },
                 __y, *__z);
      }
    template <typename _Tp, typename... _As>
      static inline _SimdTuple<_Tp, _As...>
      _S_frexp(const _SimdTuple<_Tp, _As...>& __x,
               __fixed_size_storage_t<int, _Np>& __exp) noexcept
      {
        return __x._M_apply_per_chunk(
                 [](auto __impl, const auto& __a, auto& __b) {
                   return __data(
                            frexp(typename decltype(__impl)::simd_type(
                                    __private_init, __a),
                                  __autocvt_to_simd(__b)));
                 },
                 __exp);
      }
#define _GLIBCXX_SIMD_TEST_ON_TUPLE_(name_)                                   \
    template <typename _Tp, typename... _As>                                  \
      static inline _MaskMember                                               \
      _S_##name_(const _SimdTuple<_Tp, _As...>& __x) noexcept                 \
      {                                                                       \
        return _M_test([](auto __impl, auto __xx)                             \
                       { return __impl._S_##name_(__xx); },                   \
                       __x);                                                  \
      }

    _GLIBCXX_SIMD_TEST_ON_TUPLE_(isinf)
    _GLIBCXX_SIMD_TEST_ON_TUPLE_(isfinite)
    _GLIBCXX_SIMD_TEST_ON_TUPLE_(isnan)
    _GLIBCXX_SIMD_TEST_ON_TUPLE_(isnormal)
    _GLIBCXX_SIMD_TEST_ON_TUPLE_(signbit)
#undef _GLIBCXX_SIMD_TEST_ON_TUPLE_
    // _S_increment, _S_decrement
    template <typename... _Ts>
      _GLIBCXX_SIMD_INTRINSIC static constexpr void
      _S_increment(_SimdTuple<_Ts...>& __x)
      {
        __for_each(
          __x, [](auto __meta, auto& native) constexpr
          { __meta._S_increment(native); });
      }
    template <typename... _Ts>
      _GLIBCXX_SIMD_INTRINSIC static constexpr void
      _S_decrement(_SimdTuple<_Ts...>& __x)
      {
        __for_each(
          __x, [](auto __meta, auto& native) constexpr
          { __meta._S_decrement(native); });
      }
    // compares
#define _GLIBCXX_SIMD_CMP_OPERATIONS(__cmp)                                   \
    template <typename _Tp, typename... _As>                                  \
      _GLIBCXX_SIMD_INTRINSIC constexpr static _MaskMember                    \
      __cmp(const _SimdTuple<_Tp, _As...>& __x,                               \
            const _SimdTuple<_Tp, _As...>& __y)                               \
      {                                                                       \
        return _M_test([](auto __impl, auto __xx, auto __yy) constexpr        \
                       { return __impl.__cmp(__xx, __yy); },                  \
                       __x, __y);                                             \
      }

    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_equal_to)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_not_equal_to)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_less)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_less_equal)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isless)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_islessequal)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isgreater)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isgreaterequal)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_islessgreater)
    _GLIBCXX_SIMD_CMP_OPERATIONS(_S_isunordered)
#undef _GLIBCXX_SIMD_CMP_OPERATIONS
    // smart_reference access
    template <typename _Tp, typename... _As, typename _Up>
      _GLIBCXX_SIMD_INTRINSIC static void
      _S_set(_SimdTuple<_Tp, _As...>& __v, int __i, _Up&& __x) noexcept
      { __v._M_set(__i, static_cast<_Up&&>(__x)); }
    // _S_masked_assign
    template <typename _Tp, typename... _As>
      _GLIBCXX_SIMD_INTRINSIC static void
      _S_masked_assign(const _MaskMember __bits,
                       _SimdTuple<_Tp, _As...>& __lhs,
                       const __type_identity_t<_SimdTuple<_Tp, _As...>>& __rhs)
      {
        __for_each(
          __lhs, __rhs,
          [&](auto __meta, auto& __native_lhs, auto __native_rhs) constexpr {
            __meta._S_masked_assign(__meta._S_make_mask(__bits), __native_lhs,
                                    __native_rhs);
          });
      }
    // Optimization for the case where the RHS is a scalar. No need to
    // broadcast the scalar to a simd first.
    template <typename _Tp, typename... _As>
      _GLIBCXX_SIMD_INTRINSIC static void
      _S_masked_assign(const _MaskMember __bits,
                       _SimdTuple<_Tp, _As...>& __lhs,
                       const __type_identity_t<_Tp> __rhs)
      {
        __for_each(
          __lhs, [&](auto __meta, auto& __native_lhs) constexpr {
            __meta._S_masked_assign(__meta._S_make_mask(__bits), __native_lhs,
                                    __rhs);
          });
      }
    // _S_masked_cassign
    template <typename _Op, typename _Tp, typename... _As>
      static inline void
      _S_masked_cassign(const _MaskMember __bits,
                        _SimdTuple<_Tp, _As...>& __lhs,
                        const _SimdTuple<_Tp, _As...>& __rhs, _Op __op)
      {
        __for_each(
          __lhs, __rhs,
          [&](auto __meta, auto& __native_lhs, auto __native_rhs) constexpr {
            __meta.template _S_masked_cassign(__meta._S_make_mask(__bits),
                                              __native_lhs, __native_rhs,
                                              __op);
          });
      }
    // Optimization for the case where the RHS is a scalar.
    template <typename _Op, typename _Tp, typename... _As>
      static inline void
      _S_masked_cassign(const _MaskMember __bits,
                        _SimdTuple<_Tp, _As...>& __lhs, const _Tp& __rhs,
                        _Op __op)
      {
        __for_each(
          __lhs, [&](auto __meta, auto& __native_lhs) constexpr {
            __meta.template _S_masked_cassign(__meta._S_make_mask(__bits),
                                              __native_lhs, __rhs, __op);
          });
      }
    // _S_masked_unary
    template <template <typename> class _Op, typename _Tp, typename... _As>
      static inline _SimdTuple<_Tp, _As...>
      _S_masked_unary(const _MaskMember __bits,
                      const _SimdTuple<_Tp, _As...>& __v)
      {
        return __v._M_apply_wrapped(
                 [&__bits](auto __meta, auto __native) constexpr {
                   return __meta.template _S_masked_unary<_Op>(
                            __meta._S_make_mask(__bits), __native);
                 });
      }
  };
// _MaskImplFixedSize
template <int _Np, typename>
  struct _MaskImplFixedSize
  {
    static_assert(
      sizeof(_ULLong) * __CHAR_BIT__ >= _Np,
      "The fixed_size implementation relies on one _ULLong being able to store "
      "all boolean elements."); // required in load & store

    // member types
    using _Abi = simd_abi::fixed_size<_Np>;

    using _MaskMember = _SanitizedBitMask<_Np>;

    template <typename _Tp>
      using _FirstAbi = typename __fixed_size_storage_t<_Tp, _Np>::_FirstAbi;

    template <typename _Tp>
      using _TypeTag = _Tp*;
    // _S_broadcast
    template <typename>
      _GLIBCXX_SIMD_INTRINSIC static constexpr _MaskMember
      _S_broadcast(bool __x)
      { return __x ? ~_MaskMember() : _MaskMember(); }

    // _S_load
    template <typename>
      _GLIBCXX_SIMD_INTRINSIC static constexpr _MaskMember
      _S_load(const bool* __mem)
      {
        using _Ip = __int_for_sizeof_t<bool>;
        // the following load uses element_aligned and relies on __mem already
        // carrying alignment information from where this load function was
        // called
        const simd<_Ip, _Abi> __bools(
          reinterpret_cast<const __may_alias<_Ip>*>(__mem), element_aligned);
        return __data(__bools != 0);
      }
    // _S_to_bits
    template <bool _Sanitized>
      _GLIBCXX_SIMD_INTRINSIC static constexpr _SanitizedBitMask<_Np>
      _S_to_bits(_BitMask<_Np, _Sanitized> __x)
      {
        if constexpr (_Sanitized)
          return __x;
        else
          return __x._M_sanitized();
      }
    // _S_convert
    template <typename _Tp, typename _Up, typename _UAbi>
      _GLIBCXX_SIMD_INTRINSIC static constexpr _MaskMember
      _S_convert(simd_mask<_Up, _UAbi> __x)
      {
        return _UAbi::_MaskImpl::_S_to_bits(__data(__x))
                 .template _M_extract<0, _Np>();
      }
    // _S_from_bitmask
    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static _MaskMember
      _S_from_bitmask(_MaskMember __bits, _TypeTag<_Tp>) noexcept
      { return __bits; }

    // _S_load
    static inline _MaskMember
    _S_load(const bool* __mem) noexcept
    {
      // TODO: _UChar is not necessarily the best type to use here. For
      // smaller _Np, _UShort, _UInt, _ULLong, float, and double can be more
      // efficient.
      _ULLong __r = 0;
      using _Vs = __fixed_size_storage_t<_UChar, _Np>;
      __for_each(_Vs{}, [&](auto __meta, auto) {
        __r |= __meta._S_mask_to_shifted_ullong(
                 __meta._S_mask_impl._S_load(
                   &__mem[__meta._S_offset],
                   _SizeConstant<__meta._S_size()>()));
      });
      return __r;
    }
    // _S_masked_load
    static inline _MaskMember
    _S_masked_load(_MaskMember __merge, _MaskMember __mask,
                   const bool* __mem) noexcept
    {
      _BitOps::_S_bit_iteration(__mask.to_ullong(), [&](auto __i) {
        __merge.set(__i, __mem[__i]);
      });
      return __merge;
    }
    // _S_store
    static inline void
    _S_store(const _MaskMember __bitmask, bool* __mem) noexcept
    {
      if constexpr (_Np == 1)
        __mem[0] = __bitmask[0];
      else
        _FirstAbi<_UChar>::_CommonImpl::_S_store_bool_array(__bitmask, __mem);
    }
    // _S_masked_store
    static inline void
    _S_masked_store(const _MaskMember __v, bool* __mem,
                    const _MaskMember __k) noexcept
    {
      _BitOps::_S_bit_iteration(__k, [&](auto __i) { __mem[__i] = __v[__i]; });
    }
    // logical and bitwise operators
    _GLIBCXX_SIMD_INTRINSIC static _MaskMember
    _S_logical_and(const _MaskMember& __x, const _MaskMember& __y) noexcept
    { return __x & __y; }

    _GLIBCXX_SIMD_INTRINSIC static _MaskMember
    _S_logical_or(const _MaskMember& __x, const _MaskMember& __y) noexcept
    { return __x | __y; }

    _GLIBCXX_SIMD_INTRINSIC static constexpr _MaskMember
    _S_bit_not(const _MaskMember& __x) noexcept
    { return ~__x; }

    _GLIBCXX_SIMD_INTRINSIC static _MaskMember
    _S_bit_and(const _MaskMember& __x, const _MaskMember& __y) noexcept
    { return __x & __y; }

    _GLIBCXX_SIMD_INTRINSIC static _MaskMember
    _S_bit_or(const _MaskMember& __x, const _MaskMember& __y) noexcept
    { return __x | __y; }

    _GLIBCXX_SIMD_INTRINSIC static _MaskMember
    _S_bit_xor(const _MaskMember& __x, const _MaskMember& __y) noexcept
    { return __x ^ __y; }
    // smart_reference access
    _GLIBCXX_SIMD_INTRINSIC static void
    _S_set(_MaskMember& __k, int __i, bool __x) noexcept
    { __k.set(__i, __x); }
    // _S_masked_assign
    _GLIBCXX_SIMD_INTRINSIC static void
    _S_masked_assign(const _MaskMember __k, _MaskMember& __lhs,
                     const _MaskMember __rhs)
    { __lhs = (__lhs & ~__k) | (__rhs & __k); }
    // Optimization for the case where the RHS is a scalar.
    _GLIBCXX_SIMD_INTRINSIC static void
    _S_masked_assign(const _MaskMember __k, _MaskMember& __lhs,
                     const bool __rhs)
    {
      if (__rhs)
        __lhs |= __k;
      else
        __lhs &= ~__k;
    }
    // reductions
    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static bool
      _S_all_of(simd_mask<_Tp, _Abi> __k)
      { return __data(__k).all(); }

    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static bool
      _S_any_of(simd_mask<_Tp, _Abi> __k)
      { return __data(__k).any(); }

    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static bool
      _S_none_of(simd_mask<_Tp, _Abi> __k)
      { return __data(__k).none(); }
    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static bool
      _S_some_of([[maybe_unused]] simd_mask<_Tp, _Abi> __k)
      {
        if constexpr (_Np == 1)
          return false;
        else
          return __data(__k).any() && !__data(__k).all();
      }
    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static int
      _S_popcount(simd_mask<_Tp, _Abi> __k)
      { return __data(__k).count(); }

    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static int
      _S_find_first_set(simd_mask<_Tp, _Abi> __k)
      { return std::__countr_zero(__data(__k).to_ullong()); }

    template <typename _Tp>
      _GLIBCXX_SIMD_INTRINSIC static int
      _S_find_last_set(simd_mask<_Tp, _Abi> __k)
      { return std::__bit_width(__data(__k).to_ullong()) - 1; }
  };
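// Illustrative sketch (comment only): because fixed_size masks are plain
// bitmasks, the mask reductions map onto integer bit operations. E.g., for
//   fixed_size_simd_mask<float, 8> __k;
// popcount(__k) is a population count over 8 bits and find_first_set(__k)
// is a count-trailing-zeros on the underlying _ULLong.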
_GLIBCXX_SIMD_END_NAMESPACE
#endif // __cplusplus >= 201703L
#endif // _GLIBCXX_EXPERIMENTAL_SIMD_FIXED_SIZE_H_