#include "../common/aliasingentryhelper.h"
#include "../common/memoryfwd.h"
// ...
class WriteMaskedVector
// ...
vec->assign(x, mask);
// ...
return vec->call(f, mask);
return vec->call(f, mask);
return vec->apply(f, mask);
return vec->apply(f, mask);
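// -------------------------------------------------------------------------
// Usage sketch (editorial addition, not part of this header): assigning
// through the WriteMaskedVector returned by Vector<T>::operator()(const
// Mask &) forwards to the assign()/call()/apply() overloads above and only
// touches the lanes selected by the mask. A minimal sketch assuming the
// public Vc API (<Vc/Vc>); the function and variable names are hypothetical.
Vc::float_v clampNegativeToZero(Vc::float_v v)
{
    const Vc::float_m negative = v < Vc::float_v::Zero();
    v(negative) = Vc::float_v::Zero();  // masked assign: only negative lanes are overwritten
    return v;
}
// -------------------------------------------------------------------------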
template<typename T> class Vector
// ...
#ifdef VC_COMPILE_BENCHMARKS
// ...
#ifdef VC_PASSING_VECTOR_BY_VALUE_IS_BROKEN
// ...
template<typename IndexT> Vector(const EntryType *mem, const IndexT *indexes);
template<typename IndexT> Vector(const EntryType *mem, const IndexT *indexes, MaskArg mask);
template<typename S1, typename S2, typename IT> Vector(const S1 *array, const S2 S1::* member1, const EntryType S2::* member2, VC_ALIGNED_PARAMETER(IT) indexes, MaskArg mask);
#ifdef VC_USE_SET_GATHERS
// ...
template<typename S1, typename IT> void gather(const S1 *array, const EntryType S1::* member1, VC_ALIGNED_PARAMETER(IT) indexes);
template<typename S1, typename IT> void gather(const S1 *array, const EntryType S1::* member1, VC_ALIGNED_PARAMETER(IT) indexes, MaskArg mask);
template<typename S1, typename S2, typename IT> void gather(const S1 *array, const S2 S1::* member1, const EntryType S2::* member2, VC_ALIGNED_PARAMETER(IT) indexes);
template<typename S1, typename S2, typename IT> void gather(const S1 *array, const S2 S1::* member1, const EntryType S2::* member2, VC_ALIGNED_PARAMETER(IT) indexes, MaskArg mask);
// ...
template<typename S1, typename IT> void scatter(S1 *array, EntryType S1::* member1, VC_ALIGNED_PARAMETER(IT) indexes, MaskArg mask) const;
template<typename S1, typename S2, typename IT> void scatter(S1 *array, S2 S1::* member1, EntryType S2::* member2, VC_ALIGNED_PARAMETER(IT) indexes) const;
template<typename S1, typename S2, typename IT> void scatter(S1 *array, S2 S1::* member1, EntryType S2::* member2, VC_ALIGNED_PARAMETER(IT) indexes, MaskArg mask) const;
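// -------------------------------------------------------------------------
// Usage sketch (editorial addition, not part of this header): the
// member-pointer gather/scatter overloads declared above read from and write
// to an array-of-structs layout. A minimal sketch assuming the public Vc
// API; the struct Point and all variable names are hypothetical.
struct Point { float x, y, z; };

void scaleX(const Point *in, Point *out)
{
    const Vc::float_v::IndexType idx = Vc::float_v::IndexType::IndexesFromZero();
    Vc::float_v px;
    px.gather(in, &Point::x, idx);        // gather Point::x for the first Size points
    const Vc::float_m positive = px > 0.f;
    px *= 2.f;
    px.scatter(out, &Point::x, idx, positive);  // masked scatter: only lanes where positive is set
}
// -------------------------------------------------------------------------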
#if defined(VC_GCC) && VC_GCC >= 0x40300 && VC_GCC < 0x40400
// ...
#define OP(symbol, fun) \
    Vc_INTRINSIC Vector &operator symbol##=(const Vector<T> &x) { data() = VectorHelper<T>::fun(data(), x.data()); return *this; } \
    Vc_INTRINSIC Vector &operator symbol##=(EntryType x) { return operator symbol##=(Vector<T>(x)); } \
    Vc_INTRINSIC Vector Vc_PURE operator symbol(const Vector<T> &x) const { return HT::fun(data(), x.data()); } \
    template<typename TT> Vc_INTRINSIC VC_EXACT_TYPE(TT, EntryType, Vector) Vc_PURE operator symbol(TT x) const { return operator symbol(Vector(x)); }
// ...
#define OP(symbol, fun) \
    Vc_INTRINSIC_L Vector &operator symbol##=(const Vector<T> &x) Vc_INTRINSIC_R; \
    Vc_INTRINSIC_L Vector operator symbol(const Vector<T> &x) const Vc_PURE Vc_INTRINSIC_R; \
    Vc_INTRINSIC Vector &operator symbol##=(EntryType x) { return operator symbol##=(Vector(x)); } \
    template<typename TT> Vc_INTRINSIC VC_EXACT_TYPE(TT, EntryType, Vector) Vc_PURE operator symbol(TT x) const { return operator symbol(Vector(x)); }
// ...
#define OPcmp(symbol, fun) \
    Vc_INTRINSIC Mask Vc_PURE operator symbol(const Vector<T> &x) const { return VectorHelper<T>::fun(data(), x.data()); } \
    template<typename TT> Vc_INTRINSIC VC_EXACT_TYPE(TT, EntryType, Mask) Vc_PURE operator symbol(TT x) const { return operator symbol(Vector(x)); }
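// -------------------------------------------------------------------------
// Editorial note (not part of this header): OP and OPcmp stamp out the
// arithmetic and comparison operators. The token pasting in symbol##= turns
// an invocation such as OP(+, add) into operator+= / operator+, and OPcmp
// yields comparison operators that return a Mask rather than a bool. The
// actual invocation list sits in the omitted lines. A minimal usage sketch
// assuming the public Vc API; names are hypothetical.
Vc::float_m exampleOperators(Vc::float_v a, const Vc::float_v &b)
{
    a += b;        // Vector (+)= Vector, generated by OP
    a = a * 2.f;   // Vector * scalar via the VC_EXACT_TYPE overload
    return a < b;  // comparison from OPcmp: a per-lane Mask
}
// -------------------------------------------------------------------------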
for (int i = 1; i < Size; ++i) {
// ...
#include "forceToRegisters.tcc"
// ...
__asm__ __volatile__(""::"x"(x1.data()[0]), "x"(x1.data()[1]));
#elif defined(VC_MSVC)
#pragma optimize("g", off)
// ...
#include "vector.tcc"
#endif // SSE_VECTOR_H