author     Erik Verbruggen <erik.verbruggen@theqtcompany.com>   2015-12-03 12:50:44 +0100
committer  Erik Verbruggen <erik.verbruggen@qt.io>              2016-05-31 12:09:52 +0000
commit     e70324f8dd1f191556599cf60100dd0ad0b16708 (patch)
tree       7b605613143718896810150614538feab022e5a2 /src/corelib/tools/qsimd_p.h
parent     f7e29f07ffead8bd1cb6ff81bb791de2fb11fa5e (diff)
Remove _bit_scan_{forward,reverse}
Use qCountTrailingZeroBits and qCountLeadingZeroBits from qalgorithms.h instead. Also extended these versions for MSVC. The _bit_scan_* versions stem from a time before the glorious days of qalgorithms.h. A big advantage is that these functions can be used on all platforms.

Change-Id: I5a1b886371520310a7fe16e617635ea335046beb
Reviewed-by: Simon Hausmann <simon.hausmann@qt.io>
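A minimal migration sketch for callers of the removed helpers (illustrative only, not part of the commit; the wrapper names are hypothetical):

    #include <QtCore/qalgorithms.h>

    // was: _bit_scan_forward(v) -- index of the lowest set bit
    uint lowestSetBit(quint32 v)  { return qCountTrailingZeroBits(v); }

    // was: _bit_scan_reverse(v) -- index of the highest set bit; requires v != 0,
    // as _bit_scan_reverse did
    uint highestSetBit(quint32 v) { return 31 - qCountLeadingZeroBits(v); }

For example, lowestSetBit(0xf0) == 4 and highestSetBit(0xf0) == 7. The forward scan maps directly; the reverse scan needs the "31 -" adjustment because qCountLeadingZeroBits counts zeros down from the msb rather than returning a bit index.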
Diffstat (limited to 'src/corelib/tools/qsimd_p.h')
-rw-r--r--  src/corelib/tools/qsimd_p.h  53
1 file changed, 0 insertions(+), 53 deletions(-)
diff --git a/src/corelib/tools/qsimd_p.h b/src/corelib/tools/qsimd_p.h
index 48ef686bbd..8cf0c5a4d2 100644
--- a/src/corelib/tools/qsimd_p.h
+++ b/src/corelib/tools/qsimd_p.h
@@ -465,59 +465,6 @@ static inline quint64 qCpuFeatures()
#define qCpuHasFeature(feature) ((qCompilerCpuFeatures & (Q_UINT64_C(1) << CpuFeature ## feature)) \
|| (qCpuFeatures() & (Q_UINT64_C(1) << CpuFeature ## feature)))
-#if QT_HAS_BUILTIN(__builtin_clz) && QT_HAS_BUILTIN(__builtin_ctz) && defined(Q_CC_CLANG) && !defined(Q_CC_INTEL)
-static Q_ALWAYS_INLINE unsigned _bit_scan_reverse(unsigned val)
-{
- Q_ASSERT(val != 0); // if val==0, the result is undefined.
- unsigned result = static_cast<unsigned>(__builtin_clz(val)); // Count Leading Zeros
- // Now invert the result: clz counts down from the msb, so in clz terms the msb has index 0
- // (counting starts there) and the lsb has index 31. _bit_scan_reverse must return the
- // conventional bit index (lsb = 0, msb = 31), and XOR with 31 converts one into the other.
- result ^= sizeof(unsigned) * 8 - 1;
- return result;
-}
-static Q_ALWAYS_INLINE unsigned _bit_scan_forward(unsigned val)
-{
- Q_ASSERT(val != 0); // if val==0, the result is undefined.
- return static_cast<unsigned>(__builtin_ctz(val)); // Count Trailing Zeros
-}
-#elif defined(Q_PROCESSOR_X86)
-// Bit scan functions for x86
-# if defined(Q_CC_MSVC)
-// MSVC calls it _BitScanReverse and returns whether any bit was set, which we don't need
-static __forceinline unsigned long _bit_scan_reverse(uint val)
-{
- unsigned long result;
- _BitScanReverse(&result, val);
- return result;
-}
-static __forceinline unsigned long _bit_scan_forward(uint val)
-{
- unsigned long result;
- _BitScanForward(&result, val);
- return result;
-}
-# elif (defined(Q_CC_CLANG) || (defined(Q_CC_GNU) && Q_CC_GNU < 405)) \
- && !defined(Q_CC_INTEL)
-// Clang is missing the intrinsic for _bit_scan_reverse
-// GCC only added it in version 4.5
-static inline __attribute__((always_inline))
-unsigned _bit_scan_reverse(unsigned val)
-{
- unsigned result;
- asm("bsr %1, %0" : "=r" (result) : "r" (val));
- return result;
-}
-static inline __attribute__((always_inline))
-unsigned _bit_scan_forward(unsigned val)
-{
- unsigned result;
- asm("bsf %1, %0" : "=r" (result) : "r" (val));
- return result;
-}
-# endif
-#endif // Q_PROCESSOR_X86
-
#define ALIGNMENT_PROLOGUE_16BYTES(ptr, i, length) \
for (; i < static_cast<int>(qMin(static_cast<quintptr>(length), ((4 - ((reinterpret_cast<quintptr>(ptr) >> 2) & 0x3)) & 0x3))); ++i)
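A note on the removed clz-based _bit_scan_reverse: the "result ^= sizeof(unsigned) * 8 - 1" trick works because 31 is all-ones in the low five bits, so c ^ 31 == 31 - c for any c in [0, 31]. A standalone sketch of the same computation (illustrative, assuming the GCC/Clang builtin):

    #include <cassert>

    static unsigned msbIndex(unsigned val)
    {
        assert(val != 0);                          // __builtin_clz(0) is undefined
        unsigned c = unsigned(__builtin_clz(val));
        assert((c ^ 31u) == 31u - c);              // XOR with 0b11111 == subtraction from 31
        return c ^ 31u;                            // bit index of the highest set bit (lsb == 0)
    }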
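The surviving ALIGNMENT_PROLOGUE_16BYTES macro expands to a for-statement header that runs at most three iterations: (4 - ((ptr >> 2) & 0x3)) & 0x3 is the number of 4-byte elements between ptr and the next 16-byte boundary, assuming ptr is at least 4-byte aligned. A hypothetical caller (names are illustrative; the private header qsimd_p.h is assumed to be included):

    int i = 0;
    ALIGNMENT_PROLOGUE_16BYTES(src, i, length) {
        dst[i] = src[i];    // scalar head loop, at most 3 iterations
    }
    // here either i == length, or src + i is 16-byte aligned and a SIMD main loop can take over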