path: root/src/corelib/arch/qatomic_x86_64.h
author    Thiago Macieira <thiago@kde.org>  2011-07-31 18:38:00 -0300
committer Qt by Nokia <qt-info@nokia.com>  2012-01-24 01:08:28 +0100
commit    ba660ea7548d86147afe30f314f67527c22c267b (patch)
tree      2c4471ee8137467294ca89fd27e68d39aab9a2c9 /src/corelib/arch/qatomic_x86_64.h
parent    161d5eb3239b52e6f19942721491f2632305e426 (diff)
Port the i386 and x86-64 atomics to the new QBasicAtomic architecture
Both implementations are now very similar to one another, to the point that we could share the code if we wanted to. They are based on assembly code for the Relaxed functions only, as the i386 and x86-64 architectures only allow for full memory ordering or something that closely resembles it (see section 8.2 "Memory Ordering" in the Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A). We could add "lfence/mfence/sfence" in future versions if we wanted to (SSE2+).

Change-Id: I76966d9f8694edfece2c5ebd3387348fac721447
Reviewed-by: Bradley T. Hughes <bradley.hughes@nokia.com>
Reviewed-by: Thiago Macieira <thiago.macieira@intel.com>
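For illustration, here is a minimal, self-contained sketch of the idea behind the port. The names GenericAtomicOps and X86_64AtomicOps are assumed stand-ins, not the real QGenericAtomicOps/QBasicAtomicOps classes: the backend implements only the Relaxed variant in assembly, and the generic base forwards the Acquire/Release/Ordered variants to it, which is safe on x86-64 because a lock-prefixed cmpxchg is already fully ordered.

    // Standalone sketch only: GenericAtomicOps and X86_64AtomicOps are assumed
    // stand-ins for QGenericAtomicOps / QBasicAtomicOps, not the real Qt classes.
    #include <cassert>

    template <typename BaseClass>
    struct GenericAtomicOps
    {
        // The generic layer derives the stronger orderings from the Relaxed one;
        // on x86-64 this costs nothing because "lock cmpxchg" is a full barrier.
        template <typename T> static bool testAndSetAcquire(T &v, T expected, T desired)
        { return BaseClass::testAndSetRelaxed(v, expected, desired); }
        template <typename T> static bool testAndSetRelease(T &v, T expected, T desired)
        { return BaseClass::testAndSetRelaxed(v, expected, desired); }
        template <typename T> static bool testAndSetOrdered(T &v, T expected, T desired)
        { return BaseClass::testAndSetRelaxed(v, expected, desired); }
    };

    struct X86_64AtomicOps : GenericAtomicOps<X86_64AtomicOps>
    {
        // Only the Relaxed variant carries real assembly, mirroring this patch.
        template <typename T> static bool testAndSetRelaxed(T &v, T expected, T desired)
        {
            unsigned char ret;
            asm volatile("lock\n"
                         "cmpxchg %3,%2\n"
                         "sete %1\n"
                         : "=a" (desired), "=qm" (ret), "+m" (v)
                         : "r" (desired), "0" (expected)
                         : "memory");
            return ret != 0;
        }
    };

    int main()
    {
        int value = 1;
        assert(X86_64AtomicOps::testAndSetOrdered(value, 1, 2)); // forwarded to Relaxed
        assert(value == 2);
        return 0;
    }

Compiled with GCC or Clang on x86-64, the Ordered call above executes exactly the same lock cmpxchg as the Relaxed one; no extra fence is emitted.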
Diffstat (limited to 'src/corelib/arch/qatomic_x86_64.h')
-rw-r--r--  src/corelib/arch/qatomic_x86_64.h | 410
1 file changed, 230 insertions(+), 180 deletions(-)
diff --git a/src/corelib/arch/qatomic_x86_64.h b/src/corelib/arch/qatomic_x86_64.h
index 418248a43e..342cd12294 100644
--- a/src/corelib/arch/qatomic_x86_64.h
+++ b/src/corelib/arch/qatomic_x86_64.h
@@ -1,7 +1,7 @@
/****************************************************************************
**
** Copyright (C) 2012 Nokia Corporation and/or its subsidiary(-ies).
-** All rights reserved.
+** Copyright (C) 2011 Thiago Macieira <thiago@kde.org>
** Contact: http://www.qt-project.org/
**
** This file is part of the QtCore module of the Qt Toolkit.
@@ -42,79 +42,98 @@
#ifndef QATOMIC_X86_64_H
#define QATOMIC_X86_64_H
+#include <QtCore/qgenericatomic.h>
+
QT_BEGIN_HEADER
QT_BEGIN_NAMESPACE
+#if 0
+#pragma qt_sync_stop_processing
+#endif
+
#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_WAIT_FREE
-inline bool QBasicAtomicInt::isReferenceCountingNative()
-{ return true; }
-inline bool QBasicAtomicInt::isReferenceCountingWaitFree()
-{ return true; }
-
#define Q_ATOMIC_INT_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_TEST_AND_SET_IS_WAIT_FREE
-inline bool QBasicAtomicInt::isTestAndSetNative()
-{ return true; }
-inline bool QBasicAtomicInt::isTestAndSetWaitFree()
-{ return true; }
-
#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_WAIT_FREE
-inline bool QBasicAtomicInt::isFetchAndStoreNative()
-{ return true; }
-inline bool QBasicAtomicInt::isFetchAndStoreWaitFree()
-{ return true; }
-
#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_ALWAYS_NATIVE
#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_WAIT_FREE
-inline bool QBasicAtomicInt::isFetchAndAddNative()
-{ return true; }
-inline bool QBasicAtomicInt::isFetchAndAddWaitFree()
-{ return true; }
+#define Q_ATOMIC_INT32_IS_SUPPORTED
+
+#define Q_ATOMIC_INT32_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT32_REFERENCE_COUNTING_IS_WAIT_FREE
+
+#define Q_ATOMIC_INT32_TEST_AND_SET_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT32_TEST_AND_SET_IS_WAIT_FREE
+
+#define Q_ATOMIC_INT32_FETCH_AND_STORE_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT32_FETCH_AND_STORE_IS_WAIT_FREE
+
+#define Q_ATOMIC_INT32_FETCH_AND_ADD_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT32_FETCH_AND_ADD_IS_WAIT_FREE
#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_WAIT_FREE
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isTestAndSetNative()
-{ return true; }
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isTestAndSetWaitFree()
-{ return true; }
-
#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_WAIT_FREE
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndStoreNative()
-{ return true; }
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndStoreWaitFree()
-{ return true; }
-
#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_ALWAYS_NATIVE
#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_WAIT_FREE
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddNative()
-{ return true; }
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddWaitFree()
-{ return true; }
+template<> struct QAtomicIntegerTraits<int> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<unsigned int> { enum { IsInteger = 1 }; };
+
+template <int size> struct QBasicAtomicOps: QGenericAtomicOps<QBasicAtomicOps<size> >
+{
+ static inline bool isReferenceCountingNative() { return true; }
+ static inline bool isReferenceCountingWaitFree() { return true; }
+ template <typename T> static bool ref(T &_q_value);
+ template <typename T> static bool deref(T &_q_value);
+
+ static inline bool isTestAndSetNative() { return true; }
+ static inline bool isTestAndSetWaitFree() { return true; }
+ template <typename T> static bool testAndSetRelaxed(T &_q_value, T expectedValue, T newValue);
+
+ static inline bool isFetchAndStoreNative() { return true; }
+ static inline bool isFetchAndStoreWaitFree() { return true; }
+ template <typename T> static T fetchAndStoreRelaxed(T &_q_value, T newValue);
+
+ static inline bool isFetchAndAddNative() { return true; }
+ static inline bool isFetchAndAddWaitFree() { return true; }
+ template <typename T> static
+ T fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd);
+};
+
+template <typename T> struct QAtomicOps : QBasicAtomicOps<sizeof(T)>
+{
+ typedef T Type;
+};
#if defined(Q_CC_GNU) || defined(Q_CC_INTEL)
-inline bool QBasicAtomicInt::ref()
+template<> struct QAtomicIntegerTraits<char> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<signed char> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<unsigned char> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<short> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<unsigned short> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<long> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<unsigned long> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<long long> { enum { IsInteger = 1 }; };
+template<> struct QAtomicIntegerTraits<unsigned long long> { enum { IsInteger = 1 }; };
+
+template<> template<typename T> inline
+bool QBasicAtomicOps<1>::ref(T &_q_value)
{
unsigned char ret;
asm volatile("lock\n"
- "incl %0\n"
+ "incb %0\n"
"setne %1"
: "=m" (_q_value), "=qm" (ret)
: "m" (_q_value)
@@ -122,11 +141,12 @@ inline bool QBasicAtomicInt::ref()
return ret != 0;
}
-inline bool QBasicAtomicInt::deref()
+template<> template<typename T> inline
+bool QBasicAtomicOps<2>::ref(T &_q_value)
{
unsigned char ret;
asm volatile("lock\n"
- "decl %0\n"
+ "incw %0\n"
"setne %1"
: "=m" (_q_value), "=qm" (ret)
: "m" (_q_value)
@@ -134,228 +154,258 @@ inline bool QBasicAtomicInt::deref()
return ret != 0;
}
-inline bool QBasicAtomicInt::testAndSetOrdered(int expectedValue, int newValue)
+template<> template<typename T> inline
+bool QBasicAtomicOps<4>::ref(T &_q_value)
{
unsigned char ret;
asm volatile("lock\n"
- "cmpxchgl %3,%2\n"
- "sete %1\n"
- : "=a" (newValue), "=qm" (ret), "+m" (_q_value)
- : "r" (newValue), "0" (expectedValue)
+ "incl %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
return ret != 0;
}
-inline int QBasicAtomicInt::fetchAndStoreOrdered(int newValue)
+template<> template<typename T> inline
+bool QBasicAtomicOps<8>::ref(T &_q_value)
{
- asm volatile("xchgl %0,%1"
- : "=r" (newValue), "+m" (_q_value)
- : "0" (newValue)
+ unsigned char ret;
+ asm volatile("lock\n"
+ "incq %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
- return newValue;
+ return ret != 0;
}
-inline int QBasicAtomicInt::fetchAndAddOrdered(int valueToAdd)
+template<> template <typename T> inline
+bool QBasicAtomicOps<1>::deref(T &_q_value)
{
+ unsigned char ret;
asm volatile("lock\n"
- "xaddl %0,%1"
- : "=r" (valueToAdd), "+m" (_q_value)
- : "0" (valueToAdd)
+ "decb %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
- return valueToAdd;
+ return ret != 0;
}
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetOrdered(T *expectedValue, T *newValue)
+template<> template <typename T> inline
+bool QBasicAtomicOps<2>::deref(T &_q_value)
{
unsigned char ret;
asm volatile("lock\n"
- "cmpxchgq %3,%2\n"
- "sete %1\n"
- : "=a" (newValue), "=qm" (ret), "+m" (_q_value)
- : "r" (newValue), "0" (expectedValue)
+ "decw %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
return ret != 0;
}
-
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreOrdered(T *newValue)
+template<> template <typename T> inline
+bool QBasicAtomicOps<4>::deref(T &_q_value)
{
- asm volatile("xchgq %0,%1"
- : "=r" (newValue), "+m" (_q_value)
- : "0" (newValue)
+ unsigned char ret;
+ asm volatile("lock\n"
+ "decl %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
- return newValue;
+ return ret != 0;
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddOrdered(qptrdiff valueToAdd)
+template<> template <typename T> inline
+bool QBasicAtomicOps<8>::deref(T &_q_value)
{
+ unsigned char ret;
asm volatile("lock\n"
- "xaddq %0,%1"
- : "=r" (valueToAdd), "+m" (_q_value)
- : "0" (valueToAdd * sizeof(T))
+ "decq %0\n"
+ "setne %1"
+ : "=m" (_q_value), "=qm" (ret)
+ : "m" (_q_value)
: "memory");
- return reinterpret_cast<T *>(valueToAdd);
+ return ret != 0;
}
-#else // !Q_CC_INTEL && !Q_CC_GNU
-
-extern "C" {
- Q_CORE_EXPORT int q_atomic_test_and_set_int(volatile int *ptr, int expected, int newval);
- Q_CORE_EXPORT int q_atomic_test_and_set_ptr(volatile void *ptr, void *expected, void *newval);
- Q_CORE_EXPORT int q_atomic_increment(volatile int *ptr);
- Q_CORE_EXPORT int q_atomic_decrement(volatile int *ptr);
- Q_CORE_EXPORT int q_atomic_set_int(volatile int *ptr, int newval);
- Q_CORE_EXPORT void *q_atomic_set_ptr(volatile void *ptr, void *newval);
- Q_CORE_EXPORT int q_atomic_fetch_and_add_int(volatile int *ptr, int value);
- Q_CORE_EXPORT void *q_atomic_fetch_and_add_ptr(volatile void *ptr, qptrdiff value);
-} // extern "C"
-
-inline bool QBasicAtomicInt::ref()
+template<int size> template <typename T> inline
+bool QBasicAtomicOps<size>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue)
{
- return q_atomic_increment(&_q_value) != 0;
+ unsigned char ret;
+ asm volatile("lock\n"
+ "cmpxchg %3,%2\n"
+ "sete %1\n"
+ : "=a" (newValue), "=qm" (ret), "+m" (_q_value)
+ : "r" (newValue), "0" (expectedValue)
+ : "memory");
+ return ret != 0;
}
-inline bool QBasicAtomicInt::deref()
+template<> template <typename T> inline
+bool QBasicAtomicOps<1>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue)
{
- return q_atomic_decrement(&_q_value) != 0;
+ unsigned char ret;
+ asm volatile("lock\n"
+ "cmpxchg %3,%2\n"
+ "sete %1\n"
+ : "=a" (newValue), "=qm" (ret), "+m" (_q_value)
+ : "q" (newValue), "0" (expectedValue)
+ : "memory");
+ return ret != 0;
}
-inline bool QBasicAtomicInt::testAndSetOrdered(int expected, int newval)
+template<int size> template <typename T> inline
+T QBasicAtomicOps<size>::fetchAndStoreRelaxed(T &_q_value, T newValue)
{
- return q_atomic_test_and_set_int(&_q_value, expected, newval) != 0;
+ asm volatile("xchg %0,%1"
+ : "=r" (newValue), "+m" (_q_value)
+ : "0" (newValue)
+ : "memory");
+ return newValue;
}
-inline int QBasicAtomicInt::fetchAndStoreOrdered(int newval)
+template<> template <typename T> inline
+T QBasicAtomicOps<1>::fetchAndStoreRelaxed(T &_q_value, T newValue)
{
- return q_atomic_set_int(&_q_value, newval);
+ asm volatile("xchg %0,%1"
+ : "=q" (newValue), "+m" (_q_value)
+ : "0" (newValue)
+ : "memory");
+ return newValue;
}
-inline int QBasicAtomicInt::fetchAndAddOrdered(int aValue)
+template<int size> template <typename T> inline
+T QBasicAtomicOps<size>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd)
{
- return q_atomic_fetch_and_add_int(&_q_value, aValue);
+ T result;
+ asm volatile("lock\n"
+ "xadd %0,%1"
+ : "=r" (result), "+m" (_q_value)
+ : "0" (valueToAdd * QAtomicAdditiveType<T>::AddScale)
+ : "memory");
+ return result;
}
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetOrdered(T *expectedValue, T *newValue)
+template<> template <typename T> inline
+T QBasicAtomicOps<1>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd)
{
- return q_atomic_test_and_set_ptr(&_q_value, expectedValue, newValue);
+ T result;
+ asm volatile("lock\n"
+ "xadd %0,%1"
+ : "=q" (result), "+m" (_q_value)
+ : "0" (valueToAdd * QAtomicAdditiveType<T>::AddScale)
+ : "memory");
+ return result;
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreOrdered(T *newValue)
-{
- return reinterpret_cast<T *>(q_atomic_set_ptr(&_q_value, newValue));
-}
+#define Q_ATOMIC_INT8_IS_SUPPORTED
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddOrdered(qptrdiff valueToAdd)
-{
- return reinterpret_cast<T *>(q_atomic_fetch_and_add_ptr(&_q_value, valueToAdd * sizeof(T)));
-}
+#define Q_ATOMIC_INT8_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT8_REFERENCE_COUNTING_IS_WAIT_FREE
-#endif // Q_CC_GNU || Q_CC_INTEL
+#define Q_ATOMIC_INT8_TEST_AND_SET_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT8_TEST_AND_SET_IS_WAIT_FREE
-inline bool QBasicAtomicInt::testAndSetRelaxed(int expectedValue, int newValue)
-{
- return testAndSetOrdered(expectedValue, newValue);
-}
+#define Q_ATOMIC_INT8_FETCH_AND_STORE_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT8_FETCH_AND_STORE_IS_WAIT_FREE
-inline bool QBasicAtomicInt::testAndSetAcquire(int expectedValue, int newValue)
-{
- return testAndSetOrdered(expectedValue, newValue);
-}
+#define Q_ATOMIC_INT8_FETCH_AND_ADD_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT8_FETCH_AND_ADD_IS_WAIT_FREE
-inline bool QBasicAtomicInt::testAndSetRelease(int expectedValue, int newValue)
-{
- return testAndSetOrdered(expectedValue, newValue);
-}
+#define Q_ATOMIC_INT16_IS_SUPPORTED
-inline int QBasicAtomicInt::fetchAndStoreRelaxed(int newValue)
-{
- return fetchAndStoreOrdered(newValue);
-}
+#define Q_ATOMIC_INT16_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT16_REFERENCE_COUNTING_IS_WAIT_FREE
-inline int QBasicAtomicInt::fetchAndStoreAcquire(int newValue)
-{
- return fetchAndStoreOrdered(newValue);
-}
+#define Q_ATOMIC_INT16_TEST_AND_SET_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT16_TEST_AND_SET_IS_WAIT_FREE
-inline int QBasicAtomicInt::fetchAndStoreRelease(int newValue)
-{
- return fetchAndStoreOrdered(newValue);
-}
+#define Q_ATOMIC_INT16_FETCH_AND_STORE_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT16_FETCH_AND_STORE_IS_WAIT_FREE
-inline int QBasicAtomicInt::fetchAndAddRelaxed(int valueToAdd)
-{
- return fetchAndAddOrdered(valueToAdd);
-}
+#define Q_ATOMIC_INT16_FETCH_AND_ADD_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT16_FETCH_AND_ADD_IS_WAIT_FREE
-inline int QBasicAtomicInt::fetchAndAddAcquire(int valueToAdd)
-{
- return fetchAndAddOrdered(valueToAdd);
-}
+#define Q_ATOMIC_INT64_IS_SUPPORTED
-inline int QBasicAtomicInt::fetchAndAddRelease(int valueToAdd)
-{
- return fetchAndAddOrdered(valueToAdd);
-}
+#define Q_ATOMIC_INT64_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT64_REFERENCE_COUNTING_IS_WAIT_FREE
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelaxed(T *expectedValue, T *newValue)
-{
- return testAndSetOrdered(expectedValue, newValue);
-}
+#define Q_ATOMIC_INT64_TEST_AND_SET_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT64_TEST_AND_SET_IS_WAIT_FREE
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetAcquire(T *expectedValue, T *newValue)
+#define Q_ATOMIC_INT64_FETCH_AND_STORE_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT64_FETCH_AND_STORE_IS_WAIT_FREE
+
+#define Q_ATOMIC_INT64_FETCH_AND_ADD_IS_ALWAYS_NATIVE
+#define Q_ATOMIC_INT64_FETCH_AND_ADD_IS_WAIT_FREE
+
+#else // !Q_CC_INTEL && !Q_CC_GNU
+
+extern "C" {
+ Q_CORE_EXPORT int q_atomic_test_and_set_int(volatile int *ptr, int expected, int newval);
+ Q_CORE_EXPORT int q_atomic_test_and_set_ptr(volatile void *ptr, void *expected, void *newval);
+ Q_CORE_EXPORT int q_atomic_increment(volatile int *ptr);
+ Q_CORE_EXPORT int q_atomic_decrement(volatile int *ptr);
+ Q_CORE_EXPORT int q_atomic_set_int(volatile int *ptr, int newval);
+ Q_CORE_EXPORT void *q_atomic_set_ptr(volatile void *ptr, void *newval);
+ Q_CORE_EXPORT int q_atomic_fetch_and_add_int(volatile int *ptr, int value);
+ Q_CORE_EXPORT void *q_atomic_fetch_and_add_ptr(volatile void *ptr, qptrdiff value);
+} // extern "C"
+
+template<> template<typename T> inline
+bool QBasicAtomicOps<4>::ref(T &_q_value)
{
- return testAndSetOrdered(expectedValue, newValue);
+ return q_atomic_increment((int *)&_q_value) != 0;
}
-template <typename T>
-Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelease(T *expectedValue, T *newValue)
+template<> template <typename T> inline
+bool QBasicAtomicOps<4>::deref(T &_q_value)
{
- return testAndSetOrdered(expectedValue, newValue);
+ return q_atomic_decrement((int *)&_q_value) != 0;
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelaxed(T *newValue)
+template<> template <typename T> inline
+bool QBasicAtomicOps<4>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue)
{
- return fetchAndStoreOrdered(newValue);
+ return q_atomic_test_and_set_int((int*)&_q_value, int(expectedValue), int(newValue));
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreAcquire(T *newValue)
+template<> template <typename T> inline
+T QBasicAtomicOps<4>::fetchAndStoreRelaxed(T &_q_value, T newValue)
{
- return fetchAndStoreOrdered(newValue);
+ return T(q_atomic_set_int((int*)&_q_value, int(newValue)));
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelease(T *newValue)
+template<> template <typename T> inline
+T QBasicAtomicOps<4>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd)
{
- return fetchAndStoreOrdered(newValue);
+ return T(q_atomic_fetch_and_add_int((int *)&_q_value, valueToAdd * QAtomicAdditiveType<T>::AddScale));
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelaxed(qptrdiff valueToAdd)
+template<> template <typename T> inline
+bool QBasicAtomicOps<8>::testAndSetRelaxed(T &_q_value, T expectedValue, T newValue)
{
- return fetchAndAddOrdered(valueToAdd);
+ return q_atomic_test_and_set_ptr(&_q_value, (void*)expectedValue, (void*)newValue);
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddAcquire(qptrdiff valueToAdd)
+template<> template <typename T> inline
+T QBasicAtomicOps<8>::fetchAndStoreRelaxed(T &_q_value, T newValue)
{
- return fetchAndAddOrdered(valueToAdd);
+ return T(q_atomic_set_ptr(&_q_value, (void*)newValue));
}
-template <typename T>
-Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelease(qptrdiff valueToAdd)
+template<> template <typename T> inline
+T QBasicAtomicOps<8>::fetchAndAddRelaxed(T &_q_value, typename QAtomicAdditiveType<T>::AdditiveT valueToAdd)
{
- return fetchAndAddOrdered(valueToAdd);
+ return T(q_atomic_fetch_and_add_ptr(&_q_value, valueToAdd * QAtomicAdditiveType<T>::AddScale));
}
+#endif // Q_CC_GNU || Q_CC_INTEL
+
QT_END_NAMESPACE
QT_END_HEADER