git.ipfire.org Git - thirdparty/haproxy.git/commitdiff
MEDIUM: threads: Use __ATOMIC_SEQ_CST when using the newer atomic API.
author    Olivier Houchard <ohouchard@haproxy.com>
          Thu, 7 Mar 2019 17:48:22 +0000 (18:48 +0100)
committer Olivier Houchard <cognet@ci0.org>
          Mon, 11 Mar 2019 16:02:37 +0000 (17:02 +0100)
When using the new __atomic* API, ask the compiler to generate barriers by
passing __ATOMIC_SEQ_CST. A variant of these functions that does not generate
barriers will be added later. Before this change, using HA_ATOMIC* did not
generate any barrier, so the affected parts of the code should be reviewed and
missing barriers added.
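
For illustration only (not part of the patch): the last argument of the gcc
__atomic builtins is the memory-order model, and the literal 0 previously
passed is __ATOMIC_RELAXED, which implies no ordering at all. A minimal sketch
of what HA_ATOMIC_ADD(&counter, 1) expands to before and after this change:

	unsigned int counter;

	/* before: memory order 0 is __ATOMIC_RELAXED, no barrier implied */
	__atomic_add_fetch(&counter, 1, __ATOMIC_RELAXED);

	/* after: sequentially consistent, acts as a full barrier */
	__atomic_add_fetch(&counter, 1, __ATOMIC_SEQ_CST);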

This should probably be backported to 1.8 and 1.9.
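
A minimal usage sketch (not from the patch), assuming a gcc >= 4.7 build so
the __atomic-based definitions shown in the diff below apply; the "sessions"
counter and both functions are hypothetical:

	#include <common/hathreads.h>

	static unsigned int sessions;

	/* Increment a shared counter; with this patch the macro expands to
	 * __atomic_add_fetch(&sessions, 1, __ATOMIC_SEQ_CST), so it also
	 * acts as a full barrier. */
	void count_session(void)
	{
		HA_ATOMIC_ADD(&sessions, 1);
	}

	/* Compare-and-swap: <old> is passed by pointer to the builtin and is
	 * overwritten with the current value on failure. Returns non-zero on
	 * success. */
	static int try_set_flag(unsigned int *flags, unsigned int old, unsigned int new)
	{
		return HA_ATOMIC_CAS(flags, &old, new);
	}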

include/common/hathreads.h

index 77d5c6f380d8f9ca6d1f84aeb8a68885810c3be8..df3fb66c2b429bbd4070de900de71e52032c70ff 100644 (file)
@@ -277,12 +277,12 @@ static inline unsigned long thread_isolated()
        })
 #else
 /* gcc >= 4.7 */
-#define HA_ATOMIC_CAS(val, old, new) __atomic_compare_exchange_n(val, old, new, 0, 0, 0)
-#define HA_ATOMIC_ADD(val, i)        __atomic_add_fetch(val, i, 0)
-#define HA_ATOMIC_XADD(val, i)       __atomic_fetch_add(val, i, 0)
-#define HA_ATOMIC_SUB(val, i)        __atomic_sub_fetch(val, i, 0)
-#define HA_ATOMIC_AND(val, flags)    __atomic_and_fetch(val, flags, 0)
-#define HA_ATOMIC_OR(val, flags)     __atomic_or_fetch(val,  flags, 0)
+#define HA_ATOMIC_CAS(val, old, new) __atomic_compare_exchange_n(val, old, new, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_ADD(val, i)        __atomic_add_fetch(val, i, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_XADD(val, i)       __atomic_fetch_add(val, i, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_SUB(val, i)        __atomic_sub_fetch(val, i, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_AND(val, flags)    __atomic_and_fetch(val, flags, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_OR(val, flags)     __atomic_or_fetch(val,  flags, __ATOMIC_SEQ_CST)
 #define HA_ATOMIC_BTS(val, bit)                                                \
        ({                                                              \
                typeof(*(val)) __b_bts = (1UL << (bit));                \
@@ -295,8 +295,8 @@ static inline unsigned long thread_isolated()
                __sync_fetch_and_and((val), ~__b_btr) & __b_btr;        \
        })
 
-#define HA_ATOMIC_XCHG(val, new)     __atomic_exchange_n(val, new, 0)
-#define HA_ATOMIC_STORE(val, new)    __atomic_store_n(val, new, 0)
+#define HA_ATOMIC_XCHG(val, new)     __atomic_exchange_n(val, new, __ATOMIC_SEQ_CST)
+#define HA_ATOMIC_STORE(val, new)    __atomic_store_n(val, new, __ATOMIC_SEQ_CST)
 #endif
 
 #define HA_ATOMIC_UPDATE_MAX(val, new)                                 \