INTERNAL_SYSCALL_ERROR_P (__ret, __err); \
})
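+/* Note that the owner of FUTEXV died: atomically set FUTEX_OWNER_DIED
+   in the futex word and wake one waiter so it can observe the flag.  */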
+#define lll_robust_mutex_dead(futexv) \
+ do \
+ { \
+ int *__futexp = &(futexv); \
+ atomic_or (__futexp, FUTEX_OWNER_DIED); \
+ lll_futex_wake (__futexp, 1); \
+ } \
+ while (0)
+
/* Returns non-zero if error happened, zero if success. */
#ifdef __sparc32_atomic_do_lock
/* Avoid FUTEX_WAKE_OP if supporting pre-v9 CPUs. */
}
#define lll_mutex_cond_trylock(futex) __lll_mutex_cond_trylock (&(futex))
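+/* Robust trylock: install the caller's ID if the futex word is zero.
+   Returns zero on success, non-zero otherwise.  */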
+static inline int
+__attribute__ ((always_inline))
+__lll_robust_mutex_trylock (int *futex, int id)
+{
+ return atomic_compare_and_exchange_val_acq (futex, id, 0) != 0;
+}
+#define lll_robust_mutex_trylock(futex, id) \
+ __lll_robust_mutex_trylock (&(futex), id)
+
extern void __lll_lock_wait (int *futex) attribute_hidden;
-
+extern int __lll_robust_lock_wait (int *futex) attribute_hidden;
static inline void
__attribute__ ((always_inline))
__lll_mutex_lock (int *futex)
{
  if (atomic_compare_and_exchange_bool_acq (futex, 1, 0) != 0)
    __lll_lock_wait (futex);
}
#define lll_mutex_lock(futex) __lll_mutex_lock (&(futex))
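+/* Robust lock: the fast path installs ID as the owner; on contention
+   fall back to __lll_robust_lock_wait and pass its result through.  */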
+static inline int
+__attribute__ ((always_inline))
+__lll_robust_mutex_lock (int *futex, int id)
+{
+ int result = 0;
+ if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
+ result = __lll_robust_lock_wait (futex);
+ return result;
+}
+#define lll_robust_mutex_lock(futex, id) \
+ __lll_robust_mutex_lock (&(futex), id)
static inline void
__attribute__ ((always_inline))
__lll_mutex_cond_lock (int *futex)
{
  if (atomic_compare_and_exchange_bool_acq (futex, 2, 0) != 0)
    __lll_lock_wait (futex);
}
#define lll_mutex_cond_lock(futex) __lll_mutex_cond_lock (&(futex))
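+/* Robust cond-lock: like lll_robust_mutex_lock, but FUTEX_WAITERS is
+   set in the stored owner ID so the unlock always wakes a waiter.  */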
+#define lll_robust_mutex_cond_lock(futex, id) \
+ __lll_robust_mutex_lock (&(futex), (id) | FUTEX_WAITERS)
+
extern int __lll_timedlock_wait (int *futex, const struct timespec *)
attribute_hidden;
-
+extern int __lll_robust_timedlock_wait (int *futex, const struct timespec *)
+ attribute_hidden;
static inline int
__attribute__ ((always_inline))
__lll_mutex_timedlock (int *futex, const struct timespec *abstime)
{
  int result = 0;
  if (atomic_compare_and_exchange_bool_acq (futex, 1, 0) != 0)
    result = __lll_timedlock_wait (futex, abstime);
  return result;
}
#define lll_mutex_timedlock(futex, abstime) \
  __lll_mutex_timedlock (&(futex), abstime)
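+/* Robust timed lock: as __lll_robust_mutex_lock, but the slow path
+   gives up once ABSTIME has passed.  */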
+static inline int
+__attribute__ ((always_inline))
+__lll_robust_mutex_timedlock (int *futex, const struct timespec *abstime,
+ int id)
+{
+ int result = 0;
+ if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
+ result = __lll_robust_timedlock_wait (futex, abstime);
+ return result;
+}
+#define lll_robust_mutex_timedlock(futex, abstime, id) \
+ __lll_robust_mutex_timedlock (&(futex), abstime, id)
+
#define lll_mutex_unlock(lock) \
  ((void) ({ \
    int *__futex = &(lock); \
    int __val = atomic_exchange_rel (__futex, 0); \
    if (__builtin_expect (__val > 1, 0)) \
      lll_futex_wake (__futex, 1); \
  }))
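+/* Robust unlock: clear the owner word with release semantics and wake
+   one waiter if FUTEX_WAITERS was set in the old value.  */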
+#define lll_robust_mutex_unlock(lock) \
+ ((void) ({ \
+ int *__futex = &(lock); \
+ int __val = atomic_exchange_rel (__futex, 0); \
+ if (__builtin_expect (__val & FUTEX_WAITERS, 0)) \
+ lll_futex_wake (__futex, 1); \
+ }))
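+/* Illustrative use only (the real callers are the pthread_mutex_*
+   functions, which do additional bookkeeping): lock with the caller's
+   TID as the ID,
+     int id = THREAD_GETMEM (THREAD_SELF, tid);
+     int val = lll_robust_mutex_lock (mutex->__data.__lock, id);
+   where a non-zero VAL means the previous owner died holding the lock
+   (FUTEX_OWNER_DIED is set) and the protected state may need recovery;
+   release again with lll_robust_mutex_unlock (mutex->__data.__lock).  */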
+
#define lll_mutex_unlock_force(lock) \
((void) ({ \
int *__futex = &(lock); \