2018-03-07 Adhemerval Zanella <adhemerval.zanella@linaro.org>
+ [BZ #22926]
+ * sysdeps/powerpc/powerpc32/sysdep.h (ABORT_TRANSACTION_IMPL): Define
+ empty for __SPE__.
+ * sysdeps/powerpc/sysdep.h (ABORT_TRANSACTION): Likewise.
+ * sysdeps/unix/sysv/linux/powerpc/elision-lock.c (__lll_lock_elision):
+ Do not build hardware transactional code for __SPE__.
+ * sysdeps/unix/sysv/linux/powerpc/elision-trylock.c
+ (__lll_trylock_elision): Likewise.
+ * sysdeps/unix/sysv/linux/powerpc/elision-unlock.c
+ (__lll_unlock_elision): Likewise.
+
* sysdeps/nptl/fork.c (ARCH_FORK): Replace by arch_fork.
* sysdeps/unix/sysv/linux/alpha/arch-fork.h: Remove file.
* sysdeps/unix/sysv/linux/riscv/arch-fork.h: Likewise.
cfi_endproc; \
ASM_SIZE_DIRECTIVE(name)
-#if ! IS_IN(rtld)
+#if !IS_IN(rtld) && !defined(__SPE__)
# define ABORT_TRANSACTION_IMPL \
cmpwi 2,0; \
beq 1f; \
we abort transaction just before syscalls.
[1] Documentation/powerpc/transactional_memory.txt [Syscalls] */
-#if !IS_IN(rtld)
+#if !IS_IN(rtld) && !defined(__SPE__)
# define ABORT_TRANSACTION \
({ \
if (THREAD_GET_TM_CAPABLE ()) \
int
__lll_lock_elision (int *lock, short *adapt_count, EXTRAARG int pshared)
{
+#ifndef __SPE__
/* adapt_count is accessed concurrently but is just a hint. Thus,
use atomic accesses but relaxed MO is sufficient. */
if (atomic_load_relaxed (adapt_count) > 0)
aconf.skip_lock_out_of_tbegin_retries);
use_lock:
+#endif
return LLL_LOCK ((*lock), pshared);
}
int
__lll_trylock_elision (int *futex, short *adapt_count)
{
+#ifndef __SPE__
+  /* Implement POSIX semantics by forbidding nesting elided trylocks. */
__libc_tabort (_ABORT_NESTED_TRYLOCK);
}
use_lock:
+#endif
return lll_trylock (*futex);
}
int
__lll_unlock_elision (int *lock, short *adapt_count, int pshared)
{
+#ifndef __SPE__
/* When the lock was free we're in a transaction. */
if (*lock == 0)
__libc_tend (0);
lll_unlock ((*lock), pshared);
}
+#else
+ lll_unlock ((*lock), pshared);
+#endif
return 0;
}