+2015-10-01  Kyrylo Tkachov  <kyrylo.tkachov@arm.com>
+
+	Backport from mainline
+	2015-06-09  Shiva Chen  <shiva0217@gmail.com>
+
+	* sync.md (atomic_load<mode>): Add conditional code for lda/ldr.
+	(atomic_store<mode>): Likewise.
+
2015-09-28  Daniel Hellstrom  <daniel@gaisler.com>

	* config/sparc/t-rtems: Remove -muser-mode.  Add ut699, at697f and leon.

--- a/gcc/config/arm/sync.md
+++ b/gcc/config/arm/sync.md
     if (model == MEMMODEL_RELAXED
         || model == MEMMODEL_CONSUME
         || model == MEMMODEL_RELEASE)
-      return \"ldr<sync_sfx>\\t%0, %1\";
+      return \"ldr%(<sync_sfx>%)\\t%0, %1\";
     else
-      return \"lda<sync_sfx>\\t%0, %1\";
+      return \"lda%(<sync_sfx>%)\\t%0, %1\";
   }
-)
+  [(set_attr "predicable" "yes")
+   (set_attr "predicable_short_it" "no")])
(define_insn "atomic_store<mode>"
[(set (match_operand:QHSI 0 "memory_operand" "=Q")
if (model == MEMMODEL_RELAXED
|| model == MEMMODEL_CONSUME
|| model == MEMMODEL_ACQUIRE)
- return \"str<sync_sfx>\t%1, %0\";
+ return \"str%(<sync_sfx>%)\t%1, %0\";
else
- return \"stl<sync_sfx>\t%1, %0\";
+ return \"stl%(<sync_sfx>%)\t%1, %0\";
}
-)
+ [(set_attr "predicable" "yes")
+ (set_attr "predicable_short_it" "no")])
;; Note that ldrd and vldr are *not* guaranteed to be single-copy atomic,
;; even for a 64-bit aligned address.  Instead we use a ldrexd unpaired
;; with a strexd.
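
For context, the kind of access that comment covers, as a sketch (assuming
an ARM target where LDREXD is available; names are illustrative):

/* A 64-bit atomic load: expanded via ldrexd, because a plain ldrd
   or vldr is not single-copy atomic even when the address is
   64-bit aligned.  */
unsigned long long
load64 (unsigned long long *p)
{
  return __atomic_load_n (p, __ATOMIC_RELAXED);
}
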
--- /dev/null
+/* { dg-do compile } */
+/* { dg-require-effective-target arm_arm_ok } */
+/* { dg-require-effective-target arm_arch_v8a_ok } */
+/* { dg-options "-O2 -marm" } */
+/* { dg-add-options arm_arch_v8a } */
+
+struct backtrace_state
+{
+ int threaded;
+ int lock_alloc;
+};
+
+void foo (struct backtrace_state *state)
+{
+ if (state->threaded)
+ __sync_lock_release (&state->lock_alloc);
+}
+
+/* { dg-final { scan-assembler "stlne" } } */
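
The test exercises the store-side change: __sync_lock_release performs a
release store of zero, which expands through atomic_store<mode> as an STL
instruction.  With the pattern now predicable, the if (state->threaded)
guard is if-converted in ARM state, so the scan expects the conditional
form stlne rather than a branch around a plain stl.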