From: Kyrylo Tkachov
Date: Wed, 1 Apr 2020 10:54:14 +0000 (+0100)
Subject: aarch64: Tidy aarch64_split_compare_and_swap
X-Git-Tag: embedded-9-2020q2~54
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=43e46197c10daad6e0aee839c85266c246e78bb2;p=thirdparty%2Fgcc.git

aarch64: Tidy aarch64_split_compare_and_swap

2020-04-01  Kyrylo Tkachov

	Backport from mainline
	2019-09-19  Richard Henderson

	* config/aarch64/aarch64.c (aarch64_split_compare_and_swap): Disable
	strong_zero_p for aarch64_track_speculation; unify some code paths;
	use aarch64_gen_compare_reg instead of open-coding.
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index de711d5192ee..6f1e40c1f1c8 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,12 @@
+2020-04-01  Kyrylo Tkachov
+
+	Backport from mainline
+	2019-09-19  Richard Henderson
+
+	* config/aarch64/aarch64.c (aarch64_split_compare_and_swap): Disable
+	strong_zero_p for aarch64_track_speculation; unify some code paths;
+	use aarch64_gen_compare_reg instead of open-coding.
+
 2020-04-01  Kyrylo Tkachov
 
 	Backport from mainline
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index ae9f567cac7a..d5515f859e5b 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -15573,13 +15573,11 @@ aarch64_split_compare_and_swap (rtx operands[])
   /* Split after prolog/epilog to avoid interactions with shrinkwrapping.  */
   gcc_assert (epilogue_completed);
 
-  rtx rval, mem, oldval, newval, scratch;
+  rtx rval, mem, oldval, newval, scratch, x, model_rtx;
   machine_mode mode;
   bool is_weak;
   rtx_code_label *label1, *label2;
-  rtx x, cond;
   enum memmodel model;
-  rtx model_rtx;
 
   rval = operands[0];
   mem = operands[1];
@@ -15600,7 +15598,8 @@ aarch64_split_compare_and_swap (rtx operands[])
 	CBNZ	scratch, .label1
     .label2:
 	CMP	rval, 0.  */
-  bool strong_zero_p = !is_weak && oldval == const0_rtx && mode != TImode;
+  bool strong_zero_p = (!is_weak && !aarch64_track_speculation &&
+			oldval == const0_rtx && mode != TImode);
 
   label1 = NULL;
   if (!is_weak)
@@ -15613,35 +15612,20 @@ aarch64_split_compare_and_swap (rtx operands[])
   /* The initial load can be relaxed for a __sync operation since a final
      barrier will be emitted to stop code hoisting.  */
   if (is_mm_sync (model))
-    aarch64_emit_load_exclusive (mode, rval, mem,
-				 GEN_INT (MEMMODEL_RELAXED));
+    aarch64_emit_load_exclusive (mode, rval, mem, GEN_INT (MEMMODEL_RELAXED));
   else
     aarch64_emit_load_exclusive (mode, rval, mem, model_rtx);
 
   if (strong_zero_p)
-    {
-      if (aarch64_track_speculation)
-	{
-	  /* Emit an explicit compare instruction, so that we can correctly
-	     track the condition codes.  */
-	  rtx cc_reg = aarch64_gen_compare_reg (NE, rval, const0_rtx);
-	  x = gen_rtx_NE (GET_MODE (cc_reg), cc_reg, const0_rtx);
-	}
-      else
-	x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
-
-      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
-				gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
-      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
-    }
+    x = gen_rtx_NE (VOIDmode, rval, const0_rtx);
   else
     {
-      cond = aarch64_gen_compare_reg_maybe_ze (NE, rval, oldval, mode);
-      x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
-      x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
-				gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
-      aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
+      rtx cc_reg = aarch64_gen_compare_reg_maybe_ze (NE, rval, oldval, mode);
+      x = gen_rtx_NE (VOIDmode, cc_reg, const0_rtx);
     }
+  x = gen_rtx_IF_THEN_ELSE (VOIDmode, x,
+			    gen_rtx_LABEL_REF (Pmode, label2), pc_rtx);
+  aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
 
   aarch64_emit_store_exclusive (mode, scratch, mem, newval, model_rtx);
 
@@ -15662,22 +15646,16 @@ aarch64_split_compare_and_swap (rtx operands[])
       aarch64_emit_unlikely_jump (gen_rtx_SET (pc_rtx, x));
     }
   else
-    {
-      cond = gen_rtx_REG (CCmode, CC_REGNUM);
-      x = gen_rtx_COMPARE (CCmode, scratch, const0_rtx);
-      emit_insn (gen_rtx_SET (cond, x));
-    }
+    aarch64_gen_compare_reg (NE, scratch, const0_rtx);
 
   emit_label (label2);
+
   /* If we used a CBNZ in the exchange loop emit an explicit compare
      with RVAL to set the condition flags.  If this is not used it
      will be removed by later passes.  */
   if (strong_zero_p)
-    {
-      cond = gen_rtx_REG (CCmode, CC_REGNUM);
-      x = gen_rtx_COMPARE (CCmode, rval, const0_rtx);
-      emit_insn (gen_rtx_SET (cond, x));
-    }
+    aarch64_gen_compare_reg (NE, rval, const0_rtx);
+
   /* Emit any final barrier needed for a __sync operation.  */
   if (is_mm_sync (model))
     aarch64_emit_post_barrier (model);
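
Illustration, not part of the commit: the splitter above fires for compare-and-swap
expansions, and the strong_zero_p fast path covers a strong CAS whose expected value
is the constant zero. A minimal sketch of such a caller follows; it assumes an LL/SC
expansion (no LSE atomics) and enough optimisation for the expected value to reach
the expander as const0_rtx, and the function name try_lock is hypothetical. Before
this patch the strong_zero_p loop needed special casing under -mtrack-speculation
(its CBNZ does not set the condition codes); after it, -mtrack-speculation simply
disables strong_zero_p, so the generic path's explicit compare keeps the condition
codes visible to speculation tracking, per the comment deleted above.

/* Hypothetical caller, for illustration only.  A strong (non-weak) CAS
   against the constant 0 matches the strong_zero_p shape in the splitter:
   !is_weak && oldval == const0_rtx && mode != TImode (int is SImode).  */
#include <stdatomic.h>
#include <stdbool.h>

bool
try_lock (atomic_int *lock)
{
  int expected = 0;	/* Constant-zero expected value: oldval == const0_rtx.  */
  /* Strong CAS, so is_weak is false in aarch64_split_compare_and_swap.  */
  return atomic_compare_exchange_strong (lock, &expected, 1);
}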