diff --git a/gcc/config/sh/sync.md b/gcc/config/sh/sync.md
index b3ff70ce171dc55ee135eba11dfc6a65967ca0e4..52b764bde76c092b66523d93a3affa47779ace34 100644
--- a/gcc/config/sh/sync.md
+++ b/gcc/config/sh/sync.md
@@ -1,5 +1,5 @@
 ;; GCC machine description for SH synchronization instructions.
-;; Copyright (C) 2011-2015 Free Software Foundation, Inc.
+;; Copyright (C) 2011-2024 Free Software Foundation, Inc.
 ;;
 ;; This file is part of GCC.
 ;;
                      (match_test "TARGET_ATOMIC_ANY && TARGET_SH4A
                                   && !TARGET_ATOMIC_STRICT"))))))
 
+;; Displacement addressing can be used for all SImode atomic patterns, except
+;; llcs.
+(define_predicate "atomic_mem_operand_0"
+  (and (match_code "mem")
+       (ior (match_operand 0 "simple_mem_operand")
+           (and (match_test "mode == SImode")
+                (and (match_test "!TARGET_ATOMIC_HARD_LLCS")
+                     (match_test "!TARGET_SH4A || TARGET_ATOMIC_STRICT"))
+                (match_operand 0 "short_displacement_mem_operand")))))
+
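
For illustration, the kind of SImode access this predicate is meant to admit can be sketched at the C level as follows (hypothetical struct and function names; the addressing form actually used still depends on the selected atomic model):

   /* A compare-and-swap on a field at a small offset.  GCC expands
      __atomic_compare_exchange_n through the atomic_compare_and_swap<mode>
      named pattern; with the predicate above the SImode memory operand may
      stay in @(disp,Rn) form, except for the hard LLCS / SH4A case, where
      movli.l/movco.l need a plain register address.  */
   struct spin { int owner; int lock; };

   _Bool
   try_lock (struct spin *s)
   {
     int expected = 0;
     return __atomic_compare_exchange_n (&s->lock, &expected, 1, 0,
                                         __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
   }
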
 (define_expand "atomic_compare_and_swap<mode>"
   [(match_operand:SI 0 "arith_reg_dest")               ;; bool success output
    (match_operand:QIHISI 1 "arith_reg_dest")           ;; oldval output
-   (match_operand:QIHISI 2 "memory_operand")           ;; memory
+   (match_operand:QIHISI 2 "atomic_mem_operand_0")     ;; memory
    (match_operand:QIHISI 3 "atomic_arith_operand_0")   ;; expected input
    (match_operand:QIHISI 4 "atomic_arith_operand_0")   ;; newval input
    (match_operand:SI 5 "const_int_operand")            ;; is_weak
    (match_operand:SI 7 "const_int_operand")]           ;; failure model
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[2], 0));
+  rtx mem = operands[2];
   rtx old_val = gen_lowpart (SImode, operands[1]);
   rtx exp_val = operands[3];
   rtx new_val = operands[4];
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_compare_and_swap<mode>_hard (old_val, addr,
+    atomic_insn = gen_atomic_compare_and_swap<mode>_hard (old_val, mem,
                                                          exp_val, new_val);
   else if (TARGET_ATOMIC_SOFT_GUSA)
-    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_gusa (old_val, addr,
+    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_gusa (old_val, mem,
                      exp_val, new_val);
   else if (TARGET_ATOMIC_SOFT_TCB)
-    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_tcb (old_val, addr,
+    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_tcb (old_val, mem,
                      exp_val, new_val, TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
-    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_imask (old_val, addr,
+    atomic_insn = gen_atomic_compare_and_swap<mode>_soft_imask (old_val, mem,
                      exp_val, new_val);
   else
     FAIL;
   DONE;
 })
 
-(define_insn "atomic_compare_and_swapsi_hard"
+(define_insn_and_split "atomic_compare_and_swapsi_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
        (unspec_volatile:SI
-         [(mem:SI (match_operand:SI 1 "arith_reg_operand" "r"))
+         [(match_operand:SI 1 "atomic_mem_operand_0" "=Sra")
           (match_operand:SI 2 "arith_operand" "rI08")
           (match_operand:SI 3 "arith_operand" "rI08")]
          UNSPECV_CMPXCHG_1))
-   (set (mem:SI (match_dup 1))
+   (set (match_dup 1)
        (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_2))
    (set (reg:SI T_REG)
        (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_3))
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,r0"         "\n"
+  return "\r0: movli.l %1,r0"          "\n"
         "      cmp/eq  %2,r0"          "\n"
         "      bf{.|/}s        0f"     "\n"
         "      mov     r0,%0"          "\n"
         "      mov     %3,r0"          "\n"
-        "      movco.l r0,@%1"         "\n"
+        "      movco.l r0,%1"          "\n"
         "      bf      0b"             "\n"
         "0:";
+}
+  "&& can_create_pseudo_p () && !satisfies_constraint_I08 (operands[2])"
+  [(const_int 0)]
+{
+  /* FIXME: Sometimes the 'expected value' operand is not propagated as
+     immediate value.  See PR 64974.  */
+  set_of_reg op2 = sh_find_set_of_reg (operands[2], curr_insn,
+                                      prev_nonnote_nondebug_insn_bb);
+  if (op2.set_src != NULL && satisfies_constraint_I08 (op2.set_src))
+    {
+      rtx* r = &XVECEXP (XEXP (XVECEXP (PATTERN (curr_insn), 0, 0), 1), 0, 1);
+      validate_change (curr_insn, r, op2.set_src, false);
+      DONE;
+    }
+  else
+    FAIL;
 }
   [(set_attr "length" "14")])
 
-(define_insn "atomic_compare_and_swap<mode>_hard"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_compare_and_swap<mode>_hard"
+  [(set (match_operand:SI 0 "arith_reg_dest")
+       (unspec_volatile:SI
+         [(match_operand:QIHI 1 "atomic_mem_operand_0")
+          (match_operand:QIHI 2 "arith_reg_operand")
+          (match_operand:QIHI 3 "arith_reg_operand")]
+         UNSPECV_CMPXCHG_1))
+   (set (match_dup 1)
+       (unspec_volatile:QIHI [(const_int 0)] UNSPECV_CMPXCHG_2))
+   (set (reg:SI T_REG)
+       (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_3))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_compare_and_swap<mode>_hard_1 (
+               operands[0], XEXP (operands[1], 0), operands[2], operands[3]);
+
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  XVECEXP (XEXP (XVECEXP (i, 0, 0), 1), 0, 0) = operands[1];
+  XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+  emit_insn (i);
+})
+
+(define_insn "atomic_compare_and_swap<mode>_hard_1"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
        (unspec_volatile:SI
          [(mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r"))
 (define_insn "atomic_compare_and_swap<mode>_soft_gusa"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&u")
        (unspec_volatile:SI
-         [(mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u"))
+         [(match_operand:QIHISI 1 "atomic_mem_operand_0" "=AraAdd")
           (match_operand:QIHISI 2 "arith_reg_operand" "u")
           (match_operand:QIHISI 3 "arith_reg_operand" "u")]
          UNSPECV_CMPXCHG_1))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec_volatile:QIHISI [(const_int 0)] UNSPECV_CMPXCHG_2))
    (set (reg:SI T_REG)
        (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_3))
         "      .align 2"                       "\n"
         "      mov     r15,r1"                 "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      cmp/eq  %0,%4"                  "\n"
         "      bf      1f"                     "\n"
-        "      mov.<bwl>       %3,@%1"         "\n"
+        "      mov.<bwl>       %3,%1"          "\n"
         "1:    mov     r1,r15";
 }
   [(set_attr "length" "20")])
 (define_insn "atomic_compare_and_swap<mode>_soft_tcb"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
        (unspec_volatile:SI
-         [(mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         [(match_operand:QIHISI 1 "atomic_mem_operand_0" "=SraSdd")
           (match_operand:QIHISI 2 "arith_reg_operand" "r")
           (match_operand:QIHISI 3 "arith_reg_operand" "r")]
          UNSPECV_CMPXCHG_1))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec_volatile:QIHISI [(const_int 0)] UNSPECV_CMPXCHG_2))
    (set (reg:SI T_REG)
        (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_3))
         "      <i124extend_insn>       %2,%5"  "\n"
         "      mov     #(0f-1f),r1"            "\n"
         "      mov.l   r0,@(%O4,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      mov     #0,r0"                  "\n"
         "      cmp/eq  %0,%5"                  "\n"
         "      bf      1f"                     "\n"
-        "      mov.<bwl>       %3,@%1"         "\n"
+        "      mov.<bwl>       %3,%1"          "\n"
         "1:    mov.l   r0,@(%O4,gbr)";
 }
   [(set_attr "length" "22")])
 (define_insn "atomic_compare_and_swap<mode>_soft_imask"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&z")
        (unspec_volatile:SI
-         [(mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         [(match_operand:QIHISI 1 "atomic_mem_operand_0" "=SraSdd")
           (match_operand:QIHISI 2 "arith_reg_operand" "r")
           (match_operand:QIHISI 3 "arith_reg_operand" "r")]
          UNSPECV_CMPXCHG_1))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec_volatile:QIHISI [(const_int 0)] UNSPECV_CMPXCHG_2))
    (set (reg:SI T_REG)
        (unspec_volatile:SI [(const_int 0)] UNSPECV_CMPXCHG_3))
           "    or      #0xF0,%0"               "\n"
           "    shlr    %5"                     "\n"
           "    ldc     %0,sr"                  "\n"
-          "    mov.<bwl>       @%1,%0"         "\n"
+          "    mov.<bwl>       %1,%0"          "\n"
           "    cmp/eq  %4,%0"                  "\n"
           "    bf      1f"                     "\n"
-          "    mov.<bwl>       %3,@%1"         "\n"
+          "    mov.<bwl>       %3,%1"          "\n"
           "1:  rotcl   %5"                     "\n"
           "    ldc     %5,sr";
   else
           "    mov     %0,%5"                  "\n"
           "    or      #0xF0,%0"               "\n"
           "    ldc     %0,sr"                  "\n"
-          "    mov.<bwl>       @%1,%0"         "\n"
+          "    mov.<bwl>       %1,%0"          "\n"
           "    cmp/eq  %4,%0"                  "\n"
           "    bst     #0,%5"                  "\n"
           "    bf      1f"                     "\n"
-          "    mov.<bwl>       %3,@%1"         "\n"
+          "    mov.<bwl>       %3,%1"          "\n"
           "1:  ldc     %5,sr";
 }
   [(set (attr "length") (if_then_else (match_test "!TARGET_SH2A")
 
 (define_expand "atomic_exchange<mode>"
   [(match_operand:QIHISI 0 "arith_reg_dest")           ;; oldval output
-   (match_operand:QIHISI 1 "memory_operand")           ;; memory
+   (match_operand:QIHISI 1 "atomic_mem_operand_0")     ;; memory
    (match_operand:QIHISI 2 "atomic_arith_operand_0")   ;; newval input
    (match_operand:SI 3 "const_int_operand")]           ;; memory model
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
+  rtx mem = operands[1];
   rtx val = operands[2];
   rtx atomic_insn;
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_exchange<mode>_hard (operands[0], addr, val);
+    atomic_insn = gen_atomic_exchange<mode>_hard (operands[0], mem, val);
   else if (TARGET_ATOMIC_SOFT_GUSA)
-    atomic_insn = gen_atomic_exchange<mode>_soft_gusa (operands[0], addr, val);
+    atomic_insn = gen_atomic_exchange<mode>_soft_gusa (operands[0], mem, val);
   else if (TARGET_ATOMIC_SOFT_TCB)
-    atomic_insn = gen_atomic_exchange<mode>_soft_tcb (operands[0], addr, val,
+    atomic_insn = gen_atomic_exchange<mode>_soft_tcb (operands[0], mem, val,
                      TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
-    atomic_insn = gen_atomic_exchange<mode>_soft_imask (operands[0], addr, val);
+    atomic_insn = gen_atomic_exchange<mode>_soft_imask (operands[0], mem, val);
   else
     FAIL;
 
 
 (define_insn "atomic_exchangesi_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
-       (mem:SI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:SI (match_dup 1))
+       (match_operand:SI 1 "atomic_mem_operand_0" "=Sra"))
+   (set (match_dup 1)
        (unspec:SI
          [(match_operand:SI 2 "arith_operand" "rI08")] UNSPEC_ATOMIC))
    (set (reg:SI T_REG) (const_int 1))
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,r0"         "\n"
+  return "\r0: movli.l %1,r0"          "\n"
         "      mov     r0,%0"          "\n"
         "      mov     %2,r0"          "\n"
-        "      movco.l r0,@%1"         "\n"
+        "      movco.l r0,%1"          "\n"
         "      bf      0b";
 }
   [(set_attr "length" "10")])
 
-(define_insn "atomic_exchange<mode>_hard"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_exchange<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest")
+       (match_operand:QIHI 1 "atomic_mem_operand_0"))
+   (set (match_dup 1)
+       (unspec:QIHI
+         [(match_operand:QIHI 2 "arith_reg_operand")] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_exchange<mode>_hard_1 (operands[0], XEXP (operands[1], 0),
+                                           operands[2]);
+
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  XEXP (XVECEXP (i, 0, 0), 1) = operands[1];
+  XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+  emit_insn (i);
+})
+
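
At the C level, the sub-word case that takes this open-coded path is just a byte or halfword atomic exchange; a minimal sketch (hypothetical function name, assuming the hard LLCS atomic model) is:

   /* __atomic_exchange_n on a 16-bit object expands through the
      atomic_exchange<mode> named pattern in HImode.  With hard LLCS the
      split above exposes the address register, since the emitted sequence
      builds the update around movli.l/movco.l on the containing 32-bit
      word and modifies that register while locating the sub-word slot.  */
   unsigned short
   swap_halfword (unsigned short *p, unsigned short v)
   {
     return __atomic_exchange_n (p, v, __ATOMIC_ACQ_REL);
   }
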
+(define_insn "atomic_exchange<mode>_hard_1"
   [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
        (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r")))
    (set (mem:QIHI (match_dup 1))
 
 (define_insn "atomic_exchange<mode>_soft_gusa"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_0" "=AraAdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(match_operand:QIHISI 2 "arith_reg_operand" "u")] UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
         "      .align 2"                       "\n"
         "      mov     r15,r1"                 "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
-        "      mov.<bwl>       %2,@%1"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
+        "      mov.<bwl>       %2,%1"          "\n"
         "1:    mov     r1,r15";
 }
   [(set_attr "length" "14")])
 
 (define_insn "atomic_exchange<mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_0" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(match_operand:QIHISI 2 "arith_reg_operand" "r")] UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
         "      mov     #(0f-1f),r1"            "\n"
         "      .align 2"                       "\n"
         "      mov.l   r0,@(%O3,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      mov     #0,r0"                  "\n"
-        "      mov.<bwl>       %2,@%1"         "\n"
+        "      mov.<bwl>       %2,%1"          "\n"
         "1:    mov.l   r0,@(%O3,gbr)";
 }
   [(set_attr "length" "16")])
 
 (define_insn "atomic_exchange<mode>_soft_imask"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&z")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_0" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(match_operand:QIHISI 2 "arith_reg_operand" "r")] UNSPEC_ATOMIC))
    (clobber (match_scratch:SI 3 "=&r"))]
         "      mov     %0,%3"                  "\n"
         "      or      #0xF0,%0"               "\n"
         "      ldc     %0,sr"                  "\n"
-        "      mov.<bwl>       @%1,%0"         "\n"
-        "      mov.<bwl>       %2,@%1"         "\n"
+        "      mov.<bwl>       %1,%0"          "\n"
+        "      mov.<bwl>       %2,%1"          "\n"
         "      ldc     %3,sr";
 }
   [(set_attr "length" "14")])
 (define_code_attr fetchop_constraint_1_imask
   [(plus "rI08") (minus "r") (ior "rK08") (xor "rK08") (and "rK08")])
 
+;; Displacement addressing mode (incl. GBR relative) can be used by tcb and
+;; imask atomic patterns in any mode, since all the patterns use R0 as the
+;; register operand for memory loads/stores.  gusa and llcs patterns can only
+;; use displacement addressing for SImode.
+(define_predicate "atomic_mem_operand_1"
+  (and (match_code "mem")
+       (ior (match_operand 0 "simple_mem_operand")
+           (and (match_test "mode == SImode")
+                (match_test "TARGET_ATOMIC_SOFT_GUSA
+                             && (!TARGET_SH4A || TARGET_ATOMIC_STRICT)")
+                (match_operand 0 "short_displacement_mem_operand"))
+           (and (ior (match_test "(TARGET_ATOMIC_SOFT_TCB
+                                   || TARGET_ATOMIC_SOFT_IMASK)
+                                  && (!TARGET_SH4A || TARGET_ATOMIC_STRICT)")
+                     (match_test "(TARGET_ATOMIC_SOFT_TCB
+                                   || TARGET_ATOMIC_SOFT_IMASK)
+                                  && TARGET_SH4A && !TARGET_ATOMIC_STRICT
+                                  && mode != SImode"))
+                (ior (match_operand 0 "short_displacement_mem_operand")
+                     (match_operand 0 "gbr_address_mem"))))))
+
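
Typical operations that reach these fetchop patterns look like the following at the C level (illustrative only; struct and function names are made up):

   struct counters { int hits; int flags; };

   /* __atomic_fetch_add and __atomic_fetch_or expand through the
      atomic_fetch_<fetchop_name><mode> named patterns.  Per the comment
      above, the soft-tcb and soft-imask sequences do their loads/stores
      through r0, so a @(disp,Rn) or GBR-relative address can be kept.  */
   int
   count_hit (struct counters *c)
   {
     return __atomic_fetch_add (&c->hits, 1, __ATOMIC_RELAXED);
   }

   void
   set_flag (struct counters *c, int bit)
   {
     __atomic_fetch_or (&c->flags, 1 << bit, __ATOMIC_RELAXED);
   }
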
 (define_expand "atomic_fetch_<fetchop_name><mode>"
   [(set (match_operand:QIHISI 0 "arith_reg_dest")
-       (match_operand:QIHISI 1 "memory_operand"))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1"))
    (set (match_dup 1)
        (unspec:QIHISI
          [(FETCHOP:QIHISI (match_dup 1)
    (match_operand:SI 3 "const_int_operand")]
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
+  rtx mem = operands[1];
   rtx atomic_insn;
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_fetch_<fetchop_name><mode>_hard (operands[0], addr,
+    atomic_insn = gen_atomic_fetch_<fetchop_name><mode>_hard (operands[0], mem,
                                                              operands[2]);
   else if (TARGET_ATOMIC_SOFT_GUSA)
     atomic_insn = gen_atomic_fetch_<fetchop_name><mode>_soft_gusa (operands[0],
-                     addr, operands[2]);
+                     mem, operands[2]);
   else if (TARGET_ATOMIC_SOFT_TCB)
     atomic_insn = gen_atomic_fetch_<fetchop_name><mode>_soft_tcb (operands[0],
-                     addr, operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
+                     mem, operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
     atomic_insn = gen_atomic_fetch_<fetchop_name><mode>_soft_imask (operands[0],
-                     addr, operands[2]);
+                     mem, operands[2]);
   else
     FAIL;
 
   DONE;
 })
 
-(define_insn "atomic_fetch_<fetchop_name>si_hard"
+(define_insn_and_split "atomic_fetch_<fetchop_name>si_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
-       (mem:SI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:SI (match_dup 1))
+       (match_operand:SI 1 "atomic_mem_operand_1" "=Sra"))
+   (set (match_dup 1)
        (unspec:SI
-         [(FETCHOP:SI (mem:SI (match_dup 1))
+         [(FETCHOP:SI (match_dup 1)
                       (match_operand:SI 2 "<fetchop_predicate_1>"
                                           "<fetchop_constraint_1_llcs>"))]
          UNSPEC_ATOMIC))
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,r0"         "\n"
+  return "\r0: movli.l %1,r0"          "\n"
         "      mov     r0,%0"          "\n"
         "      <fetchop_name>  %2,r0"  "\n"
-        "      movco.l r0,@%1"         "\n"
+        "      movco.l r0,%1"          "\n"
         "      bf      0b";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_<fetchop_name>_fetchsi_hard (gen_reg_rtx (SImode),
+                                                    operands[1], operands[2]));
 }
   [(set_attr "length" "10")])
 
-(define_insn "atomic_fetch_<fetchop_name><mode>_hard"
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_fetch_notsi_hard"
+  [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
+       (match_operand:SI 1 "atomic_mem_operand_1" "=Sra"))
+   (set (match_dup 1)
+       (unspec:SI [(not:SI (match_dup 1))] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS
+   || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
+{
+  return "\r0: movli.l %1,r0"          "\n"
+        "      mov     r0,%0"          "\n"
+        "      not     r0,r0"          "\n"
+        "      movco.l r0,%1"          "\n"
+        "      bf      0b";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_not_fetchsi_hard (gen_reg_rtx (SImode), operands[1]));
+}
+  [(set_attr "length" "10")])
+
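
The xor/nand-with-minus-one shape this combine pattern targets can be produced from C like so (illustrative only; the function name is made up):

   /* __atomic_fetch_xor (p, -1, ...) stores ~*p and returns the old value,
      which is the operation atomic_fetch_notsi_hard expresses; the same
      holds for __atomic_fetch_nand (p, -1, ...).  */
   int
   fetch_and_invert (int *p)
   {
     return __atomic_fetch_xor (p, -1, __ATOMIC_RELAXED);
   }
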
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_fetch_<fetchop_name><mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest")
+       (match_operand:QIHI 1 "atomic_mem_operand_1"))
+   (set (match_dup 1)
+       (unspec:QIHI
+         [(FETCHOP:QIHI (match_dup 1)
+                        (match_operand:QIHI 2 "<fetchop_predicate_1>"))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  if (optimize
+      && sh_reg_dead_or_unused_after_insn (curr_insn, REGNO (operands[0])))
+    emit_insn (gen_atomic_<fetchop_name><mode>_hard (operands[1], operands[2]));
+  else
+    {
+      rtx i = gen_atomic_fetch_<fetchop_name><mode>_hard_1 (
+                       operands[0], XEXP (operands[1], 0), operands[2]);
+
+      /* Replace the new mems in the new insn with the old mem to preserve
+        aliasing info.  */
+      XEXP (XVECEXP (i, 0, 0), 1) = operands[1];
+      XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+      XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 1), 1), 0, 0), 0) = operands[1];
+      emit_insn (i);
+    }
+})
+
+(define_insn "atomic_fetch_<fetchop_name><mode>_hard_1"
   [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
        (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r")))
    (set (mem:QIHI (match_dup 1))
 }
   [(set_attr "length" "28")])
 
-(define_insn "atomic_fetch_<fetchop_name><mode>_soft_gusa"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_<fetchop_name><mode>_hard"
+  [(set (match_operand:QIHI 0 "atomic_mem_operand_1")
+       (unspec:QIHI
+         [(FETCHOP:QIHI (match_dup 0)
+                        (match_operand:QIHI 1 "<fetchop_predicate_1>"))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_<fetchop_name><mode>_hard_1 (XEXP (operands[0], 0),
+                                                 operands[1]);
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  XEXP (XVECEXP (i, 0, 0), 0) = operands[0];
+  XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 0), 1), 0, 0), 0) = operands[0];
+  emit_insn (i);
+})
+
+(define_insn "atomic_<fetchop_name><mode>_hard_1"
+  [(set (mem:QIHI (match_operand:SI 0 "arith_reg_operand" "r"))
+       (unspec:QIHI
+         [(FETCHOP:QIHI (mem:QIHI (match_dup 0))
+                        (match_operand:QIHI 1 "<fetchop_predicate_1>"
+                                              "<fetchop_constraint_1_llcs>"))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:SI 2 "=&r"))
+   (clobber (match_scratch:SI 3 "=0"))]
+  "TARGET_ATOMIC_HARD_LLCS"
+{
+  return "\r   mov     #-4,%2"                 "\n"
+        "      and     %0,%2"                  "\n"
+        "      xor     %2,%0"                  "\n"
+        "      add     r15,%0"                 "\n"
+        "      add     #-4,%0"                 "\n"
+        "0:    movli.l @%2,r0"                 "\n"
+        "      mov.l   r0,@-r15"               "\n"
+        "      mov.<bw>        @%0,r0"         "\n"
+        "      <fetchop_name>  %1,r0"          "\n"
+        "      mov.<bw>        r0,@%0"         "\n"
+        "      mov.l   @r15+,r0"               "\n"
+        "      movco.l r0,@%2"                 "\n"
+        "      bf      0b";
+}
+  [(set_attr "length" "26")])
+
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_fetch_not<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
+       (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r")))
+   (set (mem:QIHI (match_dup 1))
+       (unspec:QIHI [(not:QIHI (mem:QIHI (match_dup 1)))] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:SI 2 "=&r"))
+   (clobber (match_scratch:SI 3 "=1"))]
+  "TARGET_ATOMIC_HARD_LLCS"
+{
+  return "\r   mov     #-4,%2"                 "\n"
+        "      and     %1,%2"                  "\n"
+        "      xor     %2,%1"                  "\n"
+        "      add     r15,%1"                 "\n"
+        "      add     #-4,%1"                 "\n"
+        "0:    movli.l @%2,r0"                 "\n"
+        "      mov.l   r0,@-r15"               "\n"
+        "      mov.<bw>        @%1,%0"         "\n"
+        "      not     %0,r0"                  "\n"
+        "      mov.<bw>        r0,@%1"         "\n"
+        "      mov.l   @r15+,r0"               "\n"
+        "      movco.l r0,@%2"                 "\n"
+        "      bf      0b";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_not<mode>_hard (operands[1]);
+
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  rtx m = XEXP (XVECEXP (PATTERN (curr_insn), 0, 0), 1);
+  XEXP (XVECEXP (i, 0, 0), 0) = m;
+  XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 0), 1), 0, 0), 0) = m;
+  emit_insn (i);
+}
+  [(set_attr "length" "26")])
+
+(define_insn "atomic_not<mode>_hard"
+  [(set (mem:QIHI (match_operand:SI 0 "arith_reg_operand" "r"))
+       (unspec:QIHI [(not:QIHI (mem:QIHI (match_dup 0)))] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:SI 1 "=&r"))
+   (clobber (match_scratch:SI 2 "=0"))]
+  "TARGET_ATOMIC_HARD_LLCS"
+{
+  return "\r   mov     #-4,%1"                 "\n"
+        "      and     %0,%1"                  "\n"
+        "      xor     %1,%0"                  "\n"
+        "      add     r15,%0"                 "\n"
+        "      add     #-4,%0"                 "\n"
+        "0:    movli.l @%1,r0"                 "\n"
+        "      mov.l   r0,@-r15"               "\n"
+        "      mov.<bw>        @%0,r0"         "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bw>        r0,@%0"         "\n"
+        "      mov.l   @r15+,r0"               "\n"
+        "      movco.l r0,@%1"                 "\n"
+        "      bf      0b";
+}
+  [(set_attr "length" "26")])
+
+(define_insn_and_split "atomic_fetch_<fetchop_name><mode>_soft_gusa"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(FETCHOP:QIHISI
-               (mem:QIHISI (match_dup 1))
+               (match_dup 1)
                (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                        "<fetchop_constraint_1_gusa>"))]
          UNSPEC_ATOMIC))
         "      .align 2"                       "\n"
         "      mov     r15,r1"                 "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      mov     %0,%3"                  "\n"
         "      <fetchop_name>  %2,%3"          "\n"
-        "      mov.<bwl>       %3,@%1"         "\n"
+        "      mov.<bwl>       %3,%1"          "\n"
         "1:    mov     r1,r15";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_<fetchop_name>_fetch<mode>_soft_gusa (
+               gen_reg_rtx (<MODE>mode), operands[1], operands[2]));
 }
   [(set_attr "length" "18")])
 
-(define_insn "atomic_fetch_<fetchop_name><mode>_soft_tcb"
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_fetch_not<mode>_soft_gusa"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd"))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (clobber (match_scratch:QIHISI 2 "=&u"))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_GUSA"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      mov     r15,r1"                 "\n"
+        "      .align 2"                       "\n"
+        "      mov     #(0f-1f),r15"           "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
+        "      not     %0,%2"                  "\n"
+        "      mov.<bwl>       %2,%1"          "\n"
+        "1:    mov     r1,r15";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_not_fetch<mode>_soft_gusa (gen_reg_rtx (<MODE>mode),
+                                                  operands[1]));
+}
+  [(set_attr "length" "16")])
+
+(define_insn_and_split "atomic_fetch_<fetchop_name><mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(FETCHOP:QIHISI
-               (mem:QIHISI (match_dup 1))
+               (match_dup 1)
                (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                        "<fetchop_constraint_1_tcb>"))]
          UNSPEC_ATOMIC))
         "      .align 2"                       "\n"
         "      mov     #(0f-1f),r1"            "\n"
         "      mov.l   r0,@(%O3,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,r0"         "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
         "      mov     r0,%0"                  "\n"
         "      <fetchop_name>  %2,r0"          "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "1:    mov     #0,r0"                  "\n"
         "      mov.l   r0,@(%O3,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_<fetchop_name><mode>_soft_tcb (
+                                       operands[1], operands[2], operands[3]));
 }
   [(set_attr "length" "20")])
 
-(define_insn "atomic_fetch_<fetchop_name><mode>_soft_imask"
+(define_insn "atomic_<fetchop_name><mode>_soft_tcb"
+  [(set (match_operand:QIHISI 0 "atomic_mem_operand_1" "=SraSdd")
+       (unspec:QIHISI
+         [(FETCHOP:QIHISI
+               (match_dup 0)
+               (match_operand:QIHISI 1 "<fetchop_predicate_1>"
+                                       "<fetchop_constraint_1_tcb>"))]
+         UNSPEC_ATOMIC))
+   (use (match_operand:SI 2 "gbr_displacement"))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_TCB"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      mov     #(0f-1f),r1"            "\n"
+        "      .align 2"                       "\n"
+        "      mov.l   r0,@(%O2,gbr)"          "\n"
+        "0:    mov.<bwl>       %0,r0"          "\n"
+        "      <fetchop_name>  %1,r0"          "\n"
+        "      mov.<bwl>       r0,%0"          "\n"
+        "1:    mov     #0,r0"                  "\n"
+        "      mov.l   r0,@(%O2,gbr)";
+}
+  [(set_attr "length" "18")])
+
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_fetch_not<mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (use (match_operand:SI 2 "gbr_displacement"))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_TCB"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      .align 2"                       "\n"
+        "      mov     #(0f-1f),r1"            "\n"
+        "      mov.l   r0,@(%O2,gbr)"          "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
+        "      mov     r0,%0"                  "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
+        "1:    mov     #0,r0"                  "\n"
+        "      mov.l   r0,@(%O2,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_not<mode>_soft_tcb (operands[1], operands[2]));
+}
+  [(set_attr "length" "20")])
+
+(define_insn "atomic_not<mode>_soft_tcb"
+  [(set (match_operand:QIHISI 0 "atomic_mem_operand_1" "=SraSdd")
+       (unspec:QIHISI [(not:QIHISI (match_dup 0))] UNSPEC_ATOMIC))
+   (use (match_operand:SI 1 "gbr_displacement"))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_TCB"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      mov     #(0f-1f),r1"            "\n"
+        "      .align 2"                       "\n"
+        "      mov.l   r0,@(%O1,gbr)"          "\n"
+        "0:    mov.<bwl>       %0,r0"          "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bwl>       r0,%0"          "\n"
+        "1:    mov     #0,r0"                  "\n"
+        "      mov.l   r0,@(%O1,gbr)";
+}
+  [(set_attr "length" "18")])
+
+(define_insn_and_split "atomic_fetch_<fetchop_name><mode>_soft_imask"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(FETCHOP:QIHISI
-               (mem:QIHISI (match_dup 1))
+               (match_dup 1)
                (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                        "<fetchop_constraint_1_imask>"))]
          UNSPEC_ATOMIC))
         "      mov     r0,%3"                  "\n"
         "      or      #0xF0,r0"               "\n"
         "      ldc     r0,sr"                  "\n"
-        "      mov.<bwl>       @%1,r0"         "\n"
+        "      mov.<bwl>       %1,r0"          "\n"
         "      mov     r0,%0"                  "\n"
         "      <fetchop_name>  %2,r0"          "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "      ldc     %3,sr";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_<fetchop_name>_fetch<mode>_soft_imask (
+                       gen_reg_rtx (<MODE>mode), operands[1], operands[2]));
+}
+  [(set_attr "length" "18")])
+
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_fetch_not<mode>_soft_imask"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:QIHISI 2 "=&r"))]
+  "TARGET_ATOMIC_SOFT_IMASK"
+{
+  return "\r   stc     sr,r0"                  "\n"
+        "      mov     r0,%2"                  "\n"
+        "      or      #0xF0,r0"               "\n"
+        "      ldc     r0,sr"                  "\n"
+        "      mov.<bwl>       %1,r0"          "\n"
+        "      mov     r0,%0"                  "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
+        "      ldc     %2,sr";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_not_fetch<mode>_soft_imask (gen_reg_rtx (<MODE>mode),
+                                                   operands[1]));
 }
   [(set_attr "length" "18")])
 
 (define_expand "atomic_fetch_nand<mode>"
   [(set (match_operand:QIHISI 0 "arith_reg_dest")
-       (match_operand:QIHISI 1 "memory_operand"))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1"))
    (set (match_dup 1)
        (unspec:QIHISI
          [(not:QIHISI (and:QIHISI (match_dup 1)
    (match_operand:SI 3 "const_int_operand")]
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
+  rtx mem = operands[1];
   rtx atomic_insn;
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_fetch_nand<mode>_hard (operands[0], addr,
+    atomic_insn = gen_atomic_fetch_nand<mode>_hard (operands[0], mem,
                                                    operands[2]);
   else if (TARGET_ATOMIC_SOFT_GUSA)
-    atomic_insn = gen_atomic_fetch_nand<mode>_soft_gusa (operands[0], addr,
+    atomic_insn = gen_atomic_fetch_nand<mode>_soft_gusa (operands[0], mem,
                                                         operands[2]);
   else if (TARGET_ATOMIC_SOFT_TCB)
-    atomic_insn = gen_atomic_fetch_nand<mode>_soft_tcb (operands[0], addr,
+    atomic_insn = gen_atomic_fetch_nand<mode>_soft_tcb (operands[0], mem,
                      operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
-    atomic_insn = gen_atomic_fetch_nand<mode>_soft_imask (operands[0], addr,
+    atomic_insn = gen_atomic_fetch_nand<mode>_soft_imask (operands[0], mem,
                                                          operands[2]);
   else
     FAIL;
   DONE;
 })
 
-(define_insn "atomic_fetch_nandsi_hard"
+(define_insn_and_split "atomic_fetch_nandsi_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&r")
-       (mem:SI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:SI (match_dup 1))
+       (match_operand:SI 1 "atomic_mem_operand_1" "=Sra"))
+   (set (match_dup 1)
        (unspec:SI
-         [(not:SI (and:SI (mem:SI (match_dup 1))
+         [(not:SI (and:SI (match_dup 1)
                   (match_operand:SI 2 "logical_operand" "rK08")))]
          UNSPEC_ATOMIC))
    (set (reg:SI T_REG) (const_int 1))
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,r0"         "\n"
+  return "\r0: movli.l %1,r0"          "\n"
         "      mov     r0,%0"          "\n"
         "      and     %2,r0"          "\n"
         "      not     r0,r0"          "\n"
-        "      movco.l r0,@%1"         "\n"
+        "      movco.l r0,%1"          "\n"
         "      bf      0b";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_nand_fetchsi_hard (gen_reg_rtx (SImode), operands[1],
+                                          operands[2]));
 }
   [(set_attr "length" "12")])
 
-(define_insn "atomic_fetch_nand<mode>_hard"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_fetch_nand<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest")
+       (match_operand:QIHI 1 "atomic_mem_operand_1"))
+   (set (match_dup 1)
+       (unspec:QIHI
+         [(not:QIHI (and:QIHI (match_dup 1)
+                    (match_operand:QIHI 2 "logical_operand" "rK08")))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  if (optimize
+      && sh_reg_dead_or_unused_after_insn (curr_insn, REGNO (operands[0])))
+    emit_insn (gen_atomic_nand<mode>_hard (operands[1], operands[2]));
+  else
+    {
+      rtx i = gen_atomic_fetch_nand<mode>_hard_1 (
+                       operands[0], XEXP (operands[1], 0), operands[2]);
+
+      /* Replace the new mems in the new insn with the old mem to preserve
+        aliasing info.  */
+      XEXP (XVECEXP (i, 0, 0), 1) = operands[1];
+      XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+      XEXP (XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 1), 1), 0, 0), 0),
+           0) = operands[1];
+      emit_insn (i);
+    }
+})
+
+(define_insn "atomic_fetch_nand<mode>_hard_1"
   [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
        (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r")))
    (set (mem:QIHI (match_dup 1))
 }
   [(set_attr "length" "30")])
 
-(define_insn "atomic_fetch_nand<mode>_soft_gusa"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_nand<mode>_hard"
+  [(set (match_operand:QIHI 0 "atomic_mem_operand_1")
+       (unspec:QIHI
+         [(not:QIHI (and:QIHI (match_dup 0)
+                              (match_operand:QIHI 1 "logical_operand")))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_nand<mode>_hard_1 (XEXP (operands[0], 0), operands[1]);
+
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  XEXP (XVECEXP (i, 0, 0), 0) = operands[0];
+  XEXP (XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 0), 1), 0, 0), 0), 0) = operands[0];
+  emit_insn (i);
+})
+
+(define_insn "atomic_nand<mode>_hard_1"
+  [(set (mem:QIHI (match_operand:SI 0 "arith_reg_operand" "r"))
+       (unspec:QIHI
+         [(not:QIHI (and:QIHI (mem:QIHI (match_dup 0))
+                    (match_operand:QIHI 1 "logical_operand" "rK08")))]
+         UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:SI 2 "=&r"))
+   (clobber (match_scratch:SI 3 "=0"))]
+  "TARGET_ATOMIC_HARD_LLCS"
+{
+  return "\r   mov     #-4,%2"                 "\n"
+        "      and     %0,%2"                  "\n"
+        "      xor     %2,%0"                  "\n"
+        "      add     r15,%0"                 "\n"
+        "      add     #-4,%0"                 "\n"
+        "0:    movli.l @%2,r0"                 "\n"
+        "      mov.l   r0,@-r15"               "\n"
+        "      mov.<bw>        @%0,r0"         "\n"
+        "      and     %1,r0"                  "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bw>        r0,@%0"         "\n"
+        "      mov.l   @r15+,r0"               "\n"
+        "      movco.l r0,@%2"                 "\n"
+        "      bf      0b";
+}
+  [(set_attr "length" "28")])
+
+(define_insn_and_split "atomic_fetch_nand<mode>_soft_gusa"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(not:QIHISI
-            (and:QIHISI (mem:QIHISI (match_dup 1))
+            (and:QIHISI (match_dup 1)
                         (match_operand:QIHISI 2 "arith_reg_operand" "u")))]
          UNSPEC_ATOMIC))
    (clobber (match_scratch:QIHISI 3 "=&u"))
         "      mov     r15,r1"                 "\n"
         "      .align 2"                       "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      mov     %2,%3"                  "\n"
         "      and     %0,%3"                  "\n"
         "      not     %3,%3"                  "\n"
-        "      mov.<bwl>       %3,@%1"         "\n"
+        "      mov.<bwl>       %3,%1"          "\n"
         "1:    mov     r1,r15";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_nand_fetch<mode>_soft_gusa (gen_reg_rtx (<MODE>mode),
+                                                   operands[1], operands[2]));
 }
   [(set_attr "length" "20")])
 
-(define_insn "atomic_fetch_nand<mode>_soft_tcb"
+(define_insn_and_split "atomic_fetch_nand<mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(not:QIHISI
-            (and:QIHISI (mem:QIHISI (match_dup 1))
+            (and:QIHISI (match_dup 1)
                         (match_operand:QIHISI 2 "logical_operand" "rK08")))]
          UNSPEC_ATOMIC))
    (use (match_operand:SI 3 "gbr_displacement"))
         "      mov     #(0f-1f),r1"            "\n"
         "      .align 2"                       "\n"
         "      mov.l   r0,@(%O3,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,r0"         "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
         "      mov     r0,%0"                  "\n"
         "      and     %2,r0"                  "\n"
         "      not     r0,r0"                  "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "1:    mov     #0,r0"                  "\n"
         "      mov.l   r0,@(%O3,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_nand<mode>_soft_tcb (operands[1], operands[2],
+                                            operands[3]));
 }
   [(set_attr "length" "22")])
 
-(define_insn "atomic_fetch_nand<mode>_soft_imask"
+(define_insn "atomic_nand<mode>_soft_tcb"
+  [(set (match_operand:QIHISI 0 "atomic_mem_operand_1" "=SraSdd")
+       (unspec:QIHISI
+         [(not:QIHISI
+            (and:QIHISI (match_dup 0)
+                        (match_operand:QIHISI 1 "logical_operand" "rK08")))]
+         UNSPEC_ATOMIC))
+   (use (match_operand:SI 2 "gbr_displacement"))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_TCB"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      .align 2"                       "\n"
+        "      mov     #(0f-1f),r1"            "\n"
+        "      mov.l   r0,@(%O2,gbr)"          "\n"
+        "0:    mov.<bwl>       %0,r0"          "\n"
+        "      and     %1,r0"                  "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bwl>       r0,%0"          "\n"
+        "1:    mov     #0,r0"                  "\n"
+        "      mov.l   r0,@(%O2,gbr)";
+}
+  [(set_attr "length" "20")])
+
+(define_insn_and_split "atomic_fetch_nand<mode>_soft_imask"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
-       (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r")))
-   (set (mem:QIHISI (match_dup 1))
+       (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd"))
+   (set (match_dup 1)
        (unspec:QIHISI
          [(not:QIHISI
-            (and:QIHISI (mem:QIHISI (match_dup 1))
+            (and:QIHISI (match_dup 1)
                         (match_operand:QIHISI 2 "logical_operand" "rK08")))]
          UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
         "      mov     r0,%3"                  "\n"
         "      or      #0xF0,r0"               "\n"
         "      ldc     r0,sr"                  "\n"
-        "      mov.<bwl>       @%1,r0"         "\n"
+        "      mov.<bwl>       %1,r0"          "\n"
         "      mov     r0,%0"                  "\n"
         "      and     %2,r0"                  "\n"
         "      not     r0,r0"                  "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "      ldc     %3,sr";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_nand_fetch<mode>_soft_imask (gen_reg_rtx (<MODE>mode),
+                                                    operands[1], operands[2]));
 }
   [(set_attr "length" "20")])
 
 (define_expand "atomic_<fetchop_name>_fetch<mode>"
   [(set (match_operand:QIHISI 0 "arith_reg_dest")
        (FETCHOP:QIHISI
-         (match_operand:QIHISI 1 "memory_operand")
+         (match_operand:QIHISI 1 "atomic_mem_operand_1")
          (match_operand:QIHISI 2 "<fetchop_predicate_1>")))
    (set (match_dup 1)
        (unspec:QIHISI
    (match_operand:SI 3 "const_int_operand" "")]
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
+  rtx mem = operands[1];
   rtx atomic_insn;
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_<fetchop_name>_fetch<mode>_hard (operands[0], addr,
+    atomic_insn = gen_atomic_<fetchop_name>_fetch<mode>_hard (operands[0], mem,
                                                              operands[2]);
   else if (TARGET_ATOMIC_SOFT_GUSA)
     atomic_insn = gen_atomic_<fetchop_name>_fetch<mode>_soft_gusa (operands[0],
-                     addr, operands[2]);
+                     mem, operands[2]);
   else if (TARGET_ATOMIC_SOFT_TCB)
     atomic_insn = gen_atomic_<fetchop_name>_fetch<mode>_soft_tcb (operands[0],
-                     addr, operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
+                     mem, operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
     atomic_insn = gen_atomic_<fetchop_name>_fetch<mode>_soft_imask (operands[0],
-                     addr, operands[2]);
+                     mem, operands[2]);
   else
     FAIL;
 
 (define_insn "atomic_<fetchop_name>_fetchsi_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&z")
        (FETCHOP:SI
-         (mem:SI (match_operand:SI 1 "arith_reg_operand" "r"))
+         (match_operand:SI 1 "atomic_mem_operand_1" "=Sra")
          (match_operand:SI 2 "<fetchop_predicate_1>"
                              "<fetchop_constraint_1_llcs>")))
-   (set (mem:SI (match_dup 1))
+   (set (match_dup 1)
        (unspec:SI
-         [(FETCHOP:SI (mem:SI (match_dup 1)) (match_dup 2))]
+         [(FETCHOP:SI (match_dup 1) (match_dup 2))]
          UNSPEC_ATOMIC))
    (set (reg:SI T_REG) (const_int 1))]
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,%0"         "\n"
+  return "\r0: movli.l %1,%0"          "\n"
         "      <fetchop_name>  %2,%0"  "\n"
-        "      movco.l %0,@%1"         "\n"
+        "      movco.l %0,%1"          "\n"
         "      bf      0b";
 }
   [(set_attr "length" "8")])
 
-(define_insn "atomic_<fetchop_name>_fetch<mode>_hard"
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn "atomic_not_fetchsi_hard"
+  [(set (match_operand:SI 0 "arith_reg_dest" "=&z")
+       (not:SI (match_operand:SI 1 "atomic_mem_operand_1" "=Sra")))
+   (set (match_dup 1)
+       (unspec:SI [(not:SI (match_dup 1))] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))]
+  "TARGET_ATOMIC_HARD_LLCS
+   || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
+{
+  return "\r0: movli.l %1,%0"          "\n"
+        "      not     %0,%0"          "\n"
+        "      movco.l %0,%1"          "\n"
+        "      bf      0b";
+}
+  [(set_attr "length" "8")])
+
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_<fetchop_name>_fetch<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
+       (FETCHOP:QIHI (match_operand:QIHI 1 "atomic_mem_operand_1")
+                     (match_operand:QIHI 2 "<fetchop_predicate_1>")))
+   (set (match_dup 1) (unspec:QIHI [(FETCHOP:QIHI (match_dup 1) (match_dup 2))]
+                                  UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  if (optimize
+      && sh_reg_dead_or_unused_after_insn (curr_insn, REGNO (operands[0])))
+    emit_insn (gen_atomic_<fetchop_name><mode>_hard (operands[1], operands[2]));
+  else
+    {
+      rtx i = gen_atomic_<fetchop_name>_fetch<mode>_hard_1 (
+                       operands[0], XEXP (operands[1], 0), operands[2]);
+
+      /* Replace the new mems in the new insn with the old mem to preserve
+        aliasing info.  */
+      XEXP (XEXP (XVECEXP (i, 0, 0), 1), 0) = operands[1];
+      XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+      XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 1), 1), 0, 0), 0) = operands[1];
+      emit_insn (i);
+    }
+})
+
+(define_insn "atomic_<fetchop_name>_fetch<mode>_hard_1"
   [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
        (FETCHOP:QIHI
          (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r"))
 }
   [(set_attr "length" "28")])
 
+;; Combine pattern for xor (val, -1) / nand (val, -1).
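+;; The llcs sequence below operates on the aligned SImode word that contains
+;; the QI/HImode value: the word is loaded with movli.l, pushed to a stack
+;; slot, the sub-word value is complemented in that copy, and the whole word
+;; is written back with movco.l, retrying if the store-conditional fails.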
+(define_insn_and_split "atomic_not_fetch<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
+       (not:QIHI (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r"))))
+   (set (mem:QIHI (match_dup 1))
+       (unspec:QIHI [(not:QIHI (mem:QIHI (match_dup 1)))] UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))
+   (clobber (match_scratch:SI 2 "=&r"))
+   (clobber (match_scratch:SI 3 "=1"))]
+  "TARGET_ATOMIC_HARD_LLCS"
+{
+  return "\r   mov     #-4,%2"                 "\n"
+        "      and     %1,%2"                  "\n"
+        "      xor     %2,%1"                  "\n"
+        "      add     r15,%1"                 "\n"
+        "      add     #-4,%1"                 "\n"
+        "0:    movli.l @%2,r0"                 "\n"
+        "      mov.l   r0,@-r15"               "\n"
+        "      mov.<bw>        @%1,r0"         "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bw>        r0,@%1"         "\n"
+        "      mov     r0,%0"                  "\n"
+        "      mov.l   @r15+,r0"               "\n"
+        "      movco.l r0,@%2"                 "\n"
+        "      bf      0b";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  rtx i = gen_atomic_not<mode>_hard (operands[1]);
+
+  /* Replace the new mems in the new insn with the old mem to preserve
+     aliasing info.  */
+  rtx m = XEXP (XEXP (XVECEXP (PATTERN (curr_insn), 0, 0), 1), 0);
+  XEXP (XVECEXP (i, 0, 0), 0) = m;
+  XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 0), 1), 0, 0), 0) = m;
+  emit_insn (i);
+}
+  [(set_attr "length" "28")])
+
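+;; In the gUSA sequences below, r0 is loaded with the address of the end
+;; label (mova 1f,r0), r1 saves the stack pointer and r15 holds the negative
+;; length of the region (#(0f-1f)); a gUSA-aware kernel can use this to
+;; restart an interrupted sequence from its start.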
 (define_insn "atomic_<fetchop_name>_fetch<mode>_soft_gusa"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
        (FETCHOP:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd")
          (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                  "<fetchop_constraint_1_gusa>")))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(FETCHOP:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2))]
+         [(FETCHOP:QIHISI (match_dup 1) (match_dup 2))]
          UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
    (clobber (reg:SI R1_REG))]
         "      mov     r15,r1"                 "\n"
         "      .align 2"                       "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      <fetchop_name>  %2,%0"          "\n"
-        "      mov.<bwl>       %0,@%1"         "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
         "1:    mov     r1,r15";
 }
   [(set_attr "length" "16")])
 
-(define_insn "atomic_<fetchop_name>_fetch<mode>_soft_tcb"
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn "atomic_not_fetch<mode>_soft_gusa"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
+       (not:QIHISI (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd")))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))]
+  "TARGET_ATOMIC_SOFT_GUSA"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      mov     r15,r1"                 "\n"
+        "      .align 2"                       "\n"
+        "      mov     #(0f-1f),r15"           "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
+        "      not     %0,%0"                  "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
+        "1:    mov     r1,r15";
+}
+  [(set_attr "length" "16")])
+
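+;; In the TCB sequences below, the address of the end label is stored into a
+;; GBR-relative thread-control-block slot (operand %O3 resp. %O2) for the
+;; duration of the region and cleared afterwards; r1 holds the negative
+;; length of the sequence.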
+(define_insn_and_split "atomic_<fetchop_name>_fetch<mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
        (FETCHOP:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")
          (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                  "<fetchop_constraint_1_tcb>")))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(FETCHOP:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2))]
+         [(FETCHOP:QIHISI (match_dup 1) (match_dup 2))]
          UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
    (clobber (reg:SI R1_REG))
         "      mov     #(0f-1f),r1"            "\n"
         "      .align 2"                       "\n"
         "      mov.l   r0,@(%O3,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,r0"         "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
         "      <fetchop_name>  %2,r0"          "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "1:    mov     r0,%0"                  "\n"
         "      mov     #0,r0"                  "\n"
         "      mov.l   r0,@(%O3,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_<fetchop_name><mode>_soft_tcb (
+                               operands[1], operands[2], operands[3]));
+}
+  [(set_attr "length" "20")])
+
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn_and_split "atomic_not_fetch<mode>_soft_tcb"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
+       (not:QIHISI (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (clobber (reg:SI R0_REG))
+   (clobber (reg:SI R1_REG))
+   (use (match_operand:SI 2 "gbr_displacement"))]
+  "TARGET_ATOMIC_SOFT_TCB"
+{
+  return "\r   mova    1f,r0"                  "\n"
+        "      mov     #(0f-1f),r1"            "\n"
+        "      .align 2"                       "\n"
+        "      mov.l   r0,@(%O2,gbr)"          "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
+        "      not     r0,r0"                  "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
+        "1:    mov     r0,%0"                  "\n"
+        "      mov     #0,r0"                  "\n"
+        "      mov.l   r0,@(%O2,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_not<mode>_soft_tcb (operands[1], operands[2]));
 }
   [(set_attr "length" "20")])
 
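+;; The imask sequences block interrupts around the load/op/store by raising
+;; the IMASK field in SR to 15 (or #0xF0 on the copy in r0) and restoring
+;; the saved SR afterwards.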
 (define_insn "atomic_<fetchop_name>_fetch<mode>_soft_imask"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&z")
        (FETCHOP:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")
          (match_operand:QIHISI 2 "<fetchop_predicate_1>"
                                  "<fetchop_constraint_1_imask>")))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(FETCHOP:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2))]
+         [(FETCHOP:QIHISI (match_dup 1) (match_dup 2))]
          UNSPEC_ATOMIC))
    (clobber (match_scratch:SI 3 "=&r"))]
   "TARGET_ATOMIC_SOFT_IMASK"
         "      mov     %0,%3"                  "\n"
         "      or      #0xF0,%0"               "\n"
         "      ldc     %0,sr"                  "\n"
-        "      mov.<bwl>       @%1,%0"         "\n"
+        "      mov.<bwl>       %1,%0"          "\n"
         "      <fetchop_name>  %2,%0"          "\n"
-        "      mov.<bwl>       %0,@%1"         "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
         "      ldc     %3,sr";
 }
   [(set_attr "length" "16")])
 
+;; Combine pattern for xor (val, -1) / nand (val, -1).
+(define_insn "atomic_not_fetch<mode>_soft_imask"
+  [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&z")
+       (not:QIHISI (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")))
+   (set (match_dup 1)
+       (unspec:QIHISI [(not:QIHISI (match_dup 1))] UNSPEC_ATOMIC))
+   (clobber (match_scratch:SI 2 "=&r"))]
+  "TARGET_ATOMIC_SOFT_IMASK"
+{
+  return "\r   stc     sr,%0"                  "\n"
+        "      mov     %0,%2"                  "\n"
+        "      or      #0xF0,%0"               "\n"
+        "      ldc     %0,sr"                  "\n"
+        "      mov.<bwl>       %1,%0"          "\n"
+        "      not     %0,%0"                  "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
+        "      ldc     %2,sr";
+}
+  [(set_attr "length" "16")])
+
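+;; GCC's __atomic_nand_fetch computes ~(*mem & val), which is what the
+;; (not (and ...)) RTL below expresses.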
 (define_expand "atomic_nand_fetch<mode>"
   [(set (match_operand:QIHISI 0 "arith_reg_dest")
        (not:QIHISI (and:QIHISI
-         (match_operand:QIHISI 1 "memory_operand")
+         (match_operand:QIHISI 1 "atomic_mem_operand_1")
          (match_operand:QIHISI 2 "atomic_logical_operand_1"))))
    (set (match_dup 1)
        (unspec:QIHISI
    (match_operand:SI 3 "const_int_operand")]
   "TARGET_ATOMIC_ANY"
 {
-  rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
+  rtx mem = operands[1];
   rtx atomic_insn;
 
   if (TARGET_ATOMIC_HARD_LLCS
       || (TARGET_SH4A && <MODE>mode == SImode && !TARGET_ATOMIC_STRICT))
-    atomic_insn = gen_atomic_nand_fetch<mode>_hard (operands[0], addr,
+    atomic_insn = gen_atomic_nand_fetch<mode>_hard (operands[0], mem,
                                                    operands[2]);
   else if (TARGET_ATOMIC_SOFT_GUSA)
-    atomic_insn = gen_atomic_nand_fetch<mode>_soft_gusa (operands[0], addr,
+    atomic_insn = gen_atomic_nand_fetch<mode>_soft_gusa (operands[0], mem,
                                                         operands[2]);
   else if (TARGET_ATOMIC_SOFT_TCB)
-    atomic_insn = gen_atomic_nand_fetch<mode>_soft_tcb (operands[0], addr,
+    atomic_insn = gen_atomic_nand_fetch<mode>_soft_tcb (operands[0], mem,
                      operands[2], TARGET_ATOMIC_SOFT_TCB_GBR_OFFSET_RTX);
   else if (TARGET_ATOMIC_SOFT_IMASK)
-    atomic_insn = gen_atomic_nand_fetch<mode>_soft_imask (operands[0], addr,
+    atomic_insn = gen_atomic_nand_fetch<mode>_soft_imask (operands[0], mem,
                                                          operands[2]);
   else
     FAIL;
 
 (define_insn "atomic_nand_fetchsi_hard"
   [(set (match_operand:SI 0 "arith_reg_dest" "=&z")
-       (not:SI (and:SI (mem:SI (match_operand:SI 1 "arith_reg_operand" "r"))
+       (not:SI (and:SI (match_operand:SI 1 "atomic_mem_operand_1" "=Sra")
                        (match_operand:SI 2 "logical_operand" "rK08"))))
-   (set (mem:SI (match_dup 1))
+   (set (match_dup 1)
        (unspec:SI
-         [(not:SI (and:SI (mem:SI (match_dup 1)) (match_dup 2)))]
+         [(not:SI (and:SI (match_dup 1) (match_dup 2)))]
          UNSPEC_ATOMIC))
    (set (reg:SI T_REG) (const_int 1))]
   "TARGET_ATOMIC_HARD_LLCS
    || (TARGET_SH4A && TARGET_ATOMIC_ANY && !TARGET_ATOMIC_STRICT)"
 {
-  return "\r0: movli.l @%1,%0"         "\n"
+  return "\r0: movli.l %1,%0"          "\n"
         "      and     %2,%0"          "\n"
         "      not     %0,%0"          "\n"
-        "      movco.l %0,@%1"         "\n"
+        "      movco.l %0,%1"          "\n"
         "      bf      0b";
 }
   [(set_attr "length" "10")])
 
-(define_insn "atomic_nand_fetch<mode>_hard"
+;; The QIHImode llcs patterns modify the address register of the memory
+;; operand.  In order to express that, we have to open code the memory
+;; operand.  Initially the insn is expanded like every other atomic insn
+;; using the memory operand.  In split1 the insn is converted and the
+;; memory operand's address register is exposed.
+(define_insn_and_split "atomic_nand_fetch<mode>_hard"
+  [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
+       (not:QIHI (and:QIHI (match_operand:QIHI 1 "atomic_mem_operand_1")
+                           (match_operand:QIHI 2 "logical_operand"))))
+   (set (match_dup 1)
+       (unspec:QIHI [(not:QIHI (and:QIHI (match_dup 1) (match_dup 2)))]
+                    UNSPEC_ATOMIC))
+   (set (reg:SI T_REG) (const_int 1))
+   (clobber (reg:SI R0_REG))]
+  "TARGET_ATOMIC_HARD_LLCS && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(const_int 0)]
+{
+  if (optimize
+      && sh_reg_dead_or_unused_after_insn (curr_insn, REGNO (operands[0])))
+    emit_insn (gen_atomic_nand<mode>_hard (operands[1], operands[2]));
+  else
+    {
+      rtx i = gen_atomic_nand_fetch<mode>_hard_1 (
+                       operands[0], XEXP (operands[1], 0), operands[2]);
+
+      /* Replace the new mems in the new insn with the old mem to preserve
+        aliasing info.  */
+      XEXP (XEXP (XEXP (XVECEXP (i, 0, 0), 1), 0), 0) = operands[1];
+      XEXP (XVECEXP (i, 0, 1), 0) = operands[1];
+      XEXP (XEXP (XVECEXP (XEXP (XVECEXP (i, 0, 1), 1), 0, 0), 0),
+           0) = operands[1];
+      emit_insn (i);
+    }
+})
+
+(define_insn "atomic_nand_fetch<mode>_hard_1"
   [(set (match_operand:QIHI 0 "arith_reg_dest" "=&r")
        (not:QIHI
          (and:QIHI (mem:QIHI (match_operand:SI 1 "arith_reg_operand" "r"))
 (define_insn "atomic_nand_fetch<mode>_soft_gusa"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&u")
        (not:QIHISI (and:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "u"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=AraAdd")
          (match_operand:QIHISI 2 "arith_reg_operand" "u"))))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(not:QIHISI (and:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2)))]
+         [(not:QIHISI (and:QIHISI (match_dup 1) (match_dup 2)))]
          UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
    (clobber (reg:SI R1_REG))]
         "      .align 2"                       "\n"
         "      mov     r15,r1"                 "\n"
         "      mov     #(0f-1f),r15"           "\n"
-        "0:    mov.<bwl>       @%1,%0"         "\n"
+        "0:    mov.<bwl>       %1,%0"          "\n"
         "      and     %2,%0"                  "\n"
         "      not     %0,%0"                  "\n"
-        "      mov.<bwl>       %0,@%1"         "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
         "1:    mov     r1,r15";
 }
   [(set_attr "length" "18")])
 
-(define_insn "atomic_nand_fetch<mode>_soft_tcb"
+(define_insn_and_split "atomic_nand_fetch<mode>_soft_tcb"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&r")
        (not:QIHISI (and:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")
          (match_operand:QIHISI 2 "logical_operand" "rK08"))))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(not:QIHISI (and:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2)))]
+         [(not:QIHISI (and:QIHISI (match_dup 1) (match_dup 2)))]
          UNSPEC_ATOMIC))
    (clobber (reg:SI R0_REG))
    (clobber (reg:SI R1_REG))
         "      mov     #(0f-1f),r1"            "\n"
         "      .align 2"                       "\n"
         "      mov.l   r0,@(%O3,gbr)"          "\n"
-        "0:    mov.<bwl>       @%1,r0"         "\n"
+        "0:    mov.<bwl>       %1,r0"          "\n"
         "      and     %2,r0"                  "\n"
         "      not     r0,r0"                  "\n"
         "      mov     r0,%0"                  "\n"
-        "      mov.<bwl>       r0,@%1"         "\n"
+        "      mov.<bwl>       r0,%1"          "\n"
         "1:    mov     #0,r0"                  "\n"
         "      mov.l   r0,@(%O3,gbr)";
+}
+  "&& can_create_pseudo_p () && optimize
+   && sh_reg_dead_or_unused_after_insn (insn, REGNO (operands[0]))"
+  [(const_int 0)]
+{
+  emit_insn (gen_atomic_nand<mode>_soft_tcb (operands[1], operands[2],
+                                            operands[3]));
 }
   [(set_attr "length" "22")])
 
 (define_insn "atomic_nand_fetch<mode>_soft_imask"
   [(set (match_operand:QIHISI 0 "arith_reg_dest" "=&z")
        (not:QIHISI (and:QIHISI
-         (mem:QIHISI (match_operand:SI 1 "arith_reg_operand" "r"))
+         (match_operand:QIHISI 1 "atomic_mem_operand_1" "=SraSdd")
          (match_operand:QIHISI 2 "logical_operand" "rK08"))))
-   (set (mem:QIHISI (match_dup 1))
+   (set (match_dup 1)
        (unspec:QIHISI
-         [(not:QIHISI (and:QIHISI (mem:QIHISI (match_dup 1)) (match_dup 2)))]
+         [(not:QIHISI (and:QIHISI (match_dup 1) (match_dup 2)))]
          UNSPEC_ATOMIC))
    (clobber (match_scratch:SI 3 "=&r"))]
   "TARGET_ATOMIC_SOFT_IMASK"
         "      mov     %0,%3"                  "\n"
         "      or      #0xF0,%0"               "\n"
         "      ldc     %0,sr"                  "\n"
-        "      mov.<bwl>       @%1,%0"         "\n"
+        "      mov.<bwl>       %1,%0"          "\n"
         "      and     %2,%0"                  "\n"
         "      not     %0,%0"                  "\n"
-        "      mov.<bwl>       %0,@%1"         "\n"
+        "      mov.<bwl>       %0,%1"          "\n"
         "      ldc     %3,sr";
 }
   [(set_attr "length" "18")])
   [(match_operand:SI 0 "register_operand" "")          ;; bool result output
    (match_operand:QI 1 "memory_operand" "")            ;; memory
    (match_operand:SI 2 "const_int_operand" "")]                ;; model
-  "(TARGET_ATOMIC_ANY || TARGET_ENABLE_TAS) && !TARGET_SHMEDIA"
+  "TARGET_ATOMIC_ANY || TARGET_ENABLE_TAS"
 {
   rtx addr = force_reg (Pmode, XEXP (operands[1], 0));
 
               (const_int 0)))
    (set (mem:QI (match_dup 0))
        (unspec:QI [(const_int 128)] UNSPEC_ATOMIC))]
-  "TARGET_ENABLE_TAS && !TARGET_SHMEDIA"
+  "TARGET_ENABLE_TAS"
   "tas.b       @%0"
   [(set_attr "insn_class" "co_group")])