(match_operand:SI 2 "const_int_operand")] ;; Memory model.
UNSPEC_AADD))]
""
- "{xadd<mop>\t%0,%1|lock *(<smop> *)(%w0) += %w1}"
+ "{xadd<mop>\t%0,%1|lock *(<smop> *)%w0 += %w1}"
[(set_attr "type" "atomic")])
(define_insn "atomic_and<AMO:mode>"
(match_operand:SI 2 "const_int_operand")] ;; Memory model.
UNSPEC_AAND))]
"bpf_has_v3_atomics"
- "{aand<msuffix>\t%0,%1|lock *(<smop> *)(%w0) &= %w1}")
+ "{aand<msuffix>\t%0,%1|lock *(<smop> *)%w0 &= %w1}")
(define_insn "atomic_or<AMO:mode>"
[(set (match_operand:AMO 0 "memory_operand" "+m")
(match_operand:SI 2 "const_int_operand")] ;; Memory model.
UNSPEC_AOR))]
"bpf_has_v3_atomics"
- "{aor<msuffix>\t%0,%1|lock *(<smop> *)(%w0) %|= %w1}")
+ "{aor<msuffix>\t%0,%1|lock *(<smop> *)%w0 %|= %w1}")
(define_insn "atomic_xor<AMO:mode>"
[(set (match_operand:AMO 0 "memory_operand" "+m")
(match_operand:SI 2 "const_int_operand")] ;; Memory model.
UNSPEC_AXOR))]
"bpf_has_v3_atomics"
- "{axor<msuffix>\t%0,%1|lock *(<smop> *)(%w0) ^= %w1}")
+ "{axor<msuffix>\t%0,%1|lock *(<smop> *)%w0 ^= %w1}")
;;; Fetching (read-modify-store) versions of atomic operations.
(match_operand:AMO 3 "const_int_operand")] ;; Memory model
UNSPEC_AFADD))]
"bpf_has_v3_atomics"
- "{afadd<msuffix>\t%1,%0|%w0 = atomic_fetch_add((<smop> *)(%1), %w0)}")
+ "{afadd<msuffix>\t%1,%0|%w0 = atomic_fetch_add((<smop> *)%1, %w0)}")
(define_insn "atomic_fetch_and<AMO:mode>"
[(set (match_operand:AMO 0 "register_operand" "=r")
(match_operand:AMO 3 "const_int_operand")]
UNSPEC_AFAND))]
"bpf_has_v3_atomics"
- "{afand<msuffix>\t%1,%0|%w0 = atomic_fetch_and((<smop> *)(%1), %w0)}")
+ "{afand<msuffix>\t%1,%0|%w0 = atomic_fetch_and((<smop> *)%1, %w0)}")
(define_insn "atomic_fetch_or<AMO:mode>"
[(set (match_operand:AMO 0 "register_operand" "=r")
(match_operand:AMO 3 "const_int_operand")]
UNSPEC_AFOR))]
"bpf_has_v3_atomics"
- "{afor<msuffix>\t%1,%0|%w0 = atomic_fetch_or((<smop> *)(%1), %w0)}")
+ "{afor<msuffix>\t%1,%0|%w0 = atomic_fetch_or((<smop> *)%1, %w0)}")
(define_insn "atomic_fetch_xor<AMO:mode>"
[(set (match_operand:AMO 0 "register_operand" "=r")
(match_operand:AMO 3 "const_int_operand")]
UNSPEC_AFXOR))]
"bpf_has_v3_atomics"
- "{afxor<msuffix>\t%1,%0|%w0 = atomic_fetch_xor((<smop> *)(%1), %w0)}")
+ "{afxor<msuffix>\t%1,%0|%w0 = atomic_fetch_xor((<smop> *)%1, %w0)}")
;; Weird suffixes used in pseudo-c atomic compare-exchange insns.
(define_mode_attr pcaxsuffix [(SI "32_32") (DI "_64")])
switch (GET_CODE (addr))
{
case REG:
- if (asm_dialect == ASM_NORMAL)
- fprintf (file, "[");
+ fprintf (file, asm_dialect == ASM_NORMAL ? "[" : "(");
bpf_print_register (file, addr, 0);
- fprintf (file, asm_dialect == ASM_NORMAL ? "+0]" : "+0");
+ fprintf (file, asm_dialect == ASM_NORMAL ? "+0]" : "+0)");
break;
case PLUS:
{
|| (GET_CODE (op1) == UNSPEC
&& XINT (op1, 1) == UNSPEC_CORE_RELOC)))
{
- if (asm_dialect == ASM_NORMAL)
- fprintf (file, "[");
+ fprintf (file, asm_dialect == ASM_NORMAL ? "[" : "(");
bpf_print_register (file, op0, 0);
fprintf (file, "+");
if (GET_CODE (op1) == UNSPEC)
output_addr_const (file, XVECEXP (op1, 0, 0));
else
output_addr_const (file, op1);
- if (asm_dialect == ASM_NORMAL)
- fprintf (file, "]");
+ fprintf (file, asm_dialect == ASM_NORMAL ? "]" : ")");
}
else
fatal_insn ("invalid address in operand", addr);
"@
{and\t%0,0xffff|%0 &= 0xffff}
*return bpf_output_move (operands, \"{mov\t%0,%1\;and\t%0,0xffff|%0 = %1;%0 &= 0xffff}\");
- *return bpf_output_move (operands, \"{ldxh\t%0,%1|%0 = *(u16 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxh\t%0,%1|%0 = *(u16 *) %1}\");"
[(set_attr "type" "alu,alu,ldx")])
(define_insn "zero_extendqidi2"
"@
{and\t%0,0xff|%0 &= 0xff}
*return bpf_output_move (operands, \"{mov\t%0,%1\;and\t%0,0xff|%0 = %1;%0 &= 0xff}\");
- *return bpf_output_move (operands, \"{ldxb\t%0,%1|%0 = *(u8 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxb\t%0,%1|%0 = *(u8 *) %1}\");"
[(set_attr "type" "alu,alu,ldx")])
(define_insn "zero_extendsidi2"
""
"@
*return bpf_output_move (operands, bpf_has_alu32 ? \"{mov32\t%0,%1|%0 = %1}\" : \"{mov\t%0,%1\;and\t%0,0xffffffff|%0 = %1;%0 &= 0xffffffff}\");
- *return bpf_output_move (operands, \"{ldxw\t%0,%1|%0 = *(u32 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxw\t%0,%1|%0 = *(u32 *) %1}\");"
[(set_attr "type" "alu,ldx")])
;;; Sign-extension
"bpf_has_smov"
"@
*return bpf_output_move (operands, \"{movs\t%0,%1,32|%0 = (s32) %1}\");
- *return bpf_output_move (operands, \"{ldxsw\t%0,%1|%0 = *(s32 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxsw\t%0,%1|%0 = *(s32 *) %1}\");"
[(set_attr "type" "alu,ldx")])
(define_insn "extendhidi2"
"bpf_has_smov"
"@
*return bpf_output_move (operands, \"{movs\t%0,%1,16|%0 = (s16) %1}\");
- *return bpf_output_move (operands, \"{ldxsh\t%0,%1|%0 = *(s16 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxsh\t%0,%1|%0 = *(s16 *) %1}\");"
[(set_attr "type" "alu,ldx")])
(define_insn "extendqidi2"
"bpf_has_smov"
"@
*return bpf_output_move (operands, \"{movs\t%0,%1,8|%0 = (s8) %1}\");
- *return bpf_output_move (operands, \"{ldxsb\t%0,%1|%0 = *(s8 *) (%1)}\");"
+ *return bpf_output_move (operands, \"{ldxsb\t%0,%1|%0 = *(s8 *) %1}\");"
[(set_attr "type" "alu,ldx")])
(define_insn "extendhisi2"
(match_operand:MM 1 "mov_src_operand" " q,rIc,BC,r,I"))]
""
"@
- *return bpf_output_move (operands, \"{ldx<mop>\t%0,%1|%0 = *(<smop> *) (%1)}\");
+ *return bpf_output_move (operands, \"{ldx<mop>\t%0,%1|%0 = *(<smop> *) %1}\");
*return bpf_output_move (operands, \"{mov\t%0,%1|%0 = %1}\");
*return bpf_output_move (operands, \"{lddw\t%0,%1|%0 = %1 ll}\");
- *return bpf_output_move (operands, \"{stx<mop>\t%0,%1|*(<smop> *) (%0) = %1}\");
- *return bpf_output_move (operands, \"{st<mop>\t%0,%1|*(<smop> *) (%0) = %1}\");"
+ *return bpf_output_move (operands, \"{stx<mop>\t%0,%1|*(<smop> *) %0 = %1}\");
+ *return bpf_output_move (operands, \"{st<mop>\t%0,%1|*(<smop> *) %0 = %1}\");"
[(set_attr "type" "ldx,alu,alu,stx,st")])
;;;; Shifts