From: mengqinggang
Date: Tue, 25 Nov 2025 12:09:52 +0000 (+0800)
Subject: LoongArch: Machine description files for LA32
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=f5ae30e614c8fb2caf0874a8c3cf436e28860ef4;p=thirdparty%2Fgcc.git

LoongArch: Machine description files for LA32

gcc/ChangeLog:

	* config/loongarch/constraints.md: Disable k on LA32.
	* config/loongarch/loongarch.md (*and3): Delete.
	(*and3_extend): New.
	(zero_extend2_la32r): New.
	(extend2_la32r): New.
	(extendqihi2_la32r): New.
	(*movdi_32bit): Remove the non-working split; use the existing
	loongarch_split_move instead.
	(move_doubleword_2_): New.
	(load_low): New.
	(load_high): New.
	(store_word): New.
	(movgr2frh): New.
	(movfrh2gr): New.
	* config/loongarch/predicates.md: Disable low_bitmask_operand
	and ins_zero_bitmask_operand on LA32.  Enable
	const_call_insn_operand on LA32.
	* config/loongarch/sync.md (atomic_): Change to define_expand.
	(la64_atomic_): New.
	(la32_atomic_si): New.
	(atomic_fetch_): Change to define_expand.
	(la64_atomic_fetch_): New.
	(la32_atomic_fetch_si): New.
	(atomic_exchange): Change to define_expand.
	(la64_atomic_exchange): New.
	(la32_atomic_exchangesi): New.

Co-authored-by: Jiajie Chen
Reviewed-by: Xi Ruoyao
Reviewed-by: Lulu Cheng
---

diff --git a/gcc/config/loongarch/constraints.md b/gcc/config/loongarch/constraints.md
index 82bf1d80b1f..1ca9a752580 100644
--- a/gcc/config/loongarch/constraints.md
+++ b/gcc/config/loongarch/constraints.md
@@ -133,6 +133,7 @@
   "A memory operand whose address is formed by a base register and
    (optionally scaled) index register."
   (and (match_code "mem")
+       (match_test "TARGET_64BIT")
        (match_test "loongarch_base_index_address_p (XEXP (op, 0), mode)")))
 
 (define_constraint "l"
diff --git a/gcc/config/loongarch/loongarch.md b/gcc/config/loongarch/loongarch.md
index 763d514cac7..39e315c818b 100644
--- a/gcc/config/loongarch/loongarch.md
+++ b/gcc/config/loongarch/loongarch.md
@@ -409,6 +409,12 @@
 (define_mode_iterator ANYFI [(SI "TARGET_HARD_FLOAT")
			      (DI "TARGET_DOUBLE_FLOAT")])
 
+;; A mode for which moves involving FPRs may need to be split.
+(define_mode_iterator SPLITF
+  [(DF "!TARGET_64BIT && TARGET_DOUBLE_FLOAT")
+   (DI "!TARGET_64BIT && TARGET_DOUBLE_FLOAT")
+   (TF "TARGET_64BIT && TARGET_DOUBLE_FLOAT")])
+
 ;; A mode for anything with 32 bits or more, and able to be loaded with
 ;; the same addressing mode as ld.w.
 (define_mode_iterator LD_AT_LEAST_32_BIT [GPR ANYF])
@@ -633,7 +639,7 @@
 ;; so the redundant sign extension can be removed if the output is used as
 ;; an input of a bitwise operation.  Note plus, rotl, and div are handled
 ;; separately.
-(define_code_iterator shift_w [any_shift rotatert]) +(define_code_iterator shift_w [any_shift (rotatert "!TARGET_32BIT_R")]) (define_code_iterator arith_w [minus mult]) (define_expand "3" @@ -743,30 +749,33 @@ [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r") (plus:SI (match_operand:SI 1 "register_operand" "r,r,r,r,r") (match_operand:SI 2 "plus_si_operand" "r,I,La,Le,Lb")))] - "TARGET_64BIT" + "" { - if (CONST_INT_P (operands[2]) && !IMM12_INT (operands[2]) - && ADDU16I_OPERAND (INTVAL (operands[2]))) - { - rtx t1 = gen_reg_rtx (DImode); - rtx t2 = gen_reg_rtx (DImode); - rtx t3 = gen_reg_rtx (DImode); - emit_insn (gen_extend_insn (t1, operands[1], DImode, SImode, 0)); - t2 = operands[2]; - emit_insn (gen_adddi3 (t3, t1, t2)); - t3 = gen_lowpart (SImode, t3); - emit_move_insn (operands[0], t3); - DONE; - } - else + if (TARGET_64BIT) { - rtx t = gen_reg_rtx (DImode); - emit_insn (gen_addsi3_extended (t, operands[1], operands[2])); - t = gen_lowpart (SImode, t); - SUBREG_PROMOTED_VAR_P (t) = 1; - SUBREG_PROMOTED_SET (t, SRP_SIGNED); - emit_move_insn (operands[0], t); - DONE; + if (CONST_INT_P (operands[2]) && !IMM12_INT (operands[2]) + && ADDU16I_OPERAND (INTVAL (operands[2]))) + { + rtx t1 = gen_reg_rtx (DImode); + rtx t2 = gen_reg_rtx (DImode); + rtx t3 = gen_reg_rtx (DImode); + emit_insn (gen_extend_insn (t1, operands[1], DImode, SImode, 0)); + t2 = operands[2]; + emit_insn (gen_adddi3 (t3, t1, t2)); + t3 = gen_lowpart (SImode, t3); + emit_move_insn (operands[0], t3); + DONE; + } + else + { + rtx t = gen_reg_rtx (DImode); + emit_insn (gen_addsi3_extended (t, operands[1], operands[2])); + t = gen_lowpart (SImode, t); + SUBREG_PROMOTED_VAR_P (t) = 1; + SUBREG_PROMOTED_SET (t, SRP_SIGNED); + emit_move_insn (operands[0], t); + DONE; + } } }) @@ -1376,7 +1385,7 @@ (unspec:ANYF [(match_operand:ANYF 1 "register_operand" "f") (match_operand: 2 "register_operand" "f")] UNSPEC_FSCALEB))] - "TARGET_HARD_FLOAT" + "TARGET_64BIT" "fscaleb.\t%0,%1,%2" [(set_attr "type" "fscaleb") (set_attr "mode" "")]) @@ -1392,7 +1401,7 @@ [(set (match_operand:ANYF 0 "register_operand" "=f") (unspec:ANYF [(match_operand:ANYF 1 "register_operand" "f")] UNSPEC_FLOGB))] - "TARGET_HARD_FLOAT" + "TARGET_64BIT" "flogb.\t%0,%1" [(set_attr "type" "flogb") (set_attr "mode" "")]) @@ -1401,7 +1410,7 @@ [(set (match_operand:ANYF 0 "register_operand") (unspec:ANYF [(abs:ANYF (match_operand:ANYF 1 "register_operand"))] UNSPEC_FLOGB))] - "TARGET_HARD_FLOAT" + "TARGET_64BIT" { rtx tmp = gen_reg_rtx (mode); @@ -1421,7 +1430,7 @@ (define_insn "clz2" [(set (match_operand:GPR 0 "register_operand" "=r") (clz:GPR (match_operand:GPR 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "clz.\t%0,%1" [(set_attr "type" "clz") (set_attr "mode" "")]) @@ -1437,7 +1446,7 @@ (define_insn "ctz2" [(set (match_operand:GPR 0 "register_operand" "=r") (ctz:GPR (match_operand:GPR 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "ctz.\t%0,%1" [(set_attr "type" "clz") (set_attr "mode" "")]) @@ -1553,28 +1562,26 @@ (define_insn "*3" [(set (match_operand:GPR 0 "register_operand" "=r,r") - (any_or:GPR (match_operand:GPR 1 "register_operand" "%r,r") - (match_operand:GPR 2 "uns_arith_operand" "r,K")))] + (any_bitwise:GPR (match_operand:GPR 1 "register_operand" "%r,r") + (match_operand:GPR 2 "uns_arith_operand" "r,K")))] "" "%i2\t%0,%1,%2" [(set_attr "type" "logical") (set_attr "mode" "")]) -(define_insn "*and3" - [(set (match_operand:GPR 0 "register_operand" "=r,r,r,r") - (and:GPR (match_operand:GPR 1 
"register_operand" "%r,r,r,0") - (match_operand:GPR 2 "and_operand" "r,K,Yx,Yy")))] - "" +(define_insn "*and3_extend" + [(set (match_operand:GPR 0 "register_operand" "=r,r") + (and:GPR (match_operand:GPR 1 "register_operand" "%r,0") + (match_operand:GPR 2 "and_operand" "Yx,Yy")))] + "TARGET_64BIT || TARGET_32BIT_S" "@ - and\t%0,%1,%2 - andi\t%0,%1,%2 * operands[2] = GEN_INT (INTVAL (operands[2]) \ & GET_MODE_MASK (mode)); \ return \"bstrpick.\t%0,%1,%M2\"; * operands[2] = GEN_INT (~INTVAL (operands[2]) \ & GET_MODE_MASK (mode)); \ return \"bstrins.\t%0,%.,%M2\";" - [(set_attr "move_type" "logical,logical,pick_ins,pick_ins") + [(set_attr "move_type" "pick_ins,pick_ins") (set_attr "mode" "")]) (define_expand "3" @@ -1606,7 +1613,8 @@ (match_operand:GPR 2 "const_int_operand" "i")) (and:GPR (match_operand:GPR 3 "register_operand" "r") (match_operand:GPR 4 "const_int_operand" "i"))))] - "loongarch_pre_reload_split () + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_pre_reload_split () && loongarch_use_bstrins_for_ior_with_mask (mode, operands)" "#" "&& true" @@ -1734,7 +1742,7 @@ (match_operand:SI 3 "const_int_operand") (const_int 0)) (match_dup 0))] - "peep2_reg_dead_p (3, operands[0])" + "(TARGET_64BIT || TARGET_32BIT_S) && peep2_reg_dead_p (3, operands[0])" [(const_int 0)] { int len = GET_MODE_BITSIZE (mode) - INTVAL (operands[3]); @@ -1795,7 +1803,7 @@ (neg_bitwise:X (not:X (match_operand:X 2 "register_operand" "r")) (match_operand:X 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "n\t%0,%1,%2" [(set_attr "type" "logical") (set_attr "mode" "")]) @@ -1894,7 +1902,7 @@ [(set (match_operand:GPR 0 "register_operand" "=r,r,r") (zero_extend:GPR (match_operand:SHORT 1 "nonimmediate_operand" "r,m,k")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "@ bstrpick.w\t%0,%1,,0 ld.u\t%0,%1 @@ -1902,6 +1910,15 @@ [(set_attr "move_type" "pick_ins,load,load") (set_attr "mode" "")]) +(define_insn "zero_extend2_la32r" + [(set (match_operand:GPR 0 "register_operand" "=r") + (zero_extend:GPR + (match_operand:SHORT 1 "nonimmediate_operand" "m")))] + "TARGET_32BIT_R" + "ld.u\t%0,%1" + [(set_attr "move_type" "load") + (set_attr "mode" "")]) + (define_insn "zero_extendqihi2" [(set (match_operand:HI 0 "register_operand" "=r,r,r") (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "r,k,m")))] @@ -1919,7 +1936,7 @@ [(set (match_operand:GPR 0 "register_operand" "=r") (zero_extend:GPR (truncate:SHORT (match_operand:DI 1 "register_operand" "r"))))] - "TARGET_64BIT" + "TARGET_64BIT || TARGET_32BIT_S" "bstrpick.w\t%0,%1,,0" [(set_attr "move_type" "pick_ins") (set_attr "mode" "")]) @@ -1958,7 +1975,7 @@ [(set (match_operand:GPR 0 "register_operand" "=r,r,r") (sign_extend:GPR (match_operand:SHORT 1 "nonimmediate_operand" "r,m,k")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "@ ext.w.\t%0,%1 ld.\t%0,%1 @@ -1966,11 +1983,20 @@ [(set_attr "move_type" "signext,load,load") (set_attr "mode" "")]) +(define_insn "extend2_la32r" + [(set (match_operand:GPR 0 "register_operand" "=r") + (sign_extend:GPR + (match_operand:SHORT 1 "nonimmediate_operand" "m")))] + "TARGET_32BIT_R" + "ld.\t%0,%1" + [(set_attr "move_type" "load") + (set_attr "mode" "")]) + (define_insn "extendqihi2" [(set (match_operand:HI 0 "register_operand" "=r,r,r") (sign_extend:HI (match_operand:QI 1 "nonimmediate_operand" "r,m,k")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "@ ext.w.b\t%0,%1 ld.b\t%0,%1 @@ -1978,6 +2004,15 @@ [(set_attr "move_type" "signext,load,load") (set_attr "mode" "SI")]) +(define_insn "extendqihi2_la32r" + [(set 
(match_operand:HI 0 "register_operand" "=r") + (sign_extend:HI + (match_operand:QI 1 "nonimmediate_operand" "m")))] + "" + "ld.b\t%0,%1" + [(set_attr "move_type" "load") + (set_attr "mode" "SI")]) + (define_insn "extendsfdf2" [(set (match_operand:DF 0 "register_operand" "=f") (float_extend:DF (match_operand:SF 1 "register_operand" "f")))] @@ -2090,7 +2125,7 @@ (define_expand "fixuns_truncdfdi2" [(set (match_operand:DI 0 "register_operand") (unsigned_fix:DI (match_operand:DF 1 "register_operand")))] - "TARGET_DOUBLE_FLOAT" + "TARGET_64BIT && TARGET_DOUBLE_FLOAT" { rtx reg1 = gen_reg_rtx (DFmode); rtx reg2 = gen_reg_rtx (DFmode); @@ -2172,7 +2207,7 @@ (define_expand "fixuns_truncsfdi2" [(set (match_operand:DI 0 "register_operand") (unsigned_fix:DI (match_operand:SF 1 "register_operand")))] - "TARGET_DOUBLE_FLOAT" + "TARGET_64BIT && TARGET_DOUBLE_FLOAT" { rtx reg1 = gen_reg_rtx (SFmode); rtx reg2 = gen_reg_rtx (SFmode); @@ -2222,7 +2257,7 @@ (zero_extract:X (match_operand:X 1 "register_operand") (match_operand 2 "const_int_operand") (match_operand 3 "const_int_operand")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" { if (!loongarch_use_ins_ext_p (operands[1], INTVAL (operands[2]), INTVAL (operands[3]))) @@ -2232,10 +2267,11 @@ (define_insn "*extzv" [(set (match_operand:X 0 "register_operand" "=r") (zero_extract:X (match_operand:X 1 "register_operand" "r") - (match_operand 2 "const_int_operand" "") - (match_operand 3 "const_int_operand" "")))] - "loongarch_use_ins_ext_p (operands[1], INTVAL (operands[2]), - INTVAL (operands[3]))" + (match_operand 2 "const_int_operand" "") + (match_operand 3 "const_int_operand" "")))] + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_use_ins_ext_p (operands[1], INTVAL (operands[2]), + INTVAL (operands[3]))" { operands[2] = GEN_INT (INTVAL (operands[2]) + INTVAL (operands[3]) - 1); return "bstrpick.\t%0,%1,%2,%3"; @@ -2248,7 +2284,7 @@ (match_operand 1 "const_int_operand") (match_operand 2 "const_int_operand")) (match_operand:GPR 3 "reg_or_0_operand"))] - "" + "TARGET_64BIT || TARGET_32BIT_S" { if (!loongarch_use_ins_ext_p (operands[0], INTVAL (operands[1]), INTVAL (operands[2]))) @@ -2260,8 +2296,9 @@ (match_operand:SI 1 "const_int_operand" "") (match_operand:SI 2 "const_int_operand" "")) (match_operand:GPR 3 "reg_or_0_operand" "rJ"))] - "loongarch_use_ins_ext_p (operands[0], INTVAL (operands[1]), - INTVAL (operands[2]))" + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_use_ins_ext_p (operands[0], INTVAL (operands[1]), + INTVAL (operands[2]))" { operands[1] = GEN_INT (INTVAL (operands[1]) + INTVAL (operands[2]) - 1); return "bstrins.\t%0,%z3,%1,%2"; @@ -2291,24 +2328,28 @@ DONE; }) -(define_insn_and_split "*movdi_32bit" +(define_insn "*movdi_32bit" [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r,r,w,*f,*f,*r,*m") (match_operand:DI 1 "move_operand" "r,i,w,r,*J*r,*m,*f,*f"))] "!TARGET_64BIT && (register_operand (operands[0], DImode) || reg_or_0_operand (operands[1], DImode))" { return loongarch_output_move (operands); } - "CONST_INT_P (operands[1]) && REG_P (operands[0]) && GP_REG_P (REGNO - (operands[0]))" + [(set_attr "move_type" "move,const,load,store,mgtf,fpload,mftg,fpstore") + (set_attr "mode" "DI")]) + +;; Split 64-bit move in LoongArch32 + +(define_split + [(set (match_operand:MOVE64 0 "nonimmediate_operand") + (match_operand:MOVE64 1 "move_operand"))] + "TARGET_32BIT && reload_completed + && loongarch_split_move_p (operands[0], operands[1])" [(const_int 0)] - " { - loongarch_move_integer (operands[0], operands[0], INTVAL (operands[1])); + 
loongarch_split_move (operands[0], operands[1]); DONE; -} - " - [(set_attr "move_type" "move,const,load,store,mgtf,fpload,mftg,fpstore") - (set_attr "mode" "DI")]) +}) (define_insn_and_split "*movdi_64bit" [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r,r,w,*f,*f,*r,*m") @@ -2319,8 +2360,8 @@ { return loongarch_output_move (operands); } - "CONST_INT_P (operands[1]) && REG_P (operands[0]) && GP_REG_P (REGNO - (operands[0]))" + "&& CONST_INT_P (operands[1]) && REG_P (operands[0]) + && GP_REG_P (REGNO (operands[0]))" [(const_int 0)] " { @@ -2378,7 +2419,7 @@ (unspec:DI [(match_operand:DI 2 "") (pc)] UNSPEC_LA_PCREL_64_PART1)) (set (match_operand:DI 1 "register_operand" "=r") (unspec:DI [(match_dup 2) (pc)] UNSPEC_LA_PCREL_64_PART2))] - "TARGET_ABI_LP64 && la_opt_explicit_relocs != EXPLICIT_RELOCS_NONE" + "TARGET_64BIT && la_opt_explicit_relocs != EXPLICIT_RELOCS_NONE" { return "pcalau12i\t%0,%r2\n\t" "addi.d\t%1,$r0,%L2\n\t" @@ -2396,6 +2437,15 @@ (match_operand:SI 1 ""))] "" { + if (TARGET_32BIT + && ((MEM_P (operands[0]) + && loongarch_14bit_shifted_offset_address_p (XEXP (operands[0], 0), SImode) + && !loongarch_12bit_offset_address_p (XEXP (operands[0], 0), SImode)) + || (MEM_P (operands[1]) + && loongarch_14bit_shifted_offset_address_p (XEXP (operands[1], 0), SImode) + && !loongarch_12bit_offset_address_p (XEXP (operands[1], 0), SImode)))) + FAIL; + if (loongarch_legitimize_move (SImode, operands[0], operands[1])) DONE; }) @@ -2404,10 +2454,17 @@ [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,w,f,f,r,*m") (match_operand:SI 1 "move_operand" "r,Yd,w,rJ,rJ,m,f,*f"))] "(register_operand (operands[0], SImode) - || reg_or_0_operand (operands[1], SImode))" + || reg_or_0_operand (operands[1], SImode)) + && !(TARGET_32BIT + && ((MEM_P (operands[0]) + && loongarch_14bit_shifted_offset_address_p (XEXP (operands[0], 0), SImode) + && !loongarch_12bit_offset_address_p (XEXP (operands[0], 0), SImode)) + || (MEM_P (operands[1]) + && loongarch_14bit_shifted_offset_address_p (XEXP (operands[1], 0), SImode) + && !loongarch_12bit_offset_address_p (XEXP (operands[1], 0), SImode))))" { return loongarch_output_move (operands); } - "CONST_INT_P (operands[1]) && REG_P (operands[0]) && GP_REG_P (REGNO - (operands[0]))" + "&& CONST_INT_P (operands[1]) && REG_P (operands[0]) + && GP_REG_P (REGNO (operands[0]))" [(const_int 0)] " { @@ -2482,7 +2539,7 @@ (define_expand "movsf" [(set (match_operand:SF 0 "") (match_operand:SF 1 ""))] - "" + "TARGET_HARD_FLOAT" { if (loongarch_legitimize_move (SFmode, operands[0], operands[1])) DONE; @@ -2513,7 +2570,7 @@ (define_expand "movdf" [(set (match_operand:DF 0 "") (match_operand:DF 1 ""))] - "" + "TARGET_DOUBLE_FLOAT" { if (loongarch_legitimize_move (DFmode, operands[0], operands[1])) DONE; @@ -2540,6 +2597,41 @@ [(set_attr "move_type" "move,load,store") (set_attr "mode" "DF")]) +;; Emit a doubleword move in which exactly one of the operands is +;; a floating-point register. We can't just emit two normal moves +;; because of the constraints imposed by the FPU register model; +;; see loongarch_can_change_mode_class for details. Instead, we keep +;; the FPR whole and use special patterns to refer to each word of +;; the other operand. 
+ +(define_expand "move_doubleword_2_" + [(set (match_operand:SPLITF 0) + (match_operand:SPLITF 1))] + "" +{ + if (FP_REG_RTX_P (operands[0])) + { + rtx low = loongarch_subword (operands[1], 0); + rtx high = loongarch_subword (operands[1], 1); + emit_insn (gen_load_low (operands[0], low)); + if (!TARGET_64BIT) + emit_insn (gen_movgr2frh (operands[0], high, operands[0])); + else + emit_insn (gen_load_high (operands[0], high, operands[0])); + } + else + { + rtx low = loongarch_subword (operands[0], 0); + rtx high = loongarch_subword (operands[0], 1); + emit_insn (gen_store_word (low, operands[1], const0_rtx)); + if (!TARGET_64BIT) + emit_insn (gen_movfrh2gr (high, operands[1])); + else + emit_insn (gen_store_word (high, operands[1], const1_rtx)); + } + DONE; +}) + ;; Clear one FCC register (define_expand "movfcc" @@ -2618,8 +2710,9 @@ (const_int 0)) (match_operand:GPR 2 "reg_or_0_operand" "r,J") (match_operand:GPR 3 "reg_or_0_operand" "J,r")))] - "register_operand (operands[2], mode) - != register_operand (operands[3], mode)" + "(TARGET_64BIT || TARGET_32BIT_S) + && register_operand (operands[2], mode) + != register_operand (operands[3], mode)" "@ \t%0,%2,%1 \t%0,%3,%1" @@ -2681,7 +2774,7 @@ (if_then_else:GPR (match_operator 1 "comparison_operator" [(match_operand:GPR 2 "reg_or_0_operand") (match_operand:GPR 3 "reg_or_0_operand")])))] - "TARGET_COND_MOVE_INT" + "(TARGET_64BIT || TARGET_32BIT_S) && TARGET_COND_MOVE_INT" { if (!INTEGRAL_MODE_P (GET_MODE (XEXP (operands[1], 0)))) FAIL; @@ -2786,7 +2879,7 @@ [(set (match_operand:P 0 "register_operand" "=j") (unspec:P [(match_operand:P 1 "symbolic_operand" "")] UNSPEC_PCALAU12I))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "pcalau12i\t%0,%%pc_hi20(%1)" [(set_attr "type" "move")]) @@ -2796,7 +2889,7 @@ [(set (match_operand:P 0 "register_operand" "=r") (unspec:P [(match_operand:P 1 "symbolic_operand" "")] UNSPEC_PCALAU12I_GR))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "pcalau12i\t%0,%%pc_hi20(%1)" [(set_attr "type" "move")]) @@ -2846,7 +2939,7 @@ [(set (match_operand:ANYF 0 "register_operand" "=f") (unspec:ANYF [(match_operand:ANYF 1 "register_operand" "f")] UNSPEC_FRINT))] - "" + "TARGET_64BIT" "frint.\t%0,%1" [(set_attr "type" "fcvt") (set_attr "mode" "")]) @@ -2864,6 +2957,71 @@ [(set_attr "type" "fcvt") (set_attr "mode" "")]) +;; Load the low word of operand 0 with operand 1. +(define_insn "load_low" + [(set (match_operand:SPLITF 0 "register_operand" "=f,f") + (unspec:SPLITF [(match_operand: 1 "general_operand" "rJ,m")] + UNSPEC_LOAD_LOW))] + "TARGET_HARD_FLOAT" +{ + operands[0] = loongarch_subword (operands[0], 0); + return loongarch_output_move (operands); +} + [(set_attr "move_type" "mgtf,fpload") + (set_attr "mode" "")]) + +;; Load the high word of operand 0 from operand 1, preserving the value +;; in the low word. +(define_insn "load_high" + [(set (match_operand:SPLITF 0 "register_operand" "=f,f") + (unspec:SPLITF [(match_operand: 1 "general_operand" "rJ,m") + (match_operand:SPLITF 2 "register_operand" "0,0")] + UNSPEC_LOAD_HIGH))] + "TARGET_HARD_FLOAT" +{ + operands[0] = loongarch_subword (operands[0], 1); + return loongarch_output_move (operands); +} + [(set_attr "move_type" "mgtf,fpload") + (set_attr "mode" "")]) + +;; Store one word of operand 1 in operand 0. Operand 2 is 1 to store the +;; high word and 0 to store the low word. 
+(define_insn "store_word" + [(set (match_operand: 0 "nonimmediate_operand" "=r,m") + (unspec: [(match_operand:SPLITF 1 "register_operand" "f,f") + (match_operand 2 "const_int_operand")] + UNSPEC_STORE_WORD))] + "TARGET_HARD_FLOAT" +{ + operands[1] = loongarch_subword (operands[1], INTVAL (operands[2])); + return loongarch_output_move (operands); +} + [(set_attr "move_type" "mftg,fpstore") + (set_attr "mode" "")]) + +;; Move operand 1 to the high word of operand 0 using movgr2frh.w, preserving the +;; value in the low word. +(define_insn "movgr2frh" + [(set (match_operand:SPLITF 0 "register_operand" "=f") + (unspec:SPLITF [(match_operand: 1 "reg_or_0_operand" "rJ") + (match_operand:SPLITF 2 "register_operand" "0")] + UNSPEC_MOVGR2FRH))] + "TARGET_DOUBLE_FLOAT" + "movgr2frh.w\t%0,%z1" + [(set_attr "move_type" "mgtf") + (set_attr "mode" "")]) + +;; Move high word of operand 1 to operand 0 using movfrh2gr.s. +(define_insn "movfrh2gr" + [(set (match_operand: 0 "register_operand" "=r") + (unspec: [(match_operand:SPLITF 1 "register_operand" "f")] + UNSPEC_MOVFRH2GR))] + "TARGET_DOUBLE_FLOAT" + "movfrh2gr.s\t%0,%1" + [(set_attr "move_type" "mftg") + (set_attr "mode" "")]) + ;; Thread-Local Storage (define_insn "@got_load_tls_desc" @@ -2879,13 +3037,13 @@ (clobber (reg:SI FCC5_REGNUM)) (clobber (reg:SI FCC6_REGNUM)) (clobber (reg:SI FCC7_REGNUM)) - (clobber (reg:SI RETURN_ADDR_REGNUM))] + (clobber (reg:P RETURN_ADDR_REGNUM))] "TARGET_TLS_DESC" { return TARGET_EXPLICIT_RELOCS ? "pcalau12i\t$r4,%%desc_pc_hi20(%0)\n\t" - "addi.d\t$r4,$r4,%%desc_pc_lo12(%0)\n\t" - "ld.d\t$r1,$r4,%%desc_ld(%0)\n\t" + "addi.\t$r4,$r4,%%desc_pc_lo12(%0)\n\t" + "ld.\t$r1,$r4,%%desc_ld(%0)\n\t" "jirl\t$r1,$r1,%%desc_call(%0)" : "la.tls.desc\t$r4,%0"; } @@ -3102,7 +3260,7 @@ (match_operand 2 "const_uimm5_operand")] UNSPECV_LDDIR) (clobber (mem:BLK (scratch)))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "lddir\t%0,%1,%2" [(set_attr "type" "load") (set_attr "mode" "")]) @@ -3112,7 +3270,7 @@ (match_operand 1 "const_uimm5_operand")] UNSPECV_LDPTE) (clobber (mem:BLK (scratch)))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "ldpte\t%0,%1" [(set_attr "type" "load") (set_attr "mode" "")]) @@ -3190,7 +3348,7 @@ [(set (match_operand:GPR 0 "register_operand" "=r,r") (rotatert:GPR (match_operand:GPR 1 "register_operand" "r,r") (match_operand:SI 2 "arith_operand" "r,I")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "rotr%i2.\t%0,%1,%2" [(set_attr "type" "shift,shift") (set_attr "mode" "")]) @@ -3200,7 +3358,7 @@ (sign_extend:DI (rotatert:SI (match_operand:SI 1 "register_operand" "r,r") (match_operand:SI 2 "arith_operand" "r,I"))))] - "TARGET_64BIT" + "TARGET_64BIT || TARGET_32BIT_S" "rotr%i2.w\t%0,%1,%2" [(set_attr "type" "shift,shift") (set_attr "mode" "SI")]) @@ -3212,7 +3370,7 @@ (set (match_operand:GPR 0 "register_operand") (rotatert:GPR (match_operand:GPR 1 "register_operand") (match_dup 3)))] - "" + "TARGET_64BIT || TARGET_32BIT_S" { operands[3] = gen_reg_rtx (SImode); @@ -3235,7 +3393,7 @@ (plus:GPR (ashift:GPR (match_operand:GPR 1 "register_operand" "r") (match_operand 2 "const_immalsl_operand" "")) (match_operand:GPR 3 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "alsl.\t%0,%1,%3,%2" [(set_attr "type" "arith") (set_attr "mode" "")]) @@ -3278,8 +3436,9 @@ (ashift:X (match_operand:X 1 "register_operand" "r") (match_operand:SI 2 "const_int_operand" "i")) (match_operand:X 3 "const_int_operand" "i")))] - "(const_immalsl_operand (operands[2], SImode) - || ! 
(operands[3], mode)) + "TARGET_64BIT + && (const_immalsl_operand (operands[2], SImode) + || ! (operands[3], mode)) && loongarch_reassoc_shift_bitwise (, operands[2], operands[3], mode)" "#" @@ -3360,7 +3519,7 @@ [(set (match_operand:SI 0 "register_operand" "=r") (rotatert:SI (bswap:SI (match_operand:SI 1 "register_operand" "r")) (const_int 16)))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "revb.2h\t%0,%1" [(set_attr "type" "shift")]) @@ -3370,14 +3529,14 @@ (rotatert:SI (bswap:SI (match_operand:SI 1 "register_operand" "r")) (const_int 16))))] - "TARGET_64BIT" + "TARGET_64BIT || TARGET_32BIT_S" "revb.2h\t%0,%1" [(set_attr "type" "shift")]) (define_insn "bswaphi2" [(set (match_operand:HI 0 "register_operand" "=r") (bswap:HI (match_operand:HI 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "revb.2h\t%0,%1" [(set_attr "type" "shift")]) @@ -3399,9 +3558,9 @@ (define_expand "bswapsi2" [(set (match_operand:SI 0 "register_operand" "=r") (bswap:SI (match_operand:SI 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" { - if (!TARGET_64BIT) + if (TARGET_32BIT_S) { rtx t = gen_reg_rtx (SImode); emit_insn (gen_revb_2h (t, operands[1])); @@ -3565,7 +3724,7 @@ (match_operator:SI 1 "loongarch_cstore_operator" [(match_operand:GPR 2 "register_operand") (match_operand:GPR 3 "nonmemory_operand")]))] - "" + "TARGET_64BIT" { loongarch_expand_scc (operands); DONE; @@ -3933,12 +4092,18 @@ return "jr\t%0"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%0)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%0)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%0)\n\tjirl\t$r0,$r12,0"; else return "b\t%0"; case 2: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%0)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%0)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%0)\n\tjirl\t$r0,$r12,0"; else return "b\t%%plt(%0)"; default: @@ -4026,12 +4191,18 @@ return "jr\t%1"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%1)\n\tjirl\t$r0,$r12,0"; else return "b\t%1"; case 2: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%1)\n\tjirl\t$r0,$r12,0"; else return "b\t%%plt(%1)"; default: @@ -4064,12 +4235,18 @@ return "jr\t%1"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%1)\n\tjirl\t$r0,$r12,0"; else return "b\t%1"; case 2: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r12,%%call36(%1)\n\tjirl\t$r0,$r12,0"; + else + return "pcaddu12i\t$r12,%%call30(%1)\n\tjirl\t$r0,$r12,0"; else return "b\t%%plt(%1)"; default: @@ -4122,12 +4299,18 @@ return "jirl\t$r1,%0,0"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r1,%%call36(%0)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%0)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%0)\n\tjirl\t$r1,$r1,0"; else return "bl\t%0"; case 2: if (TARGET_CMODEL_MEDIUM) - return 
"pcaddu18i\t$r1,%%call36(%0)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%0)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%0)\n\tjirl\t$r1,$r1,0"; else return "bl\t%%plt(%0)"; default: @@ -4199,12 +4382,18 @@ return "jirl\t$r1,%1,0"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%1)\n\tjirl\t$r1,$r1,0"; else return "bl\t%1"; case 2: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%1)\n\tjirl\t$r1,$r1,0"; else return "bl\t%%plt(%1)"; default: @@ -4239,12 +4428,18 @@ return "jirl\t$r1,%1,0"; case 1: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%1)\n\tjirl\t$r1,$r1,0"; else return "bl\t%1"; case 2: if (TARGET_CMODEL_MEDIUM) - return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + if (TARGET_64BIT) + return "pcaddu18i\t$r1,%%call36(%1)\n\tjirl\t$r1,$r1,0"; + else + return "pcaddu12i\t$r1,%%call30(%1)\n\tjirl\t$r1,$r1,0"; else return "bl\t%%plt(%1)"; default: @@ -4393,7 +4588,7 @@ (const_int )) (ashift:SI (match_operand:SI 2 "register_operand" "r") (const_int bytepick_w_ashift_amount))))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "bytepick.w\t%0,%1,%2," [(set_attr "mode" "SI")]) @@ -4411,7 +4606,7 @@ (zero_extract:DI (match_operand:DI 2 "register_operand" "r") (const_int ) (const_int ))))] - "TARGET_64BIT" + "TARGET_64BIT || TARGET_32BIT_S" "bytepick.w\t%0,%2,%1," [(set_attr "mode" "SI")]) @@ -4426,7 +4621,7 @@ (zero_extract:DI (match_operand:DI 2 "register_operand" "r") (const_int 8) (const_int 24))))] - "TARGET_64BIT" + "TARGET_64BIT || TARGET_32BIT_S" "bytepick.w\t%0,%2,%1,1" [(set_attr "mode" "SI")]) @@ -4456,7 +4651,7 @@ [(set (match_operand:SI 0 "register_operand" "=r") (unspec:SI [(match_operand:SI 1 "register_operand" "r")] UNSPEC_BITREV_4B))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "bitrev.4b\t%0,%1" [(set_attr "type" "unknown") (set_attr "mode" "SI")]) @@ -4465,7 +4660,7 @@ [(set (match_operand:DI 0 "register_operand" "=r") (unspec:DI [(match_operand:DI 1 "register_operand" "r")] UNSPEC_BITREV_8B))] - "" + "TARGET_64BIT" "bitrev.8b\t%0,%1" [(set_attr "type" "unknown") (set_attr "mode" "DI")]) @@ -4473,7 +4668,7 @@ (define_insn "@rbit" [(set (match_operand:GPR 0 "register_operand" "=r") (bitreverse:GPR (match_operand:GPR 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "bitrev.\t%0,%1" [(set_attr "type" "unknown") (set_attr "mode" "")]) @@ -4492,7 +4687,7 @@ (define_insn "rbitqi" [(set (match_operand:QI 0 "register_operand" "=r") (bitreverse:QI (match_operand:QI 1 "register_operand" "r")))] - "" + "TARGET_64BIT || TARGET_32BIT_S" "bitrev.4b\t%0,%1" [(set_attr "type" "unknown") (set_attr "mode" "SI")]) @@ -4635,7 +4830,7 @@ (unspec:SI [(match_operand:QHWD 1 "register_operand" "r") (match_operand:SI 2 "register_operand" "r")] CRC))] - "" + "TARGET_64BIT" ".w..w\t%0,%1,%2" [(set_attr "type" "unknown") (set_attr "mode" "")]) @@ -4656,7 +4851,7 @@ (match_operand:SI 1 "register_operand") ; old_chksum (match_operand:SUBDI 2 "reg_or_0_operand") ; msg (match_operand 3 "const_int_operand")] ; poly - "" + "TARGET_64BIT" { unsigned HOST_WIDE_INT poly = 
UINTVAL (operands[3]); rtx msg = operands[2]; @@ -4753,7 +4948,8 @@ (define_insn_and_rewrite "simple_load" [(set (match_operand:LD_AT_LEAST_32_BIT 0 "register_operand" "=r,f") (match_operand:LD_AT_LEAST_32_BIT 1 "mem_simple_ldst_operand" ""))] - "loongarch_pre_reload_split () + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_pre_reload_split () && la_opt_explicit_relocs == EXPLICIT_RELOCS_AUTO && (TARGET_CMODEL_NORMAL || TARGET_CMODEL_MEDIUM)" "#" @@ -4766,7 +4962,8 @@ [(set (match_operand:GPR 0 "register_operand" "=r") (any_extend:GPR (match_operand:SUBDI 1 "mem_simple_ldst_operand" "")))] - "loongarch_pre_reload_split () + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_pre_reload_split () && la_opt_explicit_relocs == EXPLICIT_RELOCS_AUTO && (TARGET_CMODEL_NORMAL || TARGET_CMODEL_MEDIUM)" "#" @@ -4778,7 +4975,8 @@ (define_insn_and_rewrite "simple_store" [(set (match_operand:ST_ANY 0 "mem_simple_ldst_operand" "") (match_operand:ST_ANY 1 "reg_or_0_operand" "r,f"))] - "loongarch_pre_reload_split () + "(TARGET_64BIT || TARGET_32BIT_S) + && loongarch_pre_reload_split () && la_opt_explicit_relocs == EXPLICIT_RELOCS_AUTO && (TARGET_CMODEL_NORMAL || TARGET_CMODEL_MEDIUM)" "#" diff --git a/gcc/config/loongarch/predicates.md b/gcc/config/loongarch/predicates.md index 7e4d8abc822..980ae5cc8dd 100644 --- a/gcc/config/loongarch/predicates.md +++ b/gcc/config/loongarch/predicates.md @@ -295,7 +295,8 @@ (define_predicate "low_bitmask_operand" (and (match_code "const_int") - (match_test "low_bitmask_len (mode, INTVAL (op)) > 12"))) + (match_test "low_bitmask_len (mode, INTVAL (op)) > 12") + (match_test "!TARGET_32BIT_R"))) (define_predicate "d_operand" (and (match_code "reg") @@ -406,6 +407,7 @@ (define_predicate "ins_zero_bitmask_operand" (and (match_code "const_int") + (match_test "!TARGET_32BIT_R") (match_test "low_bitmask_len (mode, \ ~UINTVAL (op) | (~UINTVAL(op) - 1)) \ > 0") @@ -438,6 +440,10 @@ if (offset != const0_rtx) return false; + /* TARGET_32BIT always support call30. */ + if (TARGET_32BIT) + return true; + /* When compiling with '-mcmodel=medium -mexplicit-relocs' symbols are splited in loongarch_legitimize_call_address. diff --git a/gcc/config/loongarch/sync.md b/gcc/config/loongarch/sync.md index 5784dab92a6..86c475d052f 100644 --- a/gcc/config/loongarch/sync.md +++ b/gcc/config/loongarch/sync.md @@ -45,6 +45,10 @@ ;; particular code. (define_code_attr amop [(ior "or") (xor "xor") (and "and") (plus "add")]) +;; For 32 bit. +(define_code_attr atomic_optab_insn + [(plus "add.w") (ior "or") (xor "xor") (and "and")]) + ;; Memory barriers. 
(define_expand "mem_thread_fence" @@ -260,17 +264,48 @@ DONE; }) -(define_insn "atomic_" +(define_expand "atomic_" + [(any_atomic:GPR (match_operand:GPR 0 "memory_operand") ;; mem location + (match_operand:GPR 1 "reg_or_0_operand")) ;; value for op + (match_operand:SI 2 "const_int_operand")] ;; model + "" +{ + if (TARGET_64BIT) + emit_insn (gen_la64_atomic_ (operands[0], operands[1], operands[2])); + else + emit_insn (gen_la32_atomic_si (operands[0], operands[1], operands[2])); + DONE; +}) + +(define_insn "la64_atomic_" [(set (match_operand:GPR 0 "memory_operand" "+ZB") (unspec_volatile:GPR [(any_atomic:GPR (match_dup 0) (match_operand:GPR 1 "reg_or_0_operand" "rJ")) (match_operand:SI 2 "const_int_operand")] ;; model UNSPEC_SYNC_OLD_OP))] - "" + "TARGET_64BIT" "am%A2.\t$zero,%z1,%0" [(set (attr "length") (const_int 4))]) +(define_insn "la32_atomic_si" + [(set (match_operand:SI 0 "memory_operand" "+ZB") + (unspec_volatile:SI + [(any_atomic:SI (match_dup 0) + (match_operand:SI 1 "reg_or_0_operand" "rJ")) + (match_operand:SI 2 "const_int_operand")] ;; model + UNSPEC_SYNC_OLD_OP)) + (clobber (match_scratch:SI 3 "=&r"))] + "!TARGET_64BIT" +{ + return "1:\n\t" + "ll.w\t%3,%0\n\t" + "\t%3,%z1,%3\n\t" + "sc.w\t%3,%0\n\t" + "beq\t$zero,%3,1b\n\t"; +} + [(set (attr "length") (const_int 16))]) + (define_insn "atomic_add" [(set (match_operand:SHORT 0 "memory_operand" "+ZB") (unspec_volatile:SHORT @@ -282,7 +317,23 @@ "amadd%A2.\t$zero,%z1,%0" [(set (attr "length") (const_int 4))]) -(define_insn "atomic_fetch_" +(define_expand "atomic_fetch_" + [(match_operand:GPR 0 "register_operand") ;; old value at mem + (any_atomic:GPR (match_operand:GPR 1 "memory_operand") ;; mem location + (match_operand:GPR 2 "reg_or_0_operand")) ;; value for op + (match_operand:SI 3 "const_int_operand")] ;; model + "" + { + if (TARGET_64BIT) + emit_insn (gen_la64_atomic_fetch_ (operands[0], operands[1], + operands[2], operands[3])); + else + emit_insn (gen_la32_atomic_fetch_si (operands[0], operands[1], + operands[2], operands[3])); + DONE; + }) + +(define_insn "la64_atomic_fetch_" [(set (match_operand:GPR 0 "register_operand" "=&r") (match_operand:GPR 1 "memory_operand" "+ZB")) (set (match_dup 1) @@ -291,10 +342,30 @@ (match_operand:GPR 2 "reg_or_0_operand" "rJ")) (match_operand:SI 3 "const_int_operand")] ;; model UNSPEC_SYNC_OLD_OP))] - "" + "TARGET_64BIT" "am%A3.\t%0,%z2,%1" [(set (attr "length") (const_int 4))]) +(define_insn "la32_atomic_fetch_si" + [(set (match_operand:SI 0 "register_operand" "=&r") + (match_operand:SI 1 "memory_operand" "+ZB")) + (set (match_dup 1) + (unspec_volatile:SI + [(any_atomic:SI (match_dup 1) + (match_operand:SI 2 "reg_or_0_operand" "rJ")) + (match_operand:SI 3 "const_int_operand")] ;; model + UNSPEC_SYNC_OLD_OP)) + (clobber (match_scratch:SI 4 "=&r"))] + "!TARGET_64BIT" +{ + return "1:\n\t" + "ll.w\t%0,%1\n\t" + "\t%4,%z2,%0\n\t" + "sc.w\t%4,%1\n\t" + "beq\t$zero,%4,1b\n\t"; +} + [(set (attr "length") (const_int 16))]) + (define_insn "atomic_fetch_nand_mask_inverted" [(set (match_operand:GPR 0 "register_operand" "=&r") (match_operand:GPR 1 "memory_operand" "+ZC")) @@ -305,14 +376,24 @@ UNSPEC_SYNC_OLD_OP)) (clobber (match_scratch:GPR 3 "=&r"))] "" - { - return "1:\\n\\t" - "ll.\\t%0,%1\\n\\t" - "orn\\t%3,%2,%0\\n\\t" - "sc.\\t%3,%1\\n\\t" - "beqz\\t%3,1b"; - } - [(set (attr "length") (const_int 16))]) +{ + output_asm_insn ("1:", operands); + output_asm_insn ("ll.\t%0,%1", operands); + if (TARGET_32BIT_R) + { + output_asm_insn ("nor\t%3,%0,$zero", operands); + output_asm_insn ("or\t%3,%2,%3", 
operands); + } + else + output_asm_insn ("orn\t%3,%2,%0", operands); + output_asm_insn ("sc.\t%3,%1", operands); + output_asm_insn ("beq\t%3,$zero,1b", operands); + return ""; +} + [(set (attr "length") (if_then_else + (match_test "TARGET_32BIT_R") + (const_int 20) + (const_int 16)))]) (define_mode_iterator ALL_SC [GPR (TI "loongarch_16b_atomic_lock_free_p ()")]) (define_mode_attr _scq [(SI "") (DI "") (TI "_scq")]) @@ -338,7 +419,23 @@ DONE; }) -(define_insn "atomic_exchange" +(define_expand "atomic_exchange" + [(match_operand:GPR 0 "register_operand") ;; old value at mem + (match_operand:GPR 1 "memory_operand") ;; mem location + (match_operand:GPR 2 "register_operand") ;; value for op + (match_operand:SI 3 "const_int_operand")] ;; model + "" + { + if (TARGET_64BIT) + emit_insn (gen_la64_atomic_exchange (operands[0], operands[1], + operands[2], operands[3])); + else + emit_insn (gen_la32_atomic_exchangesi (operands[0], operands[1], + operands[2], operands[3])); + DONE; + }) + +(define_insn "la64_atomic_exchange" [(set (match_operand:GPR 0 "register_operand" "=&r") (unspec_volatile:GPR [(match_operand:GPR 1 "memory_operand" "+ZB") @@ -346,10 +443,29 @@ UNSPEC_SYNC_EXCHANGE)) (set (match_dup 1) (match_operand:GPR 2 "register_operand" "r"))] - "" + "TARGET_64BIT" "amswap%A3.\t%0,%z2,%1" [(set (attr "length") (const_int 4))]) +(define_insn "la32_atomic_exchangesi" + [(set (match_operand:SI 0 "register_operand" "=&r") + (unspec_volatile:SI + [(match_operand:SI 1 "memory_operand" "+ZB") + (match_operand:SI 3 "const_int_operand")] ;; model + UNSPEC_SYNC_EXCHANGE)) + (set (match_dup 1) + (match_operand:SI 2 "register_operand" "r")) + (clobber (match_scratch:SI 4 "=&r"))] + "!TARGET_64BIT" +{ + return "1:\n\t" + "ll.w\t%0,%1\n\t" + "or\t%4,$zero,%2\n\t" + "sc.w\t%4,%1\n\t" + "beq\t$zero,%4,1b\n\t"; +} + [(set (attr "length") (const_int 16))]) + (define_insn "atomic_exchangeti_scq" [(set (match_operand:TI 0 "register_operand" "=&r") (unspec_volatile:TI @@ -367,7 +483,7 @@ output_asm_insn ("ld.d\t%t0,%b1,8", operands); output_asm_insn ("move\t%3,%z2", operands); output_asm_insn ("sc.q\t%3,%t2,%1", operands); - output_asm_insn ("beqz\t%3,1b", operands); + output_asm_insn ("beq\t%3,$zero,1b", operands); return ""; } @@ -430,7 +546,7 @@ output_asm_insn ("or%i3\t%5,$zero,%3", operands); output_asm_insn ("sc.\t%5,%1", operands); - output_asm_insn ("beqz\t%5,1b", operands); + output_asm_insn ("beq\t%5,$zero,1b", operands); output_asm_insn ("%T4b\t3f", operands); output_asm_insn ("2:", operands); output_asm_insn ("%G4", operands); @@ -498,7 +614,7 @@ emit_insn (gen_rtx_SET (compare, difference)); } - if (word_mode != mode) + if (word_mode != GET_MODE (compare)) { rtx reg = gen_reg_rtx (word_mode); emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare))); @@ -515,7 +631,7 @@ (any_bitwise (match_operand:SHORT 1 "memory_operand" "+ZB") ;; memory (match_operand:SHORT 2 "reg_or_0_operand" "rJ")) ;; val (match_operand:SI 3 "const_int_operand" "")] ;; model - "" + "TARGET_64BIT || TARGET_32BIT_S" { /* We have no QI/HImode bitwise atomics, so use the address LSBs to form a mask, then use an aligned SImode atomic. 
*/ @@ -642,26 +758,26 @@ operands[3], operands[4], mod_f); } - rtx compare = operands[1]; - if (operands[3] != const0_rtx) - { - machine_mode mode = GET_MODE (operands[3]); - rtx op1 = convert_modes (SImode, mode, operands[1], true); - rtx op3 = convert_modes (SImode, mode, operands[3], true); - rtx difference = gen_rtx_MINUS (SImode, op1, op3); - compare = gen_reg_rtx (SImode); - emit_insn (gen_rtx_SET (compare, difference)); - } - - if (word_mode != mode) - { - rtx reg = gen_reg_rtx (word_mode); - emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare))); - compare = reg; - } - - emit_insn (gen_rtx_SET (operands[0], - gen_rtx_EQ (SImode, compare, const0_rtx))); + rtx compare = operands[1]; + if (operands[3] != const0_rtx) + { + machine_mode mode = GET_MODE (operands[3]); + rtx op1 = convert_modes (SImode, mode, operands[1], true); + rtx op3 = convert_modes (SImode, mode, operands[3], true); + rtx difference = gen_rtx_MINUS (SImode, op1, op3); + compare = gen_reg_rtx (SImode); + emit_insn (gen_rtx_SET (compare, difference)); + } + + if (word_mode != GET_MODE (compare)) + { + rtx reg = gen_reg_rtx (word_mode); + emit_insn (gen_rtx_SET (reg, gen_rtx_SIGN_EXTEND (word_mode, compare))); + compare = reg; + } + + emit_insn (gen_rtx_SET (operands[0], + gen_rtx_EQ (SImode, compare, const0_rtx))); DONE; }) @@ -716,7 +832,7 @@ output_asm_insn ("sc.q\t%7,%t3,%1", operands); /* Check if sc.q has done the store. */ - output_asm_insn ("beqz\t%7,1b", operands); + output_asm_insn ("beq\t%7,$zero,1b", operands); /* Jump over the mod_f barrier if sc.q has succeeded. */ output_asm_insn ("%T4b\t3f", operands); @@ -870,7 +986,7 @@ "and\\t%7,%0,%z3\\n\\t" "or%i5\\t%7,%7,%5\\n\\t" "sc.\\t%7,%1\\n\\t" - "beqz\\t%7,1b\\n\\t"; + "beq\\t%7,$zero,1b\\n\\t"; } [(set (attr "length") (const_int 20))]) @@ -967,7 +1083,7 @@ } output_asm_insn ("sc.q\t%3,%4,%1", operands); - output_asm_insn ("beqz\t%3,1b", operands); + output_asm_insn ("beq\t%3,$zero,1b", operands); return ""; }