(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_64_shld_ndd"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ior:DI (ashift:DI (match_operand:DI 1 "nonimmediate_operand" "rm")
+ (and:QI (match_operand:QI 3 "nonmemory_operand" "Jc")
+ (const_int 63)))
+ (subreg:DI
+ (lshiftrt:TI
+ (zero_extend:TI
+ (match_operand:DI 2 "register_operand" "r"))
+ (minus:QI (const_int 64)
+ (and:QI (match_dup 3) (const_int 63)))) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD"
+ "shld{q}\t{%s3%2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "mode" "DI")])
+
(define_insn "x86_64_shld_1"
[(set (match_operand:DI 0 "nonimmediate_operand" "+r*m")
(ior:DI (ashift:DI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_64_shld_ndd_1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ior:DI (ashift:DI (match_operand:DI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 3 "const_0_to_63_operand"))
+ (subreg:DI
+ (lshiftrt:TI
+ (zero_extend:TI
+ (match_operand:DI 2 "register_operand" "r"))
+ (match_operand:QI 4 "const_0_to_255_operand")) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && INTVAL (operands[4]) == 64 - INTVAL (operands[3])"
+ "shld{q}\t{%3, %2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "mode" "DI")
+ (set_attr "length_immediate" "1")])
+
(define_insn_and_split "*x86_64_shld_shrd_1_nozext"
[(set (match_operand:DI 0 "nonimmediate_operand")
(ior:DI (ashift:DI (match_operand:DI 4 "nonimmediate_operand")
operands[4] = force_reg (DImode, operands[4]);
emit_insn (gen_x86_64_shrd_1 (operands[0], operands[4], operands[3], operands[2]));
}
+ else if (TARGET_APX_NDD)
+ {
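+ /* With APX NDD the memory input can be used directly as the
+ non-destructive "rm" source: pick shld or shrd (with the two shift
+ counts swapped) so that the MEM operand lands in that slot, compute
+ the result into a fresh register and store it to operands[0].  */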
+ rtx tmp = gen_reg_rtx (DImode);
+ if (MEM_P (operands[4]))
+ {
+ operands[1] = force_reg (DImode, operands[1]);
+ emit_insn (gen_x86_64_shld_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ }
+ else if (MEM_P (operands[1]))
+ emit_insn (gen_x86_64_shrd_ndd_1 (tmp, operands[1], operands[4],
+ operands[3], operands[2]));
+ else
+ emit_insn (gen_x86_64_shld_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ emit_move_insn (operands[0], tmp);
+ }
else
{
operands[1] = force_reg (DImode, operands[1]);
(const_int 63)))) 0)))
(clobber (reg:CC FLAGS_REG))])])
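+;; Pre-reload splitter that rewrites the plain DImode form
+;; (op1 << op3) | (op2 >> (64 - op3)), which combine typically produces,
+;; into the zero_extend:TI form matched by x86_64_shld_ndd; the result is
+;; computed into a fresh register and then stored to operand 0, which may
+;; be memory.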
+(define_insn_and_split "*x86_64_shld_ndd_2"
+ [(set (match_operand:DI 0 "nonimmediate_operand")
+ (ior:DI (ashift:DI (match_operand:DI 1 "nonimmediate_operand")
+ (match_operand:QI 3 "nonmemory_operand"))
+ (lshiftrt:DI (match_operand:DI 2 "register_operand")
+ (minus:QI (const_int 64) (match_dup 3)))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && ix86_pre_reload_split ()"
+ "#"
+ "&& 1"
+ [(parallel [(set (match_dup 4)
+ (ior:DI (ashift:DI (match_dup 1)
+ (and:QI (match_dup 3) (const_int 63)))
+ (subreg:DI
+ (lshiftrt:TI
+ (zero_extend:TI (match_dup 2))
+ (minus:QI (const_int 64)
+ (and:QI (match_dup 3)
+ (const_int 63)))) 0)))
+ (clobber (reg:CC FLAGS_REG))
+ (set (match_dup 0) (match_dup 4))])]
+{
+ operands[4] = gen_reg_rtx (DImode);
+ emit_move_insn (operands[4], operands[0]);
+})
+
(define_insn "x86_shld"
[(set (match_operand:SI 0 "nonimmediate_operand" "+r*m")
(ior:SI (ashift:SI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_shld_ndd"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=r")
+ (ior:SI (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (and:QI (match_operand:QI 3 "nonmemory_operand" "Ic")
+ (const_int 31)))
+ (subreg:SI
+ (lshiftrt:DI
+ (zero_extend:DI
+ (match_operand:SI 2 "register_operand" "r"))
+ (minus:QI (const_int 32)
+ (and:QI (match_dup 3) (const_int 31)))) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD"
+ "shld{l}\t{%s3%2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "mode" "SI")])
+
(define_insn "x86_shld_1"
[(set (match_operand:SI 0 "nonimmediate_operand" "+r*m")
(ior:SI (ashift:SI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_shld_ndd_1"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ior:SI (ashift:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 3 "const_0_to_31_operand"))
+ (subreg:SI
+ (lshiftrt:DI
+ (zero_extend:DI
+ (match_operand:SI 2 "register_operand" "r"))
+ (match_operand:QI 4 "const_0_to_63_operand")) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && INTVAL (operands[4]) == 32 - INTVAL (operands[3])"
+ "shld{l}\t{%3, %2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "length_immediate" "1")
+ (set_attr "mode" "SI")])
+
(define_insn_and_split "*x86_shld_shrd_1_nozext"
[(set (match_operand:SI 0 "nonimmediate_operand")
(ior:SI (ashift:SI (match_operand:SI 4 "nonimmediate_operand")
operands[4] = force_reg (SImode, operands[4]);
emit_insn (gen_x86_shrd_1 (operands[0], operands[4], operands[3], operands[2]));
}
- else
+ else if (TARGET_APX_NDD)
+ {
+ rtx tmp = gen_reg_rtx (SImode);
+ if (MEM_P (operands[4]))
+ {
+ operands[1] = force_reg (SImode, operands[1]);
+ emit_insn (gen_x86_shld_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ }
+ else if (MEM_P (operands[1]))
+ emit_insn (gen_x86_shrd_ndd_1 (tmp, operands[1], operands[4],
+ operands[3], operands[2]));
+ else
+ emit_insn (gen_x86_shld_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ emit_move_insn (operands[0], tmp);
+ }
+ else
{
operands[1] = force_reg (SImode, operands[1]);
rtx tmp = gen_reg_rtx (SImode);
(const_int 31)))) 0)))
(clobber (reg:CC FLAGS_REG))])])
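+;; SImode counterpart of the *x86_64_shld_ndd_2 splitter above.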
+(define_insn_and_split "*x86_shld_ndd_2"
+ [(set (match_operand:SI 0 "nonimmediate_operand")
+ (ior:SI (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
+ (match_operand:QI 3 "nonmemory_operand"))
+ (lshiftrt:SI (match_operand:SI 2 "register_operand")
+ (minus:QI (const_int 32) (match_dup 3)))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && ix86_pre_reload_split ()"
+ "#"
+ "&& 1"
+ [(parallel [(set (match_dup 4)
+ (ior:SI (ashift:SI (match_dup 1)
+ (and:QI (match_dup 3) (const_int 31)))
+ (subreg:SI
+ (lshiftrt:DI
+ (zero_extend:DI (match_dup 2))
+ (minus:QI (const_int 32)
+ (and:QI (match_dup 3)
+ (const_int 31)))) 0)))
+ (clobber (reg:CC FLAGS_REG))
+ (set (match_dup 0) (match_dup 4))])]
+{
+ operands[4] = gen_reg_rtx (SImode);
+ emit_move_insn (operands[4], operands[0]);
+})
+
(define_expand "@x86_shift<mode>_adj_1"
[(set (reg:CCZ FLAGS_REG)
(compare:CCZ (and:QI (match_operand:QI 2 "register_operand")
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_64_shrd_ndd"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ior:DI (lshiftrt:DI (match_operand:DI 1 "nonimmediate_operand" "rm")
+ (and:QI (match_operand:QI 3 "nonmemory_operand" "Jc")
+ (const_int 63)))
+ (subreg:DI
+ (ashift:TI
+ (zero_extend:TI
+ (match_operand:DI 2 "register_operand" "r"))
+ (minus:QI (const_int 64)
+ (and:QI (match_dup 3) (const_int 63)))) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD"
+ "shrd{q}\t{%s3%2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "mode" "DI")])
+
(define_insn "x86_64_shrd_1"
[(set (match_operand:DI 0 "nonimmediate_operand" "+r*m")
(ior:DI (lshiftrt:DI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_64_shrd_ndd_1"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (ior:DI (lshiftrt:DI (match_operand:DI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 3 "const_0_to_63_operand"))
+ (subreg:DI
+ (ashift:TI
+ (zero_extend:TI
+ (match_operand:DI 2 "register_operand" "r"))
+ (match_operand:QI 4 "const_0_to_255_operand")) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && INTVAL (operands[4]) == 64 - INTVAL (operands[3])"
+ "shrd{q}\t{%3, %2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "length_immediate" "1")
+ (set_attr "mode" "DI")])
+
(define_insn_and_split "*x86_64_shrd_shld_1_nozext"
[(set (match_operand:DI 0 "nonimmediate_operand")
(ior:DI (lshiftrt:DI (match_operand:DI 4 "nonimmediate_operand")
operands[4] = force_reg (DImode, operands[4]);
emit_insn (gen_x86_64_shld_1 (operands[0], operands[4], operands[3], operands[2]));
}
+ else if (TARGET_APX_NDD)
+ {
+ rtx tmp = gen_reg_rtx (DImode);
+ if (MEM_P (operands[4]))
+ {
+ operands[1] = force_reg (DImode, operands[1]);
+ emit_insn (gen_x86_64_shrd_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ }
+ else if (MEM_P (operands[1]))
+ emit_insn (gen_x86_64_shld_ndd_1 (tmp, operands[1], operands[4],
+ operands[3], operands[2]));
+ else
+ emit_insn (gen_x86_64_shrd_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ emit_move_insn (operands[0], tmp);
+ }
else
{
operands[1] = force_reg (DImode, operands[1]);
(const_int 63)))) 0)))
(clobber (reg:CC FLAGS_REG))])])
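+;; Pre-reload splitter that rewrites the plain DImode right-shift form
+;; into the zero_extend:TI form matched by x86_64_shrd_ndd, computing the
+;; result into a fresh register that is then stored to operand 0.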
+(define_insn_and_split "*x86_64_shrd_ndd_2"
+ [(set (match_operand:DI 0 "nonimmediate_operand")
+ (ior:DI (lshiftrt:DI (match_operand:DI 1 "nonimmediate_operand")
+ (match_operand:QI 3 "nonmemory_operand"))
+ (ashift:DI (match_operand:DI 2 "register_operand")
+ (minus:QI (const_int 64) (match_dup 3)))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && ix86_pre_reload_split ()"
+ "#"
+ "&& 1"
+ [(parallel [(set (match_dup 4)
+ (ior:DI (lshiftrt:DI (match_dup 1)
+ (and:QI (match_dup 3) (const_int 63)))
+ (subreg:DI
+ (ashift:TI
+ (zero_extend:TI (match_dup 2))
+ (minus:QI (const_int 64)
+ (and:QI (match_dup 3)
+ (const_int 63)))) 0)))
+ (clobber (reg:CC FLAGS_REG))
+ (set (match_dup 0) (match_dup 4))])]
+{
+ operands[4] = gen_reg_rtx (DImode);
+ emit_move_insn (operands[4], operands[0]);
+})
+
(define_insn "x86_shrd"
[(set (match_operand:SI 0 "nonimmediate_operand" "+r*m")
(ior:SI (lshiftrt:SI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_shrd_ndd"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ior:SI (lshiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (and:QI (match_operand:QI 3 "nonmemory_operand" "Ic")
+ (const_int 31)))
+ (subreg:SI
+ (ashift:DI
+ (zero_extend:DI
+ (match_operand:SI 2 "register_operand" "r"))
+ (minus:QI (const_int 32)
+ (and:QI (match_dup 3) (const_int 31)))) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD"
+ "shrd{l}\t{%s3%2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "mode" "SI")])
+
(define_insn "x86_shrd_1"
[(set (match_operand:SI 0 "nonimmediate_operand" "+r*m")
(ior:SI (lshiftrt:SI (match_dup 0)
(set_attr "amdfam10_decode" "vector")
(set_attr "bdver1_decode" "vector")])
+(define_insn "x86_shrd_ndd_1"
+ [(set (match_operand:SI 0 "register_operand" "=r")
+ (ior:SI (lshiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "rm")
+ (match_operand:QI 3 "const_0_to_31_operand"))
+ (subreg:SI
+ (ashift:DI
+ (zero_extend:DI
+ (match_operand:SI 2 "register_operand" "r"))
+ (match_operand:QI 4 "const_0_to_63_operand")) 0)))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && (INTVAL (operands[4]) == 32 - INTVAL (operands[3]))"
+ "shrd{l}\t{%3, %2, %1, %0|%0, %1, %2, %3}"
+ [(set_attr "type" "ishift")
+ (set_attr "length_immediate" "1")
+ (set_attr "mode" "SI")])
+
(define_insn_and_split "*x86_shrd_shld_1_nozext"
[(set (match_operand:SI 0 "nonimmediate_operand")
(ior:SI (lshiftrt:SI (match_operand:SI 4 "nonimmediate_operand")
operands[4] = force_reg (SImode, operands[4]);
emit_insn (gen_x86_shld_1 (operands[0], operands[4], operands[3], operands[2]));
}
- else
+ else if (TARGET_APX_NDD)
+ {
+ rtx tmp = gen_reg_rtx (SImode);
+ if (MEM_P (operands[4]))
+ {
+ operands[1] = force_reg (SImode, operands[1]);
+ emit_insn (gen_x86_shrd_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ }
+ else if (MEM_P (operands[1]))
+ emit_insn (gen_x86_shld_ndd_1 (tmp, operands[1], operands[4],
+ operands[3], operands[2]));
+ else
+ emit_insn (gen_x86_shrd_ndd_1 (tmp, operands[4], operands[1],
+ operands[2], operands[3]));
+ emit_move_insn (operands[0], tmp);
+ }
+ else
{
operands[1] = force_reg (SImode, operands[1]);
rtx tmp = gen_reg_rtx (SImode);
(const_int 31)))) 0)))
(clobber (reg:CC FLAGS_REG))])])
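+;; SImode counterpart of the *x86_64_shrd_ndd_2 splitter above.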
+(define_insn_and_split "*x86_shrd_ndd_2"
+ [(set (match_operand:SI 0 "nonimmediate_operand")
+ (ior:SI (lshiftrt:SI (match_operand:SI 1 "nonimmediate_operand")
+ (match_operand:QI 3 "nonmemory_operand"))
+ (ashift:SI (match_operand:SI 2 "register_operand")
+ (minus:QI (const_int 32) (match_dup 3)))))
+ (clobber (reg:CC FLAGS_REG))]
+ "TARGET_APX_NDD
+ && ix86_pre_reload_split ()"
+ "#"
+ "&& 1"
+ [(parallel [(set (match_dup 4)
+ (ior:SI (lshiftrt:SI (match_dup 1)
+ (and:QI (match_dup 3) (const_int 31)))
+ (subreg:SI
+ (ashift:DI
+ (zero_extend:DI (match_dup 2))
+ (minus:QI (const_int 32)
+ (and:QI (match_dup 3)
+ (const_int 31)))) 0)))
+ (clobber (reg:CC FLAGS_REG))
+ (set (match_dup 0) (match_dup 4))])]
+{
+ operands[4] = gen_reg_rtx (SImode);
+ emit_move_insn (operands[4], operands[0]);
+})
+
;; Base name for insn mnemonic.
(define_mode_attr cvt_mnemonic
[(SI "{cltd|cdq}") (DI "{cqto|cqo}")])