extern int vspltis_shifted (rtx);
extern HOST_WIDE_INT const_vector_elt_as_int (rtx, unsigned int);
extern bool macho_lo_sum_memory_operand (rtx, machine_mode);
+extern bool can_be_rotated_to_lowbits (unsigned HOST_WIDE_INT, int, int *);
+extern bool can_be_rotated_to_positive_16bits (HOST_WIDE_INT);
+extern bool can_be_rotated_to_negative_15bits (HOST_WIDE_INT);
extern int num_insns_constant (rtx, machine_mode);
extern int small_data_operand (rtx, machine_mode);
extern bool mem_operand_gpr (rtx, machine_mode);
return reverse_condition (code);
}
+/* Check if C (as 64bit integer) can be rotated to a constant which constains
+ nonzero bits at the LOWBITS low bits only.
+
+ Return true if C can be rotated to such constant. If so, *ROT is written
+ to the number by which C is rotated.
+ Return false otherwise. */
+
+bool
+can_be_rotated_to_lowbits (unsigned HOST_WIDE_INT c, int lowbits, int *rot)
+{
+ int clz = HOST_BITS_PER_WIDE_INT - lowbits;
+
+ /* case a. 0..0xxx: already at least clz zeros. */
+ int lz = clz_hwi (c);
+ if (lz >= clz)
+ {
+ *rot = 0;
+ return true;
+ }
+
+ /* case b. 0..0xxx0..0: at least clz zeros. */
+ int tz = ctz_hwi (c);
+ if (lz + tz >= clz)
+ {
+ *rot = HOST_BITS_PER_WIDE_INT - tz;
+ return true;
+ }
+
+ /* case c. xx10.....0xx: rotate 'clz - 1' bits first, then check case b.
+ ^bit -> Vbit, , then zeros are at head or tail.
+ 00...00xxx100, 'clz - 1' >= 'bits of xxxx'. */
+ const int rot_bits = lowbits + 1;
+ unsigned HOST_WIDE_INT rc = (c >> rot_bits) | (c << (clz - 1));
+ tz = ctz_hwi (rc);
+ if (clz_hwi (rc) + tz >= clz)
+ {
+ *rot = HOST_BITS_PER_WIDE_INT - (tz + rot_bits);
+ return true;
+ }
+
+ return false;
+}
+
+/* Check if C (as 64bit integer) can be rotated to a positive 16bits constant
+ which contains 48bits leading zeros and 16bits of any value. */
+
+bool
+can_be_rotated_to_positive_16bits (HOST_WIDE_INT c)
+{
+ int rot = 0;
+ bool res = can_be_rotated_to_lowbits (c, 16, &rot);
+ return res && rot > 0;
+}
+
+/* Check if C (as 64bit integer) can be rotated to a negative 15bits constant
+ which contains 49bits leading ones and 15bits of any value. */
+
+bool
+can_be_rotated_to_negative_15bits (HOST_WIDE_INT c)
+{
+ int rot = 0;
+ bool res = can_be_rotated_to_lowbits (~c, 15, &rot);
+ return res && rot > 0;
+}
+
/* Generate a compare for CODE. Return a brand-new rtx that
represents the result of the compare. */
"xscvdpsp %x0,%x1"
[(set_attr "type" "fp")])
+
+(define_code_iterator eqne [eq ne])
+
+;; "i == C" ==> "rotl(i,N) == rotl(C,N)"
+(define_insn_and_split "*rotate_on_cmpdi"
+ [(set (pc)
+ (if_then_else (eqne (match_operand:DI 1 "gpc_reg_operand" "r")
+ (match_operand:DI 2 "const_int_operand" "n"))
+ (label_ref (match_operand 0 ""))
+ (pc)))
+ (clobber (match_scratch:DI 3 "=r"))
+ (clobber (match_scratch:CCUNS 4 "=y"))]
+ "TARGET_POWERPC64 && num_insns_constant (operands[2], DImode) > 1
+ && (can_be_rotated_to_positive_16bits (INTVAL (operands[2]))
+ || can_be_rotated_to_negative_15bits (INTVAL (operands[2])))"
+ "#"
+ "&& 1"
+ [(pc)]
+{
+ bool sgn = false;
+ unsigned HOST_WIDE_INT C = INTVAL (operands[2]);
+ int rot;
+
+ /* cmpldi */
+ if (!can_be_rotated_to_lowbits (C, 16, &rot))
+ {
+ /* cmpdi */
+ sgn = true;
+ bool res = can_be_rotated_to_lowbits (~C, 15, &rot);
+ gcc_assert (res);
+ }
+
+ rtx n = GEN_INT (rot);
+
+ /* i' = rotl (i, n) */
+ rtx op0 = can_create_pseudo_p () ? gen_reg_rtx (DImode) : operands[3];
+ emit_insn (gen_rtx_SET (op0, gen_rtx_ROTATE (DImode, operands[1], n)));
+
+ /* C' = rotl (C, n) */
+ rtx op1 = GEN_INT ((C << rot) | (C >> (HOST_BITS_PER_WIDE_INT - rot)));
+
+ /* i' == C' */
+ machine_mode comp_mode = sgn ? CCmode : CCUNSmode;
+ rtx cc = can_create_pseudo_p () ? gen_reg_rtx (comp_mode) : operands[4];
+ PUT_MODE (cc, comp_mode);
+ emit_insn (gen_rtx_SET (cc, gen_rtx_COMPARE (comp_mode, op0, op1)));
+ rtx cmp = gen_rtx_<eqne:CODE> (CCmode, cc, const0_rtx);
+ rtx loc_ref = gen_rtx_LABEL_REF (VOIDmode, operands[0]);
+ emit_jump_insn (gen_rtx_SET (pc_rtx,
+ gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
+ loc_ref, pc_rtx)));
+
+ /* Keep the probability info for the prediction of the branch insn. */
+ rtx note = find_reg_note (curr_insn, REG_BR_PROB, 0);
+ if (note)
+ {
+ profile_probability prob
+ = profile_probability::from_reg_br_prob_note (XINT (note, 0));
+
+ add_reg_br_prob_note (get_last_insn (), prob);
+ }
+
+ DONE;
+})
+
;; Split a load of a large constant into the appropriate two-insn
;; sequence.
;; rs6000_legitimate_combined_insn prevents combine creating any of
;; the ctr<mode> insns.
-(define_code_iterator eqne [eq ne])
(define_code_attr bd [(eq "bdz") (ne "bdnz")])
(define_code_attr bd_neg [(eq "bdnz") (ne "bdz")])
--- /dev/null
+/* { dg-options "-O2" } */
+/* { dg-do compile { target has_arch_ppc64 } } */
+
+/* { dg-final { scan-assembler-times {\mcmpldi\M} 10 } } */
+/* { dg-final { scan-assembler-times {\mcmpdi\M} 4 } } */
+/* { dg-final { scan-assembler-times {\mrotldi\M} 14 } } */
+
+int foo (int a);
+
+int __attribute__ ((noinline)) udi_fun (unsigned long long in)
+{
+ if (in == (0x8642000000000000ULL))
+ return foo (1);
+ if (in == (0x7642000000000000ULL))
+ return foo (12);
+ if (in == (0x8000000000000000ULL))
+ return foo (32);
+ if (in == (0x8700000000000091ULL))
+ return foo (33);
+ if (in == (0x8642FFFFFFFFFFFFULL))
+ return foo (46);
+ if (in == (0x7642FFFFFFFFFFFFULL))
+ return foo (51);
+ if (in == (0x7567000000ULL))
+ return foo (9);
+ if (in == (0xFFF8567FFFFFFFFFULL))
+ return foo (19);
+
+ return 0;
+}
+
+int __attribute__ ((noinline)) di_fun (long long in)
+{
+ if (in == (0x8642000000000000LL))
+ return foo (1);
+ if (in == (0x7642000000000000LL))
+ return foo (12);
+ if (in == (0x8000000000000000LL))
+ return foo (32);
+ if (in == (0x8700000000000091LL))
+ return foo (33);
+ if (in == (0x8642FFFFFFFFFFFFLL))
+ return foo (46);
+ if (in == (0x7642FFFFFFFFFFFFLL))
+ return foo (51);
+ if (in == (0x7567000000LL))
+ return foo (9);
+ if (in == (0xFFF8567FFFFFFFFFLL))
+ return foo (19);
+
+ return 0;
+}
--- /dev/null
+/* { dg-do run } */
+/* { dg-options "-O2 -std=c99" } */
+
+int
+foo (int a)
+{
+ return a + 6;
+}
+
+int __attribute__ ((noipa)) udi_fun (unsigned long long in)
+{
+ if (in == (0x8642000000000000ULL))
+ return foo (1);
+ if (in == (0x7642000000000000ULL))
+ return foo (12);
+ if (in == (0x8000000000000000ULL))
+ return foo (32);
+ if (in == (0x8700000000000091ULL))
+ return foo (33);
+ if (in == (0x8642FFFFFFFFFFFFULL))
+ return foo (46);
+ if (in == (0x7642FFFFFFFFFFFFULL))
+ return foo (51);
+ if (in == (0x7567000000ULL))
+ return foo (9);
+ if (in == (0xFFF8567FFFFFFFFFULL))
+ return foo (19);
+
+ return 0;
+}
+
+int __attribute__ ((noipa)) di_fun (long long in)
+{
+ if (in == (0x8642000000000000LL))
+ return foo (1);
+ if (in == (0x7642000000000000LL))
+ return foo (12);
+ if (in == (0x8000000000000000LL))
+ return foo (32);
+ if (in == (0x8700000000000091LL))
+ return foo (33);
+ if (in == (0x8642FFFFFFFFFFFFLL))
+ return foo (46);
+ if (in == (0x7642FFFFFFFFFFFFLL))
+ return foo (51);
+ return 0;
+}
+
+int
+main ()
+{
+ int e = 0;
+ if (udi_fun (6) != 0)
+ e++;
+ if (udi_fun (0x8642000000000000ULL) != foo (1))
+ e++;
+ if (udi_fun (0x7642000000000000ULL) != foo (12))
+ e++;
+ if (udi_fun (0x8000000000000000ULL) != foo (32))
+ e++;
+ if (udi_fun (0x8700000000000091ULL) != foo (33))
+ e++;
+ if (udi_fun (0x8642FFFFFFFFFFFFULL) != foo (46))
+ e++;
+ if (udi_fun (0x7642FFFFFFFFFFFFULL) != foo (51))
+ e++;
+ if (udi_fun (0x7567000000ULL) != foo (9))
+ e++;
+ if (udi_fun (0xFFF8567FFFFFFFFFULL) != foo (19))
+ e++;
+
+ if (di_fun (6) != 0)
+ e++;
+ if (di_fun (0x8642000000000000LL) != foo (1))
+ e++;
+ if (di_fun (0x7642000000000000LL) != foo (12))
+ e++;
+ if (di_fun (0x8000000000000000LL) != foo (32))
+ e++;
+ if (di_fun (0x8700000000000091LL) != foo (33))
+ e++;
+ if (di_fun (0x8642FFFFFFFFFFFFLL) != foo (46))
+ e++;
+ if (di_fun (0x7642FFFFFFFFFFFFLL) != foo (51))
+ e++;
+ if (udi_fun (0x7567000000LL) != foo (9))
+ e++;
+ if (udi_fun (0xFFF8567FFFFFFFFFLL) != foo (19))
+ e++;
+
+ if (e)
+ __builtin_abort ();
+ return 0;
+}
+