+2012-07-31 Bill Schmidt <wschmidt@linux.ibm.com>
+
+ PR tree-optimization/53773
+ * tree-vectorizer.h (struct _loop_vec_info): Add operands_swapped.
+ (LOOP_VINFO_OPERANDS_SWAPPED): New macro.
+ * tree-vect-loop.c (new_loop_vec_info): Initialize
+ LOOP_VINFO_OPERANDS_SWAPPED field.
+ (destroy_loop_vec_info): Restore canonical form.
+ (vect_is_slp_reduction): Set LOOP_VINFO_OPERANDS_SWAPPED field.
+ (vect_is_simple_reduction_1): Likewise.
+
2012-07-31 Steven Bosscher <steven@gcc.gnu.org>

* sched-vis.c (dump_insn_slim): Print print_rtx_head at the
+2012-07-31 Bill Schmidt <wschmidt@linux.ibm.com>
+
+ PR tree-optimization/53773
+ * gcc.dg/vect/pr53773.c: New test.
+
2012-07-31 Nick Clifton <nickc@redhat.com>

* gcc.dg/stack-usage-1.c (SIZE): Define for FRV.
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/vect/pr53773.c
+/* { dg-do compile } */
+
+int
+foo (int integral, int decimal, int power_ten)
+{
+ while (power_ten > 0)
+ {
+ integral *= 10;
+ decimal *= 10;
+ power_ten--;
+ }
+
+ return integral + decimal;
+}
+
+/* Two occurrences in annotations, two in code. */
+/* { dg-final { scan-tree-dump-times "\\* 10" 4 "vect" } } */
+/* { dg-final { cleanup-tree-dump "vect" } } */
+
LOOP_VINFO_PEELING_HTAB (res) = NULL;
LOOP_VINFO_TARGET_COST_DATA (res) = init_cost (loop);
LOOP_VINFO_PEELING_FOR_GAPS (res) = false;
+ LOOP_VINFO_OPERANDS_SWAPPED (res) = false;
return res;
}
int j;
VEC (slp_instance, heap) *slp_instances;
slp_instance instance;
+ bool swapped;
if (!loop_vinfo)
return;
bbs = LOOP_VINFO_BBS (loop_vinfo);
nbbs = loop->num_nodes;
+ swapped = LOOP_VINFO_OPERANDS_SWAPPED (loop_vinfo);
if (!clean_stmts)
{
for (si = gsi_start_bb (bb); !gsi_end_p (si); )
{
gimple stmt = gsi_stmt (si);
+
+ /* We may have broken canonical form by moving a constant
+ into RHS1 of a commutative op. Fix such occurrences. */
+ if (swapped && is_gimple_assign (stmt))
+ {
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+
+ if ((code == PLUS_EXPR
+ || code == POINTER_PLUS_EXPR
+ || code == MULT_EXPR)
+ && CONSTANT_CLASS_P (gimple_assign_rhs1 (stmt)))
+ swap_tree_operands (stmt,
+ gimple_assign_rhs1_ptr (stmt),
+ gimple_assign_rhs2_ptr (stmt));
+ }
+
/* Free stmt_vec_info. */
free_stmt_vec_info (stmt);
gsi_next (&si);
gimple_assign_rhs1_ptr (next_stmt),
gimple_assign_rhs2_ptr (next_stmt));
update_stmt (next_stmt);
+
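+ /* If the swap has moved a constant into RHS1, we have broken
+ GIMPLE canonical form; record this so that destroy_loop_vec_info
+ can restore it. */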
+ if (CONSTANT_CLASS_P (gimple_assign_rhs1 (next_stmt)))
+ LOOP_VINFO_OPERANDS_SWAPPED (loop_info) = true;
}
else
return false;
swap_tree_operands (def_stmt, gimple_assign_rhs1_ptr (def_stmt),
gimple_assign_rhs2_ptr (def_stmt));
+
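+ /* Likewise, record when the swap has left a constant in RHS1 so
+ that canonical form can be restored later. */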
+ if (CONSTANT_CLASS_P (gimple_assign_rhs1 (def_stmt)))
+ LOOP_VINFO_OPERANDS_SWAPPED (loop_info) = true;
}
else
{
this. */
bool peeling_for_gaps;
+ /* Reductions are canonicalized so that the last operand is the reduction
+ operand. If this places a constant into RHS1, this decanonicalizes
+ GIMPLE for other phases, so we must track when this has occurred and
+ fix it up. */
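+ /* For example, s_1 = s_0 * 10 may have been rewritten as
+ s_1 = 10 * s_0 so that the reduction def s_0 is the last
+ operand. */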
+ bool operands_swapped;
+
} *loop_vec_info;
/* Access Functions. */
#define LOOP_VINFO_PEELING_HTAB(L) (L)->peeling_htab
#define LOOP_VINFO_TARGET_COST_DATA(L) (L)->target_cost_data
#define LOOP_VINFO_PEELING_FOR_GAPS(L) (L)->peeling_for_gaps
+#define LOOP_VINFO_OPERANDS_SWAPPED(L) (L)->operands_swapped
#define LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT(L) \
VEC_length (gimple, (L)->may_misalign_stmts) > 0