--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
+2014-02-10 Richard Biener <rguenther@suse.de>
+
+ Backport from mainline
+ 2014-01-30 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/59903
+ * tree-vect-loop.c (vect_transform_loop): Guard multiple-types
+ check properly.
+
+ 2014-02-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/60115
+	* tree-eh.c (tree_could_trap_p): Unify TARGET_MEM_REF and
+	MEM_REF handling.  Properly verify that the accesses are not
+	out of the object's bounds.
+
2014-02-05 James Greenhalgh <james.greenhalgh@arm.com>
Backport from mainline.
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
+2014-02-10 Richard Biener <rguenther@suse.de>
+
+ Backport from mainline
+ 2014-01-30 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/59903
+ * gcc.dg/torture/pr59903.c: New testcase.
+
+ 2014-02-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/60115
+ * gcc.dg/torture/pr60115.c: New testcase.
+
2014-02-09 Janus Weil <janus@gcc.gnu.org>
Backport from mainline
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr59903.c
+/* { dg-do compile } */
+
+int a, b, c, d, e, f, g, h, i[3], l, m, n, o, p, q, r;
+
+struct S0
+{
+ int f0;
+ int f1;
+ int f2;
+ int f3;
+} j;
+
+static int
+fn1 (int p1)
+{
+ return p1 || ((p1 > 0) > (e << 1));
+}
+
+static struct S0
+fn2 (struct S0 p1)
+{
+ char s;
+ struct S0 t = {0,0,0,0};
+ int u = 2;
+ for (;;)
+ {
+ if (i[0])
+ break;
+ for (m = 0; m < 4; m++)
+ for (p1.f0 = 0; p1.f0 < 3; p1.f0++)
+ {
+ j = t;
+ t.f3 = i[p1.f0];
+ o = b || 1 >> b ? 0 : a < 0;
+ q = 1 % d;
+ if ((g < fn1 ((1 ^ (q & 1)) | n)) ^ u)
+ j.f3 |= p % 2;
+ s = j.f3 > 0 ? j.f3 : j.f3 << 1;
+ r = l = s && p1.f1 * c;
+ h = p1.f1;
+ }
+ }
+ return p1;
+}
+
+int
+main ()
+{
+ for (;f;)
+ {
+ struct S0 v = {0,0,0,0};
+ fn2 (v);
+ j.f3 = 0;
+ }
+ return 0;
+}
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr60115.c
+/* { dg-do run } */
+
+int a, b[2];
+
+int
+main ()
+{
+lbl:
+ for (; a; a--)
+ if (b[10000])
+ goto lbl;
+
+ return 0;
+}
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
 restart:
switch (code)
{
- case TARGET_MEM_REF:
- if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
- && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
- return false;
- return !TREE_THIS_NOTRAP (expr);
-
     case COMPONENT_REF:
     case REALPART_EXPR:
     case IMAGPART_EXPR:
       return false;
@@ ... @@
       return !in_array_bounds_p (expr);
 
+ case TARGET_MEM_REF:
case MEM_REF:
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+ if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
+ && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
+ return true;
+ if (TREE_THIS_NOTRAP (expr))
return false;
- /* Fallthru. */
+ /* We cannot prove that the access is in-bounds when we have
+ variable-index TARGET_MEM_REFs. */
+ if (code == TARGET_MEM_REF
+ && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
+ return true;
+ if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+ {
+ tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
+ double_int off = mem_ref_offset (expr);
+ if (off.is_negative ())
+ return true;
+ if (TREE_CODE (base) == STRING_CST)
+ return double_int::from_uhwi (TREE_STRING_LENGTH (base)).ule (off);
+ else if (DECL_SIZE_UNIT (base) == NULL_TREE
+ || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
+ || tree_to_double_int (DECL_SIZE_UNIT (base)).ule (off))
+ return true;
+ /* Now we are sure the first byte of the access is inside
+ the object. */
+ return false;
+ }
+ return true;
+
case INDIRECT_REF:
return !TREE_THIS_NOTRAP (expr);
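For illustration only, not part of the patch: a minimal C sketch of what the reworked tree_could_trap_p decides for constant-offset accesses based on a known object. Once such an access is represented as a MEM_REF or TARGET_MEM_REF of an ADDR_EXPR, the predicate now compares the constant offset against DECL_SIZE_UNIT of the base instead of assuming the access cannot trap. The function names and the 4-byte-int assumption below are hypothetical.

/* Sketch only: assumes 4-byte int, so DECL_SIZE_UNIT (buf) is 8.  */

int buf[2];

int
first_byte_in_bounds (void)
{
  /* MEM[&buf + 4B]: offset 4 < 8, the first byte is inside buf,
     so the access is classified as non-trapping.  */
  return buf[1];
}

int
first_byte_out_of_bounds (int n)
{
  int sum = 0, i;

  /* If loop optimizations rewrite buf[10000] into a (TARGET_)MEM_REF with
     constant offset 40000 >= 8, the patched predicate answers "could trap",
     so the load must not be speculated out of a loop that may run zero
     times (the pr60115.c scenario above).  */
  for (i = 0; i < n; i++)
    sum += buf[10000];
  return sum;
}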
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
 	      && !STMT_VINFO_LIVE_P (stmt_info))
continue;
- if ((TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info))
- != (unsigned HOST_WIDE_INT) vectorization_factor)
+ if (STMT_VINFO_VECTYPE (stmt_info)
+ && (TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info))
+ != (unsigned HOST_WIDE_INT) vectorization_factor)
&& dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "multiple-types.");
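Likewise for illustration only, not part of the patch: a reduced C sketch of the shape of the vect_transform_loop fix above. The point is that STMT_VINFO_VECTYPE may be NULL for some statements reached here, so the diagnostics-only comparison has to test the pointer before dereferencing it. All names below are hypothetical.

#include <stdio.h>

struct vec_type { unsigned subparts; };
struct stmt_info { struct vec_type *vectype; };  /* NULL if no vector type was assigned.  */

static void
note_multiple_types (const struct stmt_info *si, unsigned vf, int dump_enabled)
{
  /* Before the fix, the equivalent of si->vectype->subparts was read even
     when si->vectype was NULL; the added guard short-circuits instead.  */
  if (si->vectype
      && si->vectype->subparts != vf
      && dump_enabled)
    printf ("multiple-types.\n");
}

int
main (void)
{
  struct stmt_info no_vectype = { 0 };      /* hypothetical stmt without a vectype */
  note_multiple_types (&no_vectype, 4, 1);  /* safe: the guard short-circuits */
  return 0;
}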