+2019-09-02 Richard Biener <rguenther@suse.de>
+
+ Backport from mainline
+ 2019-07-19 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91200
+ * tree-ssa-phiopt.c (cond_store_replacement): Check we have
+ no PHI nodes in middle-bb.
+
+ 2019-07-15 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/91162
+ * tree-cfg.c (move_block_to_fn): When releasing a virtual PHI
+ node make sure to replace all uses with something valid.
+
+ 2019-07-11 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/91131
+ * gimplify.c (gimplify_init_constructor): Force a temporary
+ when the object is volatile and we have not cleared it even though
+ there are no nonzero elements.
+
+ 2019-07-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91126
+ * tree-ssa-sccvn.c (vn_walk_cb_data::push_partial_def): Adjust
+ native encoding offset for BYTES_BIG_ENDIAN.
+ (vn_reference_lookup_3): Likewise.
+
+ 2019-07-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91126
+ * tree-ssa-sccvn.c (vn_reference_lookup_3): Adjust
+ native encoding offset for BYTES_BIG_ENDIAN.
+
+ 2019-04-29 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/90278
+ * tree-ssa-forwprop.c (pass_forwprop::execute): Transfer/clean
+ EH on comparison simplification.
+
+ 2019-04-11 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/90020
+ * tree-ssa-sccvn.c (vn_reference_may_trap): New function.
+ * tree-ssa-sccvn.h (vn_reference_may_trap): Declare.
+ * tree-ssa-pre.c (compute_avail): Use it to not put
+ possibly trapping references after a call that might not
+ return into EXP_GEN.
+ * gcse.c (compute_hash_table_work): Do not elide
+ marking a block containing a call if the call might not
+ return.
+
2019-09-02 Bin Cheng <bin.linux@linux.alibaba.com>
Backport from mainline
0, regno, hrsi)
record_last_reg_set_info (insn, regno);
- if (! RTL_CONST_OR_PURE_CALL_P (insn))
+ if (! RTL_CONST_OR_PURE_CALL_P (insn)
+ || RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
record_last_mem_set_info (insn);
}
one field to assign, initialize the target from a temporary. */
if (TREE_THIS_VOLATILE (object)
&& !TREE_ADDRESSABLE (type)
- && num_nonzero_elements > 0
+ && (num_nonzero_elements > 0 || !cleared)
&& vec_safe_length (elts) > 1)
{
tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
+2019-09-02 Richard Biener <rguenther@suse.de>
+
+ Backport from mainline
+ 2019-07-19 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91200
+ * gcc.dg/torture/pr91200.c: New testcase.
+
+ 2019-07-15 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/91162
+ * gcc.dg/autopar/pr91162.c: New testcase.
+
+ 2019-07-11 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/91131
+ * gcc.target/i386/pr91131.c: New testcase.
+
+ 2019-07-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91126
+ * gcc.dg/torture/pr91126.c: New testcase.
+
+ 2019-07-10 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/91126
+ * gcc.dg/torture/pr91126.c: New testcase.
+
+ 2019-04-29 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/90278
+ * gcc.dg/torture/pr90278.c: New testcase.
+
+ 2019-04-11 Richard Biener <rguenther@suse.de>
+
+ PR tree-optimization/90020
+ * gcc.dg/torture/pr90020.c: New testcase.
+
2019-09-02 Bin Cheng <bin.linux@linux.alibaba.com>
Backport from mainline
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O -ftree-parallelize-loops=2 -fno-tree-dominator-opts" } */
+
+void
+zf (__int128 ct)
+{
+ __int128 *rk = &ct;
+
+ if (0)
+ {
+ int jj;
+
+t9:
+ for (jj = 0; jj < 60; ++jj)
+ {
+ }
+
+ __builtin_unreachable ();
+ }
+
+ while (*rk < 1)
+ ++*rk;
+
+ goto t9;
+}
--- /dev/null
+/* { dg-do run } */
+/* { dg-require-weak "" } */
+/* { dg-additional-options "-Wl,-undefined,dynamic_lookup" { target *-*-darwin* } } */
+/* { dg-additional-options "-Wl,-flat_namespace" { target *-*-darwin[89]* } } */
+
+void __attribute__((noinline,noclone))
+check (int i)
+{
+ if (i == 0)
+ __builtin_exit (0);
+}
+
+int i;
+extern int x __attribute__((weak));
+
+int main(int argc, char **argv)
+{
+ if (argc)
+ {
+ check (i);
+ return x;
+ }
+ else
+ {
+ check (i);
+ return x-1;
+ }
+ return 0;
+}
--- /dev/null
+/* { dg-do compile } */
+/* { dg-additional-options "-fexceptions -fnon-call-exceptions" } */
+
+double
+hc (void)
+{
+ double dp = 0.0;
+ double ek[1];
+
+ ek[0] = 1.0 / dp < 0.0;
+
+ return ek[0];
+}
--- /dev/null
+/* { dg-do run } */
+
+struct S
+{
+ __INT32_TYPE__ a : 24;
+ __INT32_TYPE__ b : 8;
+} s;
+
+int
+main()
+{
+ s.a = 0xfefefe;
+ s.b = 0xfe;
+ unsigned char c;
+ c = ((unsigned char *)&s)[0];
+ if (c != 0xfe)
+ __builtin_abort ();
+ c = ((unsigned char *)&s)[1];
+ if (c != 0xfe)
+ __builtin_abort ();
+ c = ((unsigned char *)&s)[2];
+ if (c != 0xfe)
+ __builtin_abort ();
+ c = ((unsigned char *)&s)[3];
+ if (c != 0xfe)
+ __builtin_abort ();
+ return 0;
+}
--- /dev/null
+/* { dg-do compile } */
+
+int printf (const char *, ...);
+
+char a;
+int b, c, **d;
+
+int main ()
+{
+ int f = -128, *g, *h[2] = {0, 0}, i;
+ printf("0");
+ if (a)
+ {
+ while (f > a) {
+ int *j = &i;
+ *j |= 0;
+ }
+ h[i] = &c;
+ }
+ if (h[1])
+ {
+ int **k = &g;
+ *k = &f;
+ while (i)
+ {
+ int **l[] = {&g};
+ }
+ int **m = &g;
+ *d = *m = &b;
+ }
+ return 0;
+}
--- /dev/null
+/* { dg-do compile } */
+/* { dg-options "-O" } */
+
+struct Reg_T {
+ unsigned int a : 3;
+ unsigned int b : 1;
+ unsigned int c : 4;
+};
+
+volatile struct Reg_T Reg_A;
+
+int
+main ()
+{
+ Reg_A = (struct Reg_T){ .a = 0, .b = 0, .c = 0 };
+ return 0;
+}
+
+/* { dg-final { scan-assembler-times "mov\[^\r\n\]*Reg_A" 1 } } */
if (virtual_operand_p (op))
{
/* Remove the phi nodes for virtual operands (alias analysis will be
- run for the new function, anyway). */
+ run for the new function, anyway). But replace all uses that
+ might be outside of the region we move. */
+ use_operand_p use_p;
+ imm_use_iterator iter;
+ gimple *use_stmt;
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, SSA_NAME_VAR (op));
remove_phi_node (&psi, true);
continue;
}
{
int did_something;
did_something = forward_propagate_into_comparison (&gsi);
+ if (maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (gsi)))
+ bitmap_set_bit (to_purge, bb->index);
if (did_something == 2)
cfg_changed = true;
changed = did_something != 0;
|| gimple_has_volatile_ops (assign))
return false;
+ /* And no PHI nodes so all uses in the single stmt are also
+ available where we insert to. */
+ if (!gimple_seq_empty_p (phi_nodes (middle_bb)))
+ return false;
+
locus = gimple_location (assign);
lhs = gimple_assign_lhs (assign);
rhs = gimple_assign_rhs1 (assign);
continue;
}
+ /* If the REFERENCE traps and there was a preceding
+ point in the block that might not return avoid
+ adding the reference to EXP_GEN. */
+ if (BB_MAY_NOTRETURN (block)
+ && vn_reference_may_trap (ref))
+ continue;
+
/* If the value of the reference is not invalidated in
this block until it is computed, add the expression
to EXP_GEN. */
tree rhs = gimple_assign_rhs1 (def_stmt);
if (TREE_CODE (rhs) == SSA_NAME)
rhs = SSA_VAL (rhs);
+ unsigned pad = 0;
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (rhs));
+ if (BYTES_BIG_ENDIAN
+ && (SCALAR_INT_MODE_P (mode)
+ || ALL_SCALAR_FIXED_POINT_MODE_P (mode)
+ || SCALAR_FLOAT_MODE_P (mode)))
+ {
+ /* On big-endian the padding is at the 'front' so
+ just skip the initial bytes. */
+ pad = GET_MODE_SIZE (mode) - size2 / BITS_PER_UNIT;
+ }
len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
buffer, sizeof (buffer),
- (offset - offset2) / BITS_PER_UNIT);
+ (offset - offset2) / BITS_PER_UNIT + pad);
if (len > 0 && len * BITS_PER_UNIT >= ref->size)
{
tree type = vr->type;
return false;
}
+
+/* Return true if the reference operation REF may trap. */
+
+bool
+vn_reference_may_trap (vn_reference_t ref)
+{
+ switch (ref->operands[0].opcode)
+ {
+ case MODIFY_EXPR:
+ case CALL_EXPR:
+ /* We do not handle calls. */
+ case ADDR_EXPR:
+ /* And toplevel address computations never trap. */
+ return false;
+ default:;
+ }
+
+ vn_reference_op_t op;
+ unsigned i;
+ FOR_EACH_VEC_ELT (ref->operands, i, op)
+ {
+ switch (op->opcode)
+ {
+ case WITH_SIZE_EXPR:
+ case TARGET_MEM_REF:
+ /* Always variable. */
+ return true;
+ case COMPONENT_REF:
+ if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
+ return true;
+ break;
+ case ARRAY_RANGE_REF:
+ case ARRAY_REF:
+ if (TREE_CODE (op->op0) == SSA_NAME)
+ return true;
+ break;
+ case MEM_REF:
+ /* Nothing interesting in itself, the base is separate. */
+ break;
+ /* The following are the address bases. */
+ case SSA_NAME:
+ return true;
+ case ADDR_EXPR:
+ if (op->op0)
+ return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
+ return false;
+ default:;
+ }
+ }
+ return false;
+}
bool vn_nary_op_eq (const_vn_nary_op_t const vno1,
const_vn_nary_op_t const vno2);
bool vn_nary_may_trap (vn_nary_op_t);
+bool vn_reference_may_trap (vn_reference_t);
bool vn_reference_eq (const_vn_reference_t const, const_vn_reference_t const);
unsigned int get_max_value_id (void);
unsigned int get_next_value_id (void);