/* Gimple IR support functions.
- Copyright (C) 2007-2015 Free Software Foundation, Inc.
+ Copyright (C) 2007-2020 Free Software Foundation, Inc.
Contributed by Aldy Hernandez <aldyh@redhat.com>
This file is part of GCC.
#include "backend.h"
#include "tree.h"
#include "gimple.h"
-#include "hard-reg-set.h"
#include "ssa.h"
-#include "target.h"
+#include "cgraph.h"
+#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
-#include "stmt.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimplify.h"
-#include "diagnostic.h"
-#include "value-prof.h"
-#include "flags.h"
-#include "alias.h"
-#include "demangle.h"
+#include "target.h"
+#include "builtins.h"
+#include "selftest.h"
+#include "gimple-pretty-print.h"
+#include "stringpool.h"
+#include "attribs.h"
+#include "asan.h"
#include "langhooks.h"
-#include "cgraph.h"
/* All the tuples have their operand vector (if present) at the very bottom
/* Gimple stats. */
-int gimple_alloc_counts[(int) gimple_alloc_kind_all];
-int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
+uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all];
+uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all];
/* Keep in sync with gimple.h:enum gimple_alloc_kind. */
static const char * const gimple_alloc_kind_names[] = {
"everything else"
};
+/* Static gimple tuple members. */
+const enum gimple_code gassign::code_;
+const enum gimple_code gcall::code_;
+const enum gimple_code gcond::code_;
+
+
/* Gimple tuple constructors.
Note: Any constructor taking a ``gimple_seq'' as a parameter, can
be passed a NULL to start with an empty sequence. */
/* Set the code for statement G to CODE. */
static inline void
-gimple_set_code (gimple g, enum gimple_code code)
+gimple_set_code (gimple *g, enum gimple_code code)
{
g->code = code;
}
/* Return the number of bytes needed to hold a GIMPLE statement with
code CODE. */
-static inline size_t
-gimple_size (enum gimple_code code)
+size_t
+gimple_size (enum gimple_code code, unsigned num_ops)
{
- return gsstruct_code_size[gss_for_code (code)];
+ size_t size = gsstruct_code_size[gss_for_code (code)];
+ if (num_ops > 0)
+ size += (sizeof (tree) * (num_ops - 1));
+ return size;
+}
+
+/* Initialize GIMPLE statement G with CODE and NUM_OPS. */
+
+void
+gimple_init (gimple *g, enum gimple_code code, unsigned num_ops)
+{
+ gimple_set_code (g, code);
+ gimple_set_num_ops (g, num_ops);
+
+ /* Do not call gimple_set_modified here as it has other side
+ effects and this tuple is still not completely built. */
+ g->modified = 1;
+ gimple_init_singleton (g);
}
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
operands. */
-gimple
-gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
+gimple *
+gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
size_t size;
- gimple stmt;
-
- size = gimple_size (code);
- if (num_ops > 0)
- size += sizeof (tree) * (num_ops - 1);
+ gimple *stmt;
+ size = gimple_size (code, num_ops);
if (GATHER_STATISTICS)
{
enum gimple_alloc_kind kind = gimple_alloc_kind (code);
}
stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
- gimple_set_code (stmt, code);
- gimple_set_num_ops (stmt, num_ops);
-
- /* Do not call gimple_set_modified here as it has other side
- effects and this tuple is still not completely built. */
- stmt->modified = 1;
- gimple_init_singleton (stmt);
-
+ gimple_init (stmt, code, num_ops);
return stmt;
}
/* Set SUBCODE to be the code of the expression computed by statement G. */
static inline void
-gimple_set_subcode (gimple g, unsigned subcode)
+gimple_set_subcode (gimple *g, unsigned subcode)
{
/* We only have 16 bits for the RHS code. Assert that we are not
overflowing it. */
#define gimple_build_with_ops(c, s, n) \
gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
-static gimple
+static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
unsigned num_ops MEM_STAT_DECL)
{
- gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
+ gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT);
gimple_set_subcode (s, subcode);
return s;
this fact. */
gcall *
-gimple_build_call_from_tree (tree t)
+gimple_build_call_from_tree (tree t, tree fnptrtype)
{
unsigned i, nargs;
gcall *call;
- tree fndecl = get_callee_fndecl (t);
gcc_assert (TREE_CODE (t) == CALL_EXPR);
nargs = call_expr_nargs (t);
- call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
+
+ tree fndecl = NULL_TREE;
+ if (CALL_EXPR_FN (t) == NULL_TREE)
+ call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs);
+ else
+ {
+ fndecl = get_callee_fndecl (t);
+ call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
+ }
for (i = 0; i < nargs; i++)
gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
gimple_set_block (call, TREE_BLOCK (t));
+ gimple_set_location (call, EXPR_LOCATION (t));
/* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
+ gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t));
gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
if (fndecl
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
+ && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
+ && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
else
gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
gimple_call_set_nothrow (call, TREE_NOTHROW (t));
+ gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t));
gimple_set_no_warning (call, TREE_NO_WARNING (t));
- gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));
+
+ if (fnptrtype)
+ {
+ gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
+
+ /* Check if it's an indirect CALL and the type has the
+ nocf_check attribute. In that case propagate the information
+ to the gimple CALL insn. */
+ if (!fndecl)
+ {
+ gcc_assert (POINTER_TYPE_P (fnptrtype));
+ tree fntype = TREE_TYPE (fnptrtype);
+
+ if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype)))
+ gimple_call_set_nocf_check (call, TRUE);
+ }
+ }
return call;
}
enum tree_code subcode;
tree op1, op2, op3;
- extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
+ extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
/* Build a GIMPLE_NOP statement. */
-gimple
+gimple *
gimple_build_nop (void)
{
return gimple_alloc (GIMPLE_NOP, 0);
CLEANUP is the cleanup expression. */
-gimple
+gimple *
gimple_build_wce (gimple_seq cleanup)
{
- gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
+ gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
if (cleanup)
gimple_wce_set_cleanup (p, cleanup);
VAR is bound to VALUE; block and location are taken from STMT. */
gdebug *
-gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
+gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
VAR is bound to VALUE; block and location are taken from STMT. */
gdebug *
-gimple_build_debug_source_bind_stat (tree var, tree value,
- gimple stmt MEM_STAT_DECL)
+gimple_build_debug_source_bind (tree var, tree value,
+ gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (
}
+/* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at
+ LOCATION. */
+
+gdebug *
+gimple_build_debug_begin_stmt (tree block, location_t location
+ MEM_STAT_DECL)
+{
+ gdebug *p
+ = as_a <gdebug *> (
+ gimple_build_with_ops_stat (GIMPLE_DEBUG,
+ (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0
+ PASS_MEM_STAT));
+
+ gimple_set_location (p, location);
+ gimple_set_block (p, block);
+ cfun->debug_marker_count++;
+
+ return p;
+}
+
+
+/* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at
+ LOCATION. The BLOCK links to the inlined function. */
+
+gdebug *
+gimple_build_debug_inline_entry (tree block, location_t location
+ MEM_STAT_DECL)
+{
+ gdebug *p
+ = as_a <gdebug *> (
+ gimple_build_with_ops_stat (GIMPLE_DEBUG,
+ (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0
+ PASS_MEM_STAT));
+
+ gimple_set_location (p, location);
+ gimple_set_block (p, block);
+ cfun->debug_marker_count++;
+
+ return p;
+}
+
+
/* Build a GIMPLE_OMP_CRITICAL statement.
BODY is the sequence of statements for which only one thread can execute.
- NAME is optional identifier for this critical block. */
+ NAME is optional identifier for this critical block.
+ CLAUSES are clauses for this critical block. */
gomp_critical *
-gimple_build_omp_critical (gimple_seq body, tree name)
+gimple_build_omp_critical (gimple_seq body, tree name, tree clauses)
{
gomp_critical *p
= as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
gimple_omp_critical_set_name (p, name);
+ gimple_omp_critical_set_clauses (p, clauses);
if (body)
gimple_omp_set_body (p, body);
BODY is sequence of statements inside the for loop.
KIND is the `for' variant.
- CLAUSES, are any of the construct's clauses.
+ CLAUSES are any of the construct's clauses.
COLLAPSE is the collapse count.
PRE_BODY is the sequence of statements that are loop invariant. */
/* Build a GIMPLE_OMP_PARALLEL statement.
BODY is sequence of statements which are executed in parallel.
- CLAUSES, are the OMP parallel construct's clauses.
+ CLAUSES are the OMP parallel construct's clauses.
CHILD_FN is the function created for the parallel threads to execute.
DATA_ARG are the shared data argument(s). */
/* Build a GIMPLE_OMP_TASK statement.
BODY is sequence of statements which are executed by the explicit task.
- CLAUSES, are the OMP parallel construct's clauses.
+ CLAUSES are the OMP task construct's clauses.
CHILD_FN is the function created for the parallel threads to execute.
DATA_ARG are the shared data argument(s).
COPY_FN is the optional function for firstprivate initialization.
BODY is the sequence of statements in the section. */
-gimple
+gimple *
gimple_build_omp_section (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
if (body)
gimple_omp_set_body (p, body);
BODY is the sequence of statements to be executed by just the master. */
-gimple
+gimple *
gimple_build_omp_master (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
if (body)
gimple_omp_set_body (p, body);
return p;
}
+/* Build a GIMPLE_OMP_GRID_BODY statement.
+
+ BODY is the sequence of statements to be executed by the kernel. */
+
+gimple *
+gimple_build_omp_grid_body (gimple_seq body)
+{
+ gimple *p = gimple_alloc (GIMPLE_OMP_GRID_BODY, 0);
+ if (body)
+ gimple_omp_set_body (p, body);
+
+ return p;
+}
/* Build a GIMPLE_OMP_TASKGROUP statement.
BODY is the sequence of statements to be executed by the taskgroup
- construct. */
+ construct.
+ CLAUSES are any of the construct's clauses. */
-gimple
-gimple_build_omp_taskgroup (gimple_seq body)
+gimple *
+gimple_build_omp_taskgroup (gimple_seq body, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
+ gimple_omp_taskgroup_set_clauses (p, clauses);
if (body)
gimple_omp_set_body (p, body);
/* Build a GIMPLE_OMP_ORDERED statement.
BODY is the sequence of statements inside a loop that will executed in
- sequence. */
+ sequence.
+ CLAUSES are clauses for this statement. */
-gimple
-gimple_build_omp_ordered (gimple_seq body)
+gomp_ordered *
+gimple_build_omp_ordered (gimple_seq body, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
+ gomp_ordered *p
+ = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0));
+ gimple_omp_ordered_set_clauses (p, clauses);
if (body)
gimple_omp_set_body (p, body);
/* Build a GIMPLE_OMP_RETURN statement.
WAIT_P is true if this is a non-waiting return. */
-gimple
+gimple *
gimple_build_omp_return (bool wait_p)
{
- gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
if (wait_p)
gimple_omp_return_set_nowait (p);
}
+/* Build a GIMPLE_OMP_SCAN statement.
+
+ BODY is the sequence of statements to be executed by the scan
+ construct.
+ CLAUSES are any of the construct's clauses. */
+
+gomp_scan *
+gimple_build_omp_scan (gimple_seq body, tree clauses)
+{
+ gomp_scan *p
+ = as_a <gomp_scan *> (gimple_alloc (GIMPLE_OMP_SCAN, 0));
+ gimple_omp_scan_set_clauses (p, clauses);
+ if (body)
+ gimple_omp_set_body (p, body);
+
+ return p;
+}
+
+
/* Build a GIMPLE_OMP_SECTIONS statement.
BODY is a sequence of section statements.
/* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
-gimple
+gimple *
gimple_build_omp_sections_switch (void)
{
return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
/* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
gomp_atomic_load *
-gimple_build_omp_atomic_load (tree lhs, tree rhs)
+gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo)
{
gomp_atomic_load *p
= as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
gimple_omp_atomic_load_set_lhs (p, lhs);
gimple_omp_atomic_load_set_rhs (p, rhs);
+ gimple_omp_atomic_set_memory_order (p, mo);
return p;
}
VAL is the value we are storing. */
gomp_atomic_store *
-gimple_build_omp_atomic_store (tree val)
+gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo)
{
gomp_atomic_store *p
= as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
gimple_omp_atomic_store_set_val (p, val);
+ gimple_omp_atomic_set_memory_order (p, mo);
return p;
}
/* Build a GIMPLE_TRANSACTION statement. */
gtransaction *
-gimple_build_transaction (gimple_seq body, tree label)
+gimple_build_transaction (gimple_seq body)
{
gtransaction *p
= as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
gimple_transaction_set_body (p, body);
- gimple_transaction_set_label (p, label);
+ gimple_transaction_set_label_norm (p, 0);
+ gimple_transaction_set_label_uninst (p, 0);
+ gimple_transaction_set_label_over (p, 0);
return p;
}
/* Complain of a gimple type mismatch and die. */
void
-gimple_check_failed (const_gimple gs, const char *file, int line,
+gimple_check_failed (const gimple *gs, const char *file, int line,
const char *function, enum gimple_code code,
enum tree_code subcode)
{
*SEQ_P is NULL, a new sequence is allocated. */
void
-gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
if (gs == NULL)
before the def/use vectors have been constructed. */
void
-gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
/* Determine whether to assign a location to the statement GS. */
static bool
-should_carry_location_p (gimple gs)
+should_carry_location_p (gimple *gs)
{
/* Don't emit a line note for a label. We particularly don't want to
emit one for the break label, since it doesn't actually correspond
/* Set the location for gimple statement GS to LOCATION. */
static void
-annotate_one_with_location (gimple gs, location_t location)
+annotate_one_with_location (gimple *gs, location_t location)
{
if (!gimple_has_location (gs)
&& !gimple_do_not_emit_location_p (gs)
for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
{
- gimple gs = gsi_stmt (i);
+ gimple *gs = gsi_stmt (i);
annotate_one_with_location (gs, location);
}
}
statement. */
static bool
-empty_stmt_p (gimple stmt)
+empty_stmt_p (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_NOP)
return true;
{
gimple_stmt_iterator gsi;
gimple_seq new_seq = NULL;
- gimple stmt;
+ gimple *stmt;
for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
{
/* Return true if calls C1 and C2 are known to go to the same function. */
bool
-gimple_call_same_target_p (const_gimple c1, const_gimple c2)
+gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
if (gimple_call_internal_p (c1))
return (gimple_call_internal_p (c2)
- && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
+ && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2)
+ && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1))
+ || c1 == c2));
else
return (gimple_call_fn (c1) == gimple_call_fn (c2)
|| (gimple_call_fndecl (c1)
call_expr_flags, but for gimple tuples. */
int
-gimple_call_flags (const_gimple stmt)
+gimple_call_flags (const gimple *stmt)
{
- int flags;
- tree decl = gimple_call_fndecl (stmt);
+ int flags = 0;
- if (decl)
- flags = flags_from_decl_or_type (decl);
- else if (gimple_call_internal_p (stmt))
+ if (gimple_call_internal_p (stmt))
flags = internal_fn_flags (gimple_call_internal_fn (stmt));
else
- flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
+ {
+ tree decl = gimple_call_fndecl (stmt);
+ if (decl)
+ flags = flags_from_decl_or_type (decl);
+ flags |= flags_from_decl_or_type (gimple_call_fntype (stmt));
+ }
if (stmt->subcode & GF_CALL_NOTHROW)
flags |= ECF_NOTHROW;
+ if (stmt->subcode & GF_CALL_BY_DESCRIPTOR)
+ flags |= ECF_BY_DESCRIPTOR;
+
return flags;
}
}
+/* Return true if call STMT is known to return a non-zero result. */
+
+bool
+gimple_call_nonnull_result_p (gcall *call)
+{
+ tree fndecl = gimple_call_fndecl (call);
+ if (!fndecl)
+ return false;
+ if (flag_delete_null_pointer_checks && !flag_check_new
+ && DECL_IS_OPERATOR_NEW_P (fndecl)
+ && !TREE_NOTHROW (fndecl))
+ return true;
+
+ /* References are always non-NULL. */
+ if (flag_delete_null_pointer_checks
+ && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE)
+ return true;
+
+ if (flag_delete_null_pointer_checks
+ && lookup_attribute ("returns_nonnull",
+ TYPE_ATTRIBUTES (gimple_call_fntype (call))))
+ return true;
+ return gimple_alloca_call_p (call);
+}
+
+
+/* If CALL returns a non-null result in an argument, return that arg. */
+
+tree
+gimple_call_nonnull_arg (gcall *call)
+{
+ tree fndecl = gimple_call_fndecl (call);
+ if (!fndecl)
+ return NULL_TREE;
+
+ unsigned rf = gimple_call_return_flags (call);
+ if (rf & ERF_RETURNS_ARG)
+ {
+ unsigned argnum = rf & ERF_RETURN_ARG_MASK;
+ if (argnum < gimple_call_num_args (call))
+ {
+ tree arg = gimple_call_arg (call, argnum);
+ if (SSA_VAR_P (arg)
+ && infer_nonnull_range_by_attribute (call, arg))
+ return arg;
+ }
+ }
+ return NULL_TREE;
+}
+
+
/* Return true if GS is a copy assignment. */
bool
-gimple_assign_copy_p (gimple gs)
+gimple_assign_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& is_gimple_val (gimple_op (gs, 1)));
/* Return true if GS is a SSA_NAME copy assignment. */
bool
-gimple_assign_ssa_name_copy_p (gimple gs)
+gimple_assign_ssa_name_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
treatment of unary NOPs is appropriate. */
bool
-gimple_assign_unary_nop_p (gimple gs)
+gimple_assign_unary_nop_p (gimple *gs)
{
return (is_gimple_assign (gs)
&& (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
/* Set BB to be the basic block holding G. */
void
-gimple_set_bb (gimple stmt, basic_block bb)
+gimple_set_bb (gimple *stmt, basic_block bb)
{
stmt->bb = bb;
enum tree_code subcode;
tree op1, op2, op3;
- extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
+ extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
}
tree op1, tree op2, tree op3)
{
unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *old_stmt = stmt;
/* If the new CODE needs more operands, allocate a new statement. */
if (gimple_num_ops (stmt) < new_rhs_ops + 1)
{
- tree lhs = gimple_assign_lhs (stmt);
- gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
- memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
- gimple_init_singleton (new_stmt);
- gsi_replace (gsi, new_stmt, true);
- stmt = new_stmt;
+ tree lhs = gimple_assign_lhs (old_stmt);
+ stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1);
+ memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt)));
+ gimple_init_singleton (stmt);
/* The LHS needs to be reset as this also changes the SSA name
on the LHS. */
gimple_assign_set_rhs2 (stmt, op2);
if (new_rhs_ops > 2)
gimple_assign_set_rhs3 (stmt, op3);
+ if (stmt != old_stmt)
+ gsi_replace (gsi, stmt, false);
}
statement other than an assignment or a call. */
tree
-gimple_get_lhs (const_gimple stmt)
+gimple_get_lhs (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
return gimple_assign_lhs (stmt);
else if (code == GIMPLE_CALL)
return gimple_call_lhs (stmt);
+ else if (code == GIMPLE_PHI)
+ return gimple_phi_result (stmt);
else
return NULL_TREE;
}
either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
void
-gimple_set_lhs (gimple stmt, tree lhs)
+gimple_set_lhs (gimple *stmt, tree lhs)
{
enum gimple_code code = gimple_code (stmt);
and VUSE operand arrays are set to empty in the new copy. The new
copy isn't part of any sequence. */
-gimple
-gimple_copy (gimple stmt)
+gimple *
+gimple_copy (gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
unsigned num_ops = gimple_num_ops (stmt);
- gimple copy = gimple_alloc (code, num_ops);
+ gimple *copy = gimple_alloc (code, num_ops);
unsigned i;
/* Shallow copy all the fields from STMT. */
goto copy_omp_body;
case GIMPLE_OMP_CRITICAL:
- t = unshare_expr (gimple_omp_critical_name (
- as_a <gomp_critical *> (stmt)));
+ t = unshare_expr (gimple_omp_critical_name
+ (as_a <gomp_critical *> (stmt)));
gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
+ t = unshare_expr (gimple_omp_critical_clauses
+ (as_a <gomp_critical *> (stmt)));
+ gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t);
+ goto copy_omp_body;
+
+ case GIMPLE_OMP_ORDERED:
+ t = unshare_expr (gimple_omp_ordered_clauses
+ (as_a <gomp_ordered *> (stmt)));
+ gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t);
+ goto copy_omp_body;
+
+ case GIMPLE_OMP_SCAN:
+ t = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt));
+ t = unshare_expr (t);
+ gimple_omp_scan_set_clauses (as_a <gomp_scan *> (copy), t);
+ goto copy_omp_body;
+
+ case GIMPLE_OMP_TASKGROUP:
+ t = unshare_expr (gimple_omp_taskgroup_clauses (stmt));
+ gimple_omp_taskgroup_set_clauses (copy, t);
goto copy_omp_body;
case GIMPLE_OMP_SECTIONS:
gimple_omp_sections_set_clauses (copy, t);
t = unshare_expr (gimple_omp_sections_control (stmt));
gimple_omp_sections_set_control (copy, t);
- /* FALLTHRU */
+ goto copy_omp_body;
case GIMPLE_OMP_SINGLE:
+ {
+ gomp_single *omp_single_copy = as_a <gomp_single *> (copy);
+ t = unshare_expr (gimple_omp_single_clauses (stmt));
+ gimple_omp_single_set_clauses (omp_single_copy, t);
+ }
+ goto copy_omp_body;
+
case GIMPLE_OMP_TARGET:
+ {
+ gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt);
+ gomp_target *omp_target_copy = as_a <gomp_target *> (copy);
+ t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt));
+ gimple_omp_target_set_clauses (omp_target_copy, t);
+ t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt));
+ gimple_omp_target_set_data_arg (omp_target_copy, t);
+ }
+ goto copy_omp_body;
+
case GIMPLE_OMP_TEAMS:
+ {
+ gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy);
+ t = unshare_expr (gimple_omp_teams_clauses (stmt));
+ gimple_omp_teams_set_clauses (omp_teams_copy, t);
+ }
+ /* FALLTHRU */
+
case GIMPLE_OMP_SECTION:
case GIMPLE_OMP_MASTER:
- case GIMPLE_OMP_TASKGROUP:
- case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_GRID_BODY:
copy_omp_body:
new_seq = gimple_seq_copy (gimple_omp_body (stmt));
gimple_omp_set_body (copy, new_seq);
gimple_set_modified (copy, true);
}
+ if (gimple_debug_nonbind_marker_p (stmt))
+ cfun->debug_marker_count++;
+
return copy;
}
+/* Move OLD_STMT's vuse and vdef operands to NEW_STMT, on the assumption
+ that OLD_STMT is about to be removed. */
+
+void
+gimple_move_vops (gimple *new_stmt, gimple *old_stmt)
+{
+ tree vdef = gimple_vdef (old_stmt);
+ gimple_set_vuse (new_stmt, gimple_vuse (old_stmt));
+ gimple_set_vdef (new_stmt, vdef);
+ if (vdef && TREE_CODE (vdef) == SSA_NAME)
+ SSA_NAME_DEF_STMT (vdef) = new_stmt;
+}
/* Return true if statement S has side-effects. We consider a
statement to have side effects if:
- Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
bool
-gimple_has_side_effects (const_gimple s)
+gimple_has_side_effects (const gimple *s)
{
if (is_gimple_debug (s))
return false;
S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
bool
-gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
+gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
tree t, div = NULL_TREE;
enum tree_code op;
return false;
case GIMPLE_ASSIGN:
- t = gimple_expr_type (s);
op = gimple_assign_rhs_code (s);
+
+ /* For COND_EXPR and VEC_COND_EXPR only the condition may trap. */
+ if (op == COND_EXPR || op == VEC_COND_EXPR)
+ return tree_could_trap_p (gimple_assign_rhs1 (s));
+
+ /* For comparisons we need to check rhs operand types instead of rhs type
+ (which is BOOLEAN_TYPE). */
+ if (TREE_CODE_CLASS (op) == tcc_comparison)
+ t = TREE_TYPE (gimple_assign_rhs1 (s));
+ else
+ t = gimple_expr_type (s);
+
if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
div = gimple_assign_rhs2 (s);
+
return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
(INTEGRAL_TYPE_P (t)
&& TYPE_OVERFLOW_TRAPS (t)),
div));
+ case GIMPLE_COND:
+ t = TREE_TYPE (gimple_cond_lhs (s));
+ return operation_could_trap_p (gimple_cond_code (s),
+ FLOAT_TYPE_P (t), false, NULL_TREE);
+
default:
break;
}
/* Return true if statement S can trap. */
bool
-gimple_could_trap_p (gimple s)
+gimple_could_trap_p (gimple *s)
{
return gimple_could_trap_p_1 (s, true, true);
}
/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
bool
-gimple_assign_rhs_could_trap_p (gimple s)
+gimple_assign_rhs_could_trap_p (gimple *s)
{
gcc_assert (is_gimple_assign (s));
return gimple_could_trap_p_1 (s, true, false);
void
dump_gimple_statistics (void)
{
- int i, total_tuples = 0, total_bytes = 0;
+ int i;
+ uint64_t total_tuples = 0, total_bytes = 0;
if (! GATHER_STATISTICS)
{
- fprintf (stderr, "No gimple statistics\n");
+ fprintf (stderr, "No GIMPLE statistics\n");
return;
}
fprintf (stderr, "---------------------------------------\n");
for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
{
- fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
- gimple_alloc_counts[i], gimple_alloc_sizes[i]);
+ fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n",
+ gimple_alloc_kind_names[i],
+ SIZE_AMOUNT (gimple_alloc_counts[i]),
+ SIZE_AMOUNT (gimple_alloc_sizes[i]));
total_tuples += gimple_alloc_counts[i];
total_bytes += gimple_alloc_sizes[i];
}
fprintf (stderr, "---------------------------------------\n");
- fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
+ fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total",
+ SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes));
fprintf (stderr, "---------------------------------------\n");
}
unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
- enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
-
- if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
- return 1;
- else if (rhs_class == GIMPLE_BINARY_RHS)
- return 2;
- else if (rhs_class == GIMPLE_TERNARY_RHS)
- return 3;
- else
- gcc_unreachable ();
+ switch (get_gimple_rhs_class (code))
+ {
+ case GIMPLE_UNARY_RHS:
+ case GIMPLE_SINGLE_RHS:
+ return 1;
+ case GIMPLE_BINARY_RHS:
+ return 2;
+ case GIMPLE_TERNARY_RHS:
+ return 3;
+ default:
+ gcc_unreachable ();
+ }
}
#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
|| (SYM) == REALIGN_LOAD_EXPR \
|| (SYM) == VEC_COND_EXPR \
|| (SYM) == VEC_PERM_EXPR \
- || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
+ || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS \
: ((SYM) == CONSTRUCTOR \
|| (SYM) == OBJ_TYPE_REF \
|| (SYM) == ASSERT_EXPR \
alias_set_type
gimple_get_alias_set (tree t)
{
- tree u;
-
- /* Permit type-punning when accessing a union, provided the access
- is directly through the union. For example, this code does not
- permit taking the address of a union member and then storing
- through it. Even the type-punning allowed here is a GCC
- extension, albeit a common and useful one; the C standard says
- that such accesses have implementation-defined behavior. */
- for (u = t;
- TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
- u = TREE_OPERAND (u, 0))
- if (TREE_CODE (u) == COMPONENT_REF
- && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
- return 0;
-
/* That's all the expressions we handle specially. */
if (!TYPE_P (t))
return -1;
return get_alias_set (t1);
}
+ /* Allow aliasing between enumeral types and the underlying
+ integer type. This is required for C since those are
+ compatible types. */
+ else if (TREE_CODE (t) == ENUMERAL_TYPE)
+ {
+ tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)),
+ false /* short-cut above */);
+ return get_alias_set (t1);
+ }
+
return -1;
}
/* Helper for gimple_ior_addresses_taken_1. */
static bool
-gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
+gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
{
bitmap addresses_taken = (bitmap)data;
addr = get_base_address (addr);
were any in this stmt. */
bool
-gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
{
return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
gimple_ior_addresses_taken_1);
}
-/* Return true if TYPE1 and TYPE2 are compatible enough for builtin
- processing. */
-
-static bool
-validate_type (tree type1, tree type2)
-{
- if (INTEGRAL_TYPE_P (type1)
- && INTEGRAL_TYPE_P (type2))
- ;
- else if (POINTER_TYPE_P (type1)
- && POINTER_TYPE_P (type2))
- ;
- else if (TREE_CODE (type1)
- != TREE_CODE (type2))
- return false;
- return true;
-}
-
/* Return true when STMTs arguments and return value match those of FNDECL,
a decl of a builtin function. */
bool
-gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
+gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
tree ret = gimple_call_lhs (stmt);
if (ret
- && !validate_type (TREE_TYPE (ret), TREE_TYPE (TREE_TYPE (fndecl))))
+ && !useless_type_conversion_p (TREE_TYPE (ret),
+ TREE_TYPE (TREE_TYPE (fndecl))))
return false;
tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
if (!targs)
return true;
tree arg = gimple_call_arg (stmt, i);
- if (!validate_type (TREE_TYPE (arg), TREE_VALUE (targs)))
+ tree type = TREE_VALUE (targs);
+ if (!useless_type_conversion_p (type, TREE_TYPE (arg))
+ /* char/short integral arguments are promoted to int
+ by several frontends if targetm.calls.promote_prototypes
+ is true. Allow such promotion too. */
+ && !(INTEGRAL_TYPE_P (type)
+ && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
+ && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
+ && useless_type_conversion_p (integer_type_node,
+ TREE_TYPE (arg))))
return false;
targs = TREE_CHAIN (targs);
}
return true;
}
+/* Return true when STMT is a replaceable operator delete call. */
+
+bool
+gimple_call_replaceable_operator_delete_p (const gcall *stmt)
+{
+ tree fndecl;
+
+ if ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE)
+ return DECL_IS_REPLACEABLE_OPERATOR_DELETE_P (fndecl);
+ return false;
+}
+
/* Return true when STMT is builtins call. */
bool
-gimple_call_builtin_p (const_gimple stmt)
+gimple_call_builtin_p (const gimple *stmt)
{
tree fndecl;
if (is_gimple_call (stmt)
/* Return true when STMT is builtins call to CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
{
tree fndecl;
if (is_gimple_call (stmt)
/* Return true when STMT is builtins call to CODE of CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
{
tree fndecl;
if (is_gimple_call (stmt)
&& (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
- && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == code)
+ && fndecl_built_in_p (fndecl, code))
return gimple_builtin_call_types_compatible_p (stmt, fndecl);
return false;
}
+/* If CALL is a call to a combined_fn (i.e. an internal function or
+ a normal built-in function), return its code, otherwise return
+ CFN_LAST. */
+
+combined_fn
+gimple_call_combined_fn (const gimple *stmt)
+{
+ if (const gcall *call = dyn_cast <const gcall *> (stmt))
+ {
+ if (gimple_call_internal_p (call))
+ return as_combined_fn (gimple_call_internal_fn (call));
+
+ tree fndecl = gimple_call_fndecl (stmt);
+ if (fndecl
+ && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
+ && gimple_builtin_call_types_compatible_p (stmt, fndecl))
+ return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
+ }
+ return CFN_LAST;
+}
+
/* Return true if STMT clobbers memory. STMT is required to be a
GIMPLE_ASM. */
return true;
}
+ /* Non-empty basic ASM implicitly clobbers memory. */
+ if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0)
+ return true;
+
return false;
}
/* Return true when CALL is a call stmt that definitely doesn't
free any memory or makes it unavailable otherwise. */
bool
-nonfreeing_call_p (gimple call)
+nonfreeing_call_p (gimple *call)
{
if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
&& gimple_call_flags (call) & ECF_LEAF)
{
case IFN_ABNORMAL_DISPATCHER:
return true;
+ case IFN_ASAN_MARK:
+ return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON;
default:
if (gimple_call_flags (call) & ECF_LEAF)
return true;
return n->nonfreeing_fn;
}
+/* Return true when CALL is a call stmt that definitely need not
+ be considered to be a memory barrier. */
+bool
+nonbarrier_call_p (gimple *call)
+{
+ if (gimple_call_flags (call) & (ECF_PURE | ECF_CONST))
+ return true;
+ /* Should extend this to have a nonbarrier_fn flag, just as above in
+ the nonfreeing case. */
+ return false;
+}
+
/* Callback for walk_stmt_load_store_ops.
Return TRUE if OP will dereference the tree stored in DATA, FALSE
This routine only makes a superficial check for a dereference. Thus
it must only be used if it is safe to return a false negative. */
static bool
-check_loadstore (gimple, tree op, tree, void *data)
+check_loadstore (gimple *, tree op, tree, void *data)
{
- if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
- && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
- return true;
+ if (TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
+ {
+ /* Some address spaces may legitimately dereference zero. */
+ addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op));
+ if (targetm.addr_space.zero_address_valid (as))
+ return false;
+
+ return operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0);
+ }
return false;
}
-/* If OP can be inferred to be non-NULL after STMT executes, return true.
- DEREFERENCE is TRUE if we can use a pointer dereference to infer a
- non-NULL range, FALSE otherwise.
-
- ATTRIBUTE is TRUE if we can use attributes to infer a non-NULL range
- for function arguments and return values. FALSE otherwise. */
+/* Return true if OP can be inferred to be non-NULL after STMT executes,
+ either by using a pointer dereference or attributes. */
+bool
+infer_nonnull_range (gimple *stmt, tree op)
+{
+ return infer_nonnull_range_by_dereference (stmt, op)
+ || infer_nonnull_range_by_attribute (stmt, op);
+}
+/* Return true if OP can be inferred to be non-NULL after STMT
+ executes by using a pointer dereference. */
bool
-infer_nonnull_range (gimple stmt, tree op, bool dereference, bool attribute)
+infer_nonnull_range_by_dereference (gimple *stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
|| gimple_code (stmt) == GIMPLE_ASM)
return false;
- if (dereference
- && walk_stmt_load_store_ops (stmt, (void *)op,
- check_loadstore, check_loadstore))
+ if (walk_stmt_load_store_ops (stmt, (void *)op,
+ check_loadstore, check_loadstore))
return true;
- if (attribute
- && is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
+ return false;
+}
+
+/* Return true if OP can be inferred to be non-NULL after STMT
+   executes by using attributes.  */
+bool
+infer_nonnull_range_by_attribute (gimple *stmt, tree op)
+{
+ /* We can only assume that a pointer dereference will yield
+ non-NULL if -fdelete-null-pointer-checks is enabled. */
+ if (!flag_delete_null_pointer_checks
+ || !POINTER_TYPE_P (TREE_TYPE (op))
+ || gimple_code (stmt) == GIMPLE_ASM)
+ return false;
+
+ if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
{
tree fntype = gimple_call_fntype (stmt);
tree attrs = TYPE_ATTRIBUTES (fntype);
/* Now see if op appears in the nonnull list. */
for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
{
- int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
- tree arg = gimple_call_arg (stmt, idx);
- if (operand_equal_p (op, arg, 0))
- return true;
+ unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
+ if (idx < gimple_call_num_args (stmt))
+ {
+ tree arg = gimple_call_arg (stmt, idx);
+ if (operand_equal_p (op, arg, 0))
+ return true;
+ }
}
}
}
/* If this function is marked as returning non-null, then we can
infer OP is non-null if it is used in the return statement. */
- if (attribute)
- if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
- if (gimple_return_retval (return_stmt)
- && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
- && lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
- return true;
+ if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
+ if (gimple_return_retval (return_stmt)
+ && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
+ && lookup_attribute ("returns_nonnull",
+ TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
+ return true;
return false;
}
high = CASE_LOW (labels[len - 1]);
if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
{
+ tree widest_label = labels[0];
for (i = 1; i < len; i++)
{
high = CASE_LOW (labels[i]);
low = CASE_HIGH (labels[i - 1]);
if (!low)
low = CASE_LOW (labels[i - 1]);
- if (wi::add (low, 1) != high)
+
+ if (CASE_HIGH (labels[i]) != NULL_TREE
+ && (CASE_HIGH (widest_label) == NULL_TREE
+ || (wi::gtu_p
+ (wi::to_wide (CASE_HIGH (labels[i]))
+ - wi::to_wide (CASE_LOW (labels[i])),
+ wi::to_wide (CASE_HIGH (widest_label))
+ - wi::to_wide (CASE_LOW (widest_label))))))
+ widest_label = labels[i];
+
+ if (wi::to_wide (low) + 1 != wi::to_wide (high))
break;
}
if (i == len)
{
- tree label = CASE_LABEL (labels[0]);
+ /* Designate the label with the widest range to be the
+ default label. */
+ tree label = CASE_LABEL (widest_label);
default_case = build_case_label (NULL_TREE, NULL_TREE,
label);
}
for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gsi_remove (&gsi, true);
release_defs (stmt);
ggc_free (stmt);
/* See if STMT now calls function that takes no parameters and if so, drop
call arguments. This is used when devirtualization machinery redirects
- to __builtiln_unreacahble or __cxa_pure_virutal. */
+ to __builtin_unreachable or __cxa_pure_virtual. */
void
-maybe_remove_unused_call_args (struct function *fn, gimple stmt)
+maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
tree decl = gimple_call_fndecl (stmt);
if (TYPE_ARG_TYPES (TREE_TYPE (decl))
update_stmt_fn (fn, stmt);
}
}
+
+/* Return false if STMT will likely expand to real function call. */
+
+bool
+gimple_inexpensive_call_p (gcall *stmt)
+{
+ if (gimple_call_internal_p (stmt))
+ return true;
+ tree decl = gimple_call_fndecl (stmt);
+ if (decl && is_inexpensive_builtin (decl))
+ return true;
+ return false;
+}
+
+/* Return a non-artificial location for STMT. If STMT does not have
+ location information, get the location from EXPR. */
+
+location_t
+gimple_or_expr_nonartificial_location (gimple *stmt, tree expr)
+{
+ location_t loc = gimple_nonartificial_location (stmt);
+ if (loc == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (expr))
+ loc = tree_nonartificial_location (expr);
+ return expansion_point_location_if_in_system_header (loc);
+}
+
+
+#if CHECKING_P
+
+namespace selftest {
+
+/* Selftests for core gimple structures. */
+
+/* Verify that STMT is pretty-printed as EXPECTED.
+ Helper function for selftests. */
+
+static void
+verify_gimple_pp (const char *expected, gimple *stmt)
+{
+ pretty_printer pp;
+ pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */);
+ ASSERT_STREQ (expected, pp_formatted_text (&pp));
+}
+
+/* Build a GIMPLE_ASSIGN equivalent to
+ tmp = 5;
+ and verify various properties of it. */
+
+static void
+test_assign_single ()
+{
+ tree type = integer_type_node;
+ tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier ("tmp"),
+ type);
+ tree rhs = build_int_cst (type, 5);
+ gassign *stmt = gimple_build_assign (lhs, rhs);
+ verify_gimple_pp ("tmp = 5;", stmt);
+
+ ASSERT_TRUE (is_gimple_assign (stmt));
+ ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
+ ASSERT_EQ (lhs, gimple_get_lhs (stmt));
+ ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt));
+ ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt));
+ ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
+ ASSERT_TRUE (gimple_assign_single_p (stmt));
+ ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt));
+}
+
+/* Build a GIMPLE_ASSIGN equivalent to
+ tmp = a * b;
+ and verify various properties of it. */
+
+static void
+test_assign_binop ()
+{
+ tree type = integer_type_node;
+ tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier ("tmp"),
+ type);
+ tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier ("a"),
+ type);
+ tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier ("b"),
+ type);
+ gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b);
+ verify_gimple_pp ("tmp = a * b;", stmt);
+
+ ASSERT_TRUE (is_gimple_assign (stmt));
+ ASSERT_EQ (lhs, gimple_assign_lhs (stmt));
+ ASSERT_EQ (lhs, gimple_get_lhs (stmt));
+ ASSERT_EQ (a, gimple_assign_rhs1 (stmt));
+ ASSERT_EQ (b, gimple_assign_rhs2 (stmt));
+ ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt));
+ ASSERT_FALSE (gimple_assign_single_p (stmt));
+ ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt));
+}
+
+/* Build a GIMPLE_NOP and verify various properties of it. */
+
+static void
+test_nop_stmt ()
+{
+ gimple *stmt = gimple_build_nop ();
+ verify_gimple_pp ("GIMPLE_NOP", stmt);
+ ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt));
+ ASSERT_EQ (NULL, gimple_get_lhs (stmt));
+ ASSERT_FALSE (gimple_assign_single_p (stmt));
+}
+
+/* Build a GIMPLE_RETURN equivalent to
+ return 7;
+ and verify various properties of it. */
+
+static void
+test_return_stmt ()
+{
+ tree type = integer_type_node;
+ tree val = build_int_cst (type, 7);
+ greturn *stmt = gimple_build_return (val);
+ verify_gimple_pp ("return 7;", stmt);
+
+ ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
+ ASSERT_EQ (NULL, gimple_get_lhs (stmt));
+ ASSERT_EQ (val, gimple_return_retval (stmt));
+ ASSERT_FALSE (gimple_assign_single_p (stmt));
+}
+
+/* Build a GIMPLE_RETURN equivalent to
+ return;
+ and verify various properties of it. */
+
+static void
+test_return_without_value ()
+{
+ greturn *stmt = gimple_build_return (NULL);
+ verify_gimple_pp ("return;", stmt);
+
+ ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt));
+ ASSERT_EQ (NULL, gimple_get_lhs (stmt));
+ ASSERT_EQ (NULL, gimple_return_retval (stmt));
+ ASSERT_FALSE (gimple_assign_single_p (stmt));
+}
+
+/* Run all of the selftests within this file. */
+
+void
+gimple_c_tests ()
+{
+ test_assign_single ();
+ test_assign_binop ();
+ test_nop_stmt ();
+ test_return_stmt ();
+ test_return_without_value ();
+}
+
+} // namespace selftest
+
+
+#endif /* CHECKING_P */