/* UndefinedBehaviorSanitizer, undefined behavior detector.
- Copyright (C) 2013-2014 Free Software Foundation, Inc.
+ Copyright (C) 2013-2020 Free Software Foundation, Inc.
Contributed by Marek Polacek <polacek@redhat.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tree.h"
-#include "stor-layout.h"
-#include "stringpool.h"
-#include "predict.h"
-#include "dominance.h"
-#include "cfg.h"
-#include "cfganal.h"
-#include "basic-block.h"
-#include "hash-map.h"
-#include "is-a.h"
-#include "plugin-api.h"
-#include "vec.h"
-#include "hashtab.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "tm.h"
-#include "hard-reg-set.h"
-#include "input.h"
-#include "function.h"
-#include "ipa-ref.h"
-#include "cgraph.h"
+#include "backend.h"
+#include "rtl.h"
+#include "c-family/c-common.h"
+#include "gimple.h"
+#include "cfghooks.h"
#include "tree-pass.h"
-#include "tree-ssa-alias.h"
+#include "memmodel.h"
+#include "tm_p.h"
+#include "ssa.h"
+#include "cgraph.h"
#include "tree-pretty-print.h"
-#include "internal-fn.h"
-#include "gimple-expr.h"
-#include "gimple.h"
+#include "stor-layout.h"
+#include "cfganal.h"
#include "gimple-iterator.h"
-#include "gimple-ssa.h"
-#include "gimple-walk.h"
#include "output.h"
-#include "tm_p.h"
-#include "toplev.h"
#include "cfgloop.h"
#include "ubsan.h"
-#include "c-family/c-common.h"
-#include "rtl.h"
#include "expr.h"
-#include "tree-ssanames.h"
+#include "stringpool.h"
+#include "attribs.h"
#include "asan.h"
#include "gimplify-me.h"
-#include "intl.h"
-#include "realmpfr.h"
#include "dfp.h"
#include "builtins.h"
#include "tree-object-size.h"
-#include "tree-eh.h"
+#include "tree-cfg.h"
+#include "gimple-fold.h"
+#include "varasm.h"
/* Map from a tree to a VAR_DECL tree. */
tree decl;
};
-struct tree_type_map_cache_hasher : ggc_cache_hasher<tree_type_map *>
+struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
{
static inline hashval_t
hash (tree_type_map *t)
return a->type.from == b->type.from;
}
- static void
- handle_cache_entry (tree_type_map *&m)
+ static int
+ keep_cache_entry (tree_type_map *&m)
{
- extern void gt_ggc_mx (tree_type_map *&);
- if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
- return;
- else if (ggc_marked_p (m->type.from))
- gt_ggc_mx (m);
- else
- m = static_cast<tree_type_map *> (HTAB_DELETED_ENTRY);
+ return ggc_marked_p (m->type.from);
}
};
/* Helper routine, which encodes a value in the pointer_sized_int_node.
Arguments with precision <= POINTER_SIZE are passed directly,
the rest is passed by reference. T is a value we are to encode.
- IN_EXPAND_P is true if this function is called during expansion. */
+ PHASE determines when this function is called. */
tree
-ubsan_encode_value (tree t, bool in_expand_p)
+ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
{
tree type = TREE_TYPE (t);
- const unsigned int bitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ const unsigned int bitsize = GET_MODE_BITSIZE (mode);
if (bitsize <= POINTER_SIZE)
switch (TREE_CODE (type))
{
{
/* The reason for this is that we don't want to pessimize
code by making vars unnecessarily addressable. */
- tree var = create_tmp_var (type);
- tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
- if (in_expand_p)
+ tree var;
+ if (phase != UBSAN_ENCODE_VALUE_GENERIC)
+ {
+ var = create_tmp_var (type);
+ mark_addressable (var);
+ }
+ else
{
- rtx mem
- = assign_stack_temp_for_type (TYPE_MODE (type),
- GET_MODE_SIZE (TYPE_MODE (type)),
- type);
+ var = create_tmp_var_raw (type);
+ TREE_ADDRESSABLE (var) = 1;
+ DECL_CONTEXT (var) = current_function_decl;
+ }
+ if (phase == UBSAN_ENCODE_VALUE_RTL)
+ {
+ rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
+ type);
SET_DECL_RTL (var, mem);
expand_assignment (var, t, false);
return build_fold_addr_expr (var);
}
- t = build_fold_addr_expr (var);
- return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
+ if (phase != UBSAN_ENCODE_VALUE_GENERIC)
+ {
+ tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
+ t = build_fold_addr_expr (var);
+ return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
+ }
+ else
+ {
+ var = build4 (TARGET_EXPR, type, var, t, NULL_TREE, NULL_TREE);
+ return build_fold_addr_expr (var);
+ }
}
else
return build_fold_addr_expr (t);
TYPE_FIELDS (ret) = fields[0];
TYPE_NAME (ret) = type_decl;
TYPE_STUB_DECL (ret) = type_decl;
+ TYPE_ARTIFICIAL (ret) = 1;
layout_type (ret);
ubsan_type_descriptor_type = ret;
return ret;
TYPE_FIELDS (ret) = fields[0];
TYPE_NAME (ret) = type_decl;
TYPE_STUB_DECL (ret) = type_decl;
+ TYPE_ARTIFICIAL (ret) = 1;
layout_type (ret);
ubsan_source_location_type = ret;
return ret;
else
{
/* Fill in the values from LOC. */
- size_t len = strlen (xloc.file);
- str = build_string (len + 1, xloc.file);
- TREE_TYPE (str) = build_array_type (char_type_node,
- build_index_type (size_int (len)));
+ size_t len = strlen (xloc.file) + 1;
+ str = build_string (len, xloc.file);
+ TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
TREE_READONLY (str) = 1;
TREE_STATIC (str) = 1;
str = build_fold_addr_expr (str);
static unsigned short
get_ubsan_type_info_for_type (tree type)
{
- gcc_assert (TYPE_SIZE (type) && tree_fits_uhwi_p (TYPE_SIZE (type)));
if (TREE_CODE (type) == REAL_TYPE)
return tree_to_uhwi (TYPE_SIZE (type));
else if (INTEGRAL_TYPE_P (type))
return 0;
}
+/* Counters for internal labels. ubsan_ids[0] for Lubsan_type,
+ ubsan_ids[1] for Lubsan_data labels. */
+static GTY(()) unsigned int ubsan_ids[2];
+
/* Helper routine that returns ADDR_EXPR of a VAR_DECL of a type
descriptor. It first looks into the hash table; if not found,
create the VAR_DECL, put it into the hash table and return the
tree dtype = ubsan_get_type_descriptor_type ();
tree type2 = type;
const char *tname = NULL;
- char *pretty_name;
+ pretty_printer pretty_name;
unsigned char deref_depth = 0;
unsigned short tkind, tinfo;
/* We weren't able to determine the type name. */
tname = "<unknown>";
- /* Decorate the type name with '', '*', "struct", or "union". */
- pretty_name = (char *) alloca (strlen (tname) + 16 + deref_depth);
+ tree eltype = type;
if (pstyle == UBSAN_PRINT_POINTER)
{
- int pos = sprintf (pretty_name, "'%s%s%s%s%s%s%s",
- TYPE_VOLATILE (type2) ? "volatile " : "",
- TYPE_READONLY (type2) ? "const " : "",
- TYPE_RESTRICT (type2) ? "restrict " : "",
- TYPE_ATOMIC (type2) ? "_Atomic " : "",
- TREE_CODE (type2) == RECORD_TYPE
- ? "struct "
- : TREE_CODE (type2) == UNION_TYPE
- ? "union " : "", tname,
- deref_depth == 0 ? "" : " ");
+ pp_printf (&pretty_name, "'%s%s%s%s%s%s%s",
+ TYPE_VOLATILE (type2) ? "volatile " : "",
+ TYPE_READONLY (type2) ? "const " : "",
+ TYPE_RESTRICT (type2) ? "restrict " : "",
+ TYPE_ATOMIC (type2) ? "_Atomic " : "",
+ TREE_CODE (type2) == RECORD_TYPE
+ ? "struct "
+ : TREE_CODE (type2) == UNION_TYPE
+ ? "union " : "", tname,
+ deref_depth == 0 ? "" : " ");
while (deref_depth-- > 0)
- pretty_name[pos++] = '*';
- pretty_name[pos++] = '\'';
- pretty_name[pos] = '\0';
+ pp_star (&pretty_name);
+ pp_quote (&pretty_name);
}
else if (pstyle == UBSAN_PRINT_ARRAY)
{
/* Pretty print the array dimensions. */
gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
tree t = type;
- int pos = sprintf (pretty_name, "'%s ", tname);
+ pp_printf (&pretty_name, "'%s ", tname);
while (deref_depth-- > 0)
- pretty_name[pos++] = '*';
+ pp_star (&pretty_name);
while (TREE_CODE (t) == ARRAY_TYPE)
{
- pretty_name[pos++] = '[';
+ pp_left_bracket (&pretty_name);
tree dom = TYPE_DOMAIN (t);
- if (dom && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
- pos += sprintf (&pretty_name[pos], HOST_WIDE_INT_PRINT_DEC,
- tree_to_uhwi (TYPE_MAX_VALUE (dom)) + 1);
+ if (dom != NULL_TREE
+ && TYPE_MAX_VALUE (dom) != NULL_TREE
+ && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
+ {
+ unsigned HOST_WIDE_INT m;
+ if (tree_fits_uhwi_p (TYPE_MAX_VALUE (dom))
+ && (m = tree_to_uhwi (TYPE_MAX_VALUE (dom))) + 1 != 0)
+ pp_unsigned_wide_integer (&pretty_name, m + 1);
+ else
+ pp_wide_int (&pretty_name,
+ wi::add (wi::to_widest (TYPE_MAX_VALUE (dom)), 1),
+ TYPE_SIGN (TREE_TYPE (dom)));
+ }
else
/* ??? We can't determine the variable name; print VLA unspec. */
- pretty_name[pos++] = '*';
- pretty_name[pos++] = ']';
+ pp_star (&pretty_name);
+ pp_right_bracket (&pretty_name);
t = TREE_TYPE (t);
}
- pretty_name[pos++] = '\'';
- pretty_name[pos] = '\0';
+ pp_quote (&pretty_name);
- /* Save the tree with stripped types. */
- type = t;
+ /* Save the tree with stripped types. */
+ eltype = t;
}
else
- sprintf (pretty_name, "'%s'", tname);
+ pp_printf (&pretty_name, "'%s'", tname);
- switch (TREE_CODE (type))
+ switch (TREE_CODE (eltype))
{
case BOOLEAN_TYPE:
case ENUMERAL_TYPE:
case REAL_TYPE:
/* FIXME: libubsan right now only supports float, double and
long double type formats. */
- if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
- || TYPE_MODE (type) == TYPE_MODE (double_type_node)
- || TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
+ if (TYPE_MODE (eltype) == TYPE_MODE (float_type_node)
+ || TYPE_MODE (eltype) == TYPE_MODE (double_type_node)
+ || TYPE_MODE (eltype) == TYPE_MODE (long_double_type_node))
tkind = 0x0001;
else
tkind = 0xffff;
tkind = 0xffff;
break;
}
- tinfo = get_ubsan_type_info_for_type (type);
+ tinfo = get_ubsan_type_info_for_type (eltype);
/* Create a new VAR_DECL of type descriptor. */
+ const char *tmp = pp_formatted_text (&pretty_name);
+ size_t len = strlen (tmp) + 1;
+ tree str = build_string (len, tmp);
+ TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
+ TREE_READONLY (str) = 1;
+ TREE_STATIC (str) = 1;
+
char tmp_name[32];
- static unsigned int type_var_id_num;
- ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", type_var_id_num++);
+ ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", ubsan_ids[0]++);
decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
- dtype);
+ dtype);
TREE_STATIC (decl) = 1;
TREE_PUBLIC (decl) = 0;
DECL_ARTIFICIAL (decl) = 1;
DECL_IGNORED_P (decl) = 1;
DECL_EXTERNAL (decl) = 0;
+ DECL_SIZE (decl)
+ = size_binop (PLUS_EXPR, DECL_SIZE (decl), TYPE_SIZE (TREE_TYPE (str)));
+ DECL_SIZE_UNIT (decl)
+ = size_binop (PLUS_EXPR, DECL_SIZE_UNIT (decl),
+ TYPE_SIZE_UNIT (TREE_TYPE (str)));
- size_t len = strlen (pretty_name);
- tree str = build_string (len + 1, pretty_name);
- TREE_TYPE (str) = build_array_type (char_type_node,
- build_index_type (size_int (len)));
- TREE_READONLY (str) = 1;
- TREE_STATIC (str) = 1;
tree ctor = build_constructor_va (dtype, 3, NULL_TREE,
build_int_cst (short_unsigned_type_node,
tkind), NULL_TREE,
size_t i = 0;
int j;
+ /* It is possible that PCH zapped table with definitions of sanitizer
+ builtins. Reinitialize them if needed. */
+ initialize_sanitizer_builtins ();
+
/* Firstly, create a pointer to type descriptor type. */
tree td_type = ubsan_get_type_descriptor_type ();
td_type = build_pointer_type (td_type);
TYPE_FIELDS (ret) = fields[0];
TYPE_NAME (ret) = type_decl;
TYPE_STUB_DECL (ret) = type_decl;
+ TYPE_ARTIFICIAL (ret) = 1;
layout_type (ret);
/* Now, fill in the type. */
char tmp_name[32];
- static unsigned int ubsan_var_id_num;
- ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_var_id_num++);
+ ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_ids[1]++);
tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
ret);
TREE_STATIC (var) = 1;
bool
ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
{
- gimple g;
+ gimple *g;
location_t loc = gimple_location (gsi_stmt (*gsi));
if (flag_sanitize_undefined_trap_on_error)
is_ubsan_builtin_p (tree t)
{
return TREE_CODE (t) == FUNCTION_DECL
- && DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
+ && fndecl_built_in_p (t, BUILT_IN_NORMAL)
&& strncmp (IDENTIFIER_POINTER (DECL_NAME (t)),
"__builtin___ubsan_", 18) == 0;
}
+/* Create a callgraph edge for statement STMT.  Used when expanding a
+   ubsan internal function into a real builtin call, so that the
+   callgraph stays consistent with the newly emitted call.  */
+
+static void
+ubsan_create_edge (gimple *stmt)
+{
+  /* NOTE(review): assumes STMT is a GIMPLE call statement; dyn_cast
+     yields NULL otherwise — confirm all callers pass call stmts.  */
+  gcall *call_stmt = dyn_cast <gcall *> (stmt);
+  basic_block bb = gimple_bb (stmt);
+  cgraph_node *node = cgraph_node::get (current_function_decl);
+  tree decl = gimple_call_fndecl (call_stmt);
+  /* Indirect calls have no fndecl; only direct calls get an edge.  */
+  if (decl)
+    node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count);
+}
+
/* Expand the UBSAN_BOUNDS special builtin function. */
bool
ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
/* Pick up the arguments of the UBSAN_BOUNDS call. */
tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
tree index = gimple_call_arg (stmt, 1);
- tree orig_index_type = TREE_TYPE (index);
+ tree orig_index = index;
tree bound = gimple_call_arg (stmt, 2);
gimple_stmt_iterator gsi_orig = *gsi;
index = force_gimple_operand_gsi (&cond_insert_point, index,
true, NULL_TREE,
false, GSI_NEW_STMT);
- gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
+ gimple *g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
gimple_set_location (g, loc);
gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
tree data
= ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc,
ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
- ubsan_type_descriptor (orig_index_type),
+ ubsan_type_descriptor (TREE_TYPE (orig_index)),
NULL_TREE, NULL_TREE);
data = build_fold_addr_expr_loc (loc, data);
enum built_in_function bcode
? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
: BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
tree fn = builtin_decl_explicit (bcode);
- tree val = force_gimple_operand_gsi (gsi, ubsan_encode_value (index),
- true, NULL_TREE, true,
- GSI_SAME_STMT);
+ tree val = ubsan_encode_value (orig_index, UBSAN_ENCODE_VALUE_GIMPLE);
+ val = force_gimple_operand_gsi (gsi, val, true, NULL_TREE, true,
+ GSI_SAME_STMT);
g = gimple_build_call (fn, 2, data, val);
}
gimple_set_location (g, loc);
ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
{
gimple_stmt_iterator gsi = *gsip;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
tree ptr = gimple_call_arg (stmt, 0);
basic_block cur_bb = gsi_bb (gsi);
- gimple g;
+ gimple *g;
if (!integer_zerop (align))
{
unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
}
}
- check_null = (flag_sanitize & SANITIZE_NULL) != 0;
+ check_null = sanitize_flags_p (SANITIZE_NULL);
if (check_align == NULL_TREE && !check_null)
{
/* Make an edge coming from the 'cond block' into the 'then block';
this edge is unlikely taken, so set up the probability accordingly. */
e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
- e->probability = PROB_VERY_UNLIKELY;
+ e->probability = profile_probability::very_unlikely ();
+ then_bb->count = e->count ();
/* Connect 'then block' with the 'else block'. This is needed
as the ubsan routines we call in the 'then block' are not noreturn.
/* Set up the fallthrough basic block. */
e = find_edge (cond_bb, fallthru_bb);
e->flags = EDGE_FALSE_VALUE;
- e->count = cond_bb->count;
- e->probability = REG_BR_PROB_BASE - PROB_VERY_UNLIKELY;
+ e->probability = profile_probability::very_likely ();
/* Update dominance info for the newly created then_bb; note that
fallthru_bb's dominance info has already been updated by
enum built_in_function bcode
= (flag_sanitize_recover & ((check_align ? SANITIZE_ALIGNMENT : 0)
| (check_null ? SANITIZE_NULL : 0)))
- ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH
- : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_ABORT;
+ ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
+ : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
tree fn = builtin_decl_implicit (bcode);
+ int align_log = tree_log2 (align);
tree data
= ubsan_create_data ("__ubsan_null_data", 1, &loc,
ubsan_type_descriptor (TREE_TYPE (ckind),
UBSAN_PRINT_POINTER),
NULL_TREE,
- align,
+ build_int_cst (unsigned_char_type_node,
+ MAX (align_log, 0)),
fold_convert (unsigned_char_type_node, ckind),
NULL_TREE);
data = build_fold_addr_expr_loc (loc, data);
/* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
gsi_replace (&gsi, g, false);
+ stmt = g;
}
if (check_align)
this edge is unlikely taken, so set up the probability
accordingly. */
e = make_edge (cond1_bb, then_bb, EDGE_TRUE_VALUE);
- e->probability = PROB_VERY_UNLIKELY;
+ e->probability = profile_probability::very_unlikely ();
/* Set up the fallthrough basic block. */
e = find_edge (cond1_bb, cond2_bb);
e->flags = EDGE_FALSE_VALUE;
- e->count = cond1_bb->count;
- e->probability = REG_BR_PROB_BASE - PROB_VERY_UNLIKELY;
+ e->probability = profile_probability::very_likely ();
/* Update dominance info. */
if (dom_info_available_p (CDI_DOMINATORS))
return false;
}
+#define OBJSZ_MAX_OFFSET (1024 * 16)
+
/* Expand UBSAN_OBJECT_SIZE internal call. */
bool
ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 4);
tree size = gimple_call_arg (stmt, 2);
tree ckind = gimple_call_arg (stmt, 3);
gimple_stmt_iterator gsi_orig = *gsi;
- gimple g;
+ gimple *g;
/* See if we can discard the check. */
if (TREE_CODE (size) != INTEGER_CST
|| integer_all_onesp (size))
/* Yes, __builtin_object_size couldn't determine the
object size. */;
+ else if (TREE_CODE (offset) == INTEGER_CST
+ && wi::to_widest (offset) >= -OBJSZ_MAX_OFFSET
+ && wi::to_widest (offset) <= -1)
+ /* The offset is in range [-16K, -1]. */;
else
{
/* if (offset > objsize) */
gimple_set_location (g, loc);
gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
+ /* If the offset is small enough, we don't need the second
+ run-time check. */
+ if (TREE_CODE (offset) == INTEGER_CST
+ && wi::to_widest (offset) >= 0
+ && wi::to_widest (offset) <= OBJSZ_MAX_OFFSET)
+ *gsi = gsi_after_labels (then_bb);
+ else
+ {
+ /* Don't issue run-time error if (ptr > ptr + offset). That
+ may happen when computing a POINTER_PLUS_EXPR. */
+ basic_block then2_bb, fallthru2_bb;
+
+ gimple_stmt_iterator gsi2 = gsi_after_labels (then_bb);
+ cond_insert_point = create_cond_insert_point (&gsi2, false, false,
+ true, &then2_bb,
+ &fallthru2_bb);
+ /* Convert the pointer to an integer type. */
+ tree p = make_ssa_name (pointer_sized_int_node);
+ g = gimple_build_assign (p, NOP_EXPR, ptr);
+ gimple_set_location (g, loc);
+ gsi_insert_before (&cond_insert_point, g, GSI_NEW_STMT);
+ p = gimple_assign_lhs (g);
+ /* Compute ptr + offset. */
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ PLUS_EXPR, p, offset);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
+ /* Now build the conditional and put it into the IR. */
+ g = gimple_build_cond (LE_EXPR, p, gimple_assign_lhs (g),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
+ *gsi = gsi_after_labels (then2_bb);
+ }
+
/* Generate __ubsan_handle_type_mismatch call. */
- *gsi = gsi_after_labels (then_bb);
if (flag_sanitize_undefined_trap_on_error)
g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
else
ubsan_type_descriptor (TREE_TYPE (ptr),
UBSAN_PRINT_POINTER),
NULL_TREE,
- build_zero_cst (pointer_sized_int_node),
+ build_zero_cst (unsigned_char_type_node),
ckind,
NULL_TREE);
data = build_fold_addr_expr_loc (loc, data);
enum built_in_function bcode
= (flag_sanitize_recover & SANITIZE_OBJECT_SIZE)
- ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH
- : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_ABORT;
+ ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
+ : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
tree p = make_ssa_name (pointer_sized_int_node);
g = gimple_build_assign (p, NOP_EXPR, ptr);
gimple_set_location (g, loc);
/* Point GSI to next logical statement. */
*gsi = gsi_start_bb (fallthru_bb);
+
+ /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
+ unlink_stmt_vdef (stmt);
+ gsi_remove (&gsi_orig, true);
+ return true;
}
/* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
unlink_stmt_vdef (stmt);
- gsi_remove (&gsi_orig, true);
- return gsi_end_p (*gsi);
+ gsi_remove (gsi, true);
+ return true;
+}
+
+/* Expand UBSAN_PTR internal call: check that computing PTR p+ OFF does
+   not wrap around the address space.  Replaces the IFN_UBSAN_PTR call
+   with explicit compare-and-branch IR that calls the libubsan
+   __ubsan_handle_pointer_overflow* handler (or __builtin_trap) on the
+   unlikely path.  Returns true if the call was removed outright,
+   false if it was replaced by a GIMPLE_COND.  */
+
+bool
+ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
+{
+ gimple_stmt_iterator gsi = *gsip;
+ gimple *stmt = gsi_stmt (gsi);
+ location_t loc = gimple_location (stmt);
+ gcc_assert (gimple_call_num_args (stmt) == 2);
+ tree ptr = gimple_call_arg (stmt, 0);
+ tree off = gimple_call_arg (stmt, 1);
+
+ /* Adding zero can never overflow; drop the check entirely.  */
+ if (integer_zerop (off))
+ {
+ gsi_remove (gsip, true);
+ unlink_stmt_vdef (stmt);
+ return true;
+ }
+
+ basic_block cur_bb = gsi_bb (gsi);
+ tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
+ tree ptri = make_ssa_name (pointer_sized_int_node);
+ /* NOTE(review): pos_neg encodes OFF's known sign: 1 = non-negative,
+    2 = negative, 3 = unknown — confirm against get_range_pos_neg.  */
+ int pos_neg = get_range_pos_neg (off);
+
+ /* Split the original block holding the pointer dereference. */
+ edge e = split_block (cur_bb, stmt);
+
+ /* Get a hold on the 'condition block', the 'then block' and the
+ 'else block'. */
+ basic_block cond_bb = e->src;
+ basic_block fallthru_bb = e->dest;
+ basic_block then_bb = create_empty_bb (cond_bb);
+ basic_block cond_pos_bb = NULL, cond_neg_bb = NULL;
+ add_bb_to_loop (then_bb, cond_bb->loop_father);
+ loops_state_set (LOOPS_NEED_FIXUP);
+
+ /* Set up the fallthrough basic block. */
+ e->flags = EDGE_FALSE_VALUE;
+ if (pos_neg != 3)
+ {
+ /* Known sign: a single condition suffices.  */
+ e->probability = profile_probability::very_likely ();
+
+ /* Connect 'then block' with the 'else block'. This is needed
+ as the ubsan routines we call in the 'then block' are not noreturn.
+ The 'then block' only has one outcoming edge. */
+ make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
+
+ /* Make an edge coming from the 'cond block' into the 'then block';
+ this edge is unlikely taken, so set up the probability
+ accordingly. */
+ e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
+ e->probability = profile_probability::very_unlikely ();
+ then_bb->count = e->count ();
+ }
+ else
+ {
+ /* Unknown sign: branch first on sign of OFF, then do the
+    direction-specific wrap test in cond_pos_bb / cond_neg_bb.  */
+ e->probability = profile_probability::even ();
+
+ e = split_block (fallthru_bb, (gimple *) NULL);
+ cond_neg_bb = e->src;
+ fallthru_bb = e->dest;
+ e->probability = profile_probability::very_likely ();
+ e->flags = EDGE_FALSE_VALUE;
+
+ e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE);
+ e->probability = profile_probability::very_unlikely ();
+ then_bb->count = e->count ();
+
+ cond_pos_bb = create_empty_bb (cond_bb);
+ add_bb_to_loop (cond_pos_bb, cond_bb->loop_father);
+
+ e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE);
+ e->probability = profile_probability::even ();
+ cond_pos_bb->count = e->count ();
+
+ e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE);
+ e->probability = profile_probability::very_unlikely ();
+
+ e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE);
+ e->probability = profile_probability::very_likely ();
+
+ make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
+ }
+
+ /* Compute ptri = (uintptr) ptr and ptrplusoff = ptri + off in the
+    pointer-sized integer type (unsigned, so the addition wraps).  */
+ gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr);
+ gimple_set_location (g, loc);
+ gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+ g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off);
+ gimple_set_location (g, loc);
+ gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+
+ /* Update dominance info for the newly created then_bb; note that
+ fallthru_bb's dominance info has already been updated by
+ split_block. */
+ if (dom_info_available_p (CDI_DOMINATORS))
+ {
+ set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
+ if (pos_neg == 3)
+ {
+ set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb);
+ set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb);
+ }
+ }
+
+ /* Put the ubsan builtin call into the newly created BB. */
+ if (flag_sanitize_undefined_trap_on_error)
+ g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
+ else
+ {
+ /* Recoverable vs. aborting handler, per -fsanitize-recover.  */
+ enum built_in_function bcode
+ = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW)
+ ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW
+ : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT;
+ tree fn = builtin_decl_implicit (bcode);
+ tree data
+ = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc,
+ NULL_TREE, NULL_TREE);
+ data = build_fold_addr_expr_loc (loc, data);
+ g = gimple_build_call (fn, 3, data, ptr, ptrplusoff);
+ }
+ gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+ /* Unlink the UBSAN_PTRs vops before replacing it. */
+ unlink_stmt_vdef (stmt);
+
+ /* Build the overflow condition(s).  For a constant OFF the test
+    reduces to a single compare against -OFF.  */
+ if (TREE_CODE (off) == INTEGER_CST)
+ g = gimple_build_cond (wi::neg_p (wi::to_wide (off)) ? LT_EXPR : GE_EXPR,
+ ptri, fold_build1 (NEGATE_EXPR, sizetype, off),
+ NULL_TREE, NULL_TREE);
+ else if (pos_neg != 3)
+ /* Known-sign variable OFF: wrap happens iff the sum moved the
+    "wrong" way relative to the base pointer.  */
+ g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR,
+ ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+ else
+ {
+ /* Unknown sign: emit both direction tests and dispatch on the
+    runtime sign of OFF.  */
+ gsi2 = gsi_start_bb (cond_pos_bb);
+ g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+ gsi2 = gsi_start_bb (cond_neg_bb);
+ g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+ gimple_seq seq = NULL;
+ tree t = gimple_build (&seq, loc, NOP_EXPR, ssizetype, off);
+ t = gimple_build (&seq, loc, GE_EXPR, boolean_type_node,
+ t, ssize_int (0));
+ gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
+ g = gimple_build_cond (NE_EXPR, t, boolean_false_node,
+ NULL_TREE, NULL_TREE);
+ }
+ gimple_set_location (g, loc);
+ /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */
+ gsi_replace (&gsi, g, false);
+ return false;
+}
+
+
+/* Cached __ubsan_vptr_type_cache decl.  Lazily created external array
+   shared with the libubsan runtime; see ubsan_expand_vptr_ifn.  */
+static GTY(()) tree ubsan_vptr_type_cache_decl;
+
+/* Expand UBSAN_VPTR internal call. The type is kept on the ckind
+ argument which is a constant, because the middle-end treats pointer
+ conversions as useless and therefore the type of the first argument
+ could be changed to any other pointer type.
+   Emits an inline hash of (vptr, static type hash), probes the
+   128-entry __ubsan_vptr_type_cache, and on a cache miss calls the
+   libubsan dynamic-type-cache-miss handler.  Returns true (the
+   UBSAN_VPTR call is always removed).  */
+
+bool
+ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
+{
+ gimple_stmt_iterator gsi = *gsip;
+ gimple *stmt = gsi_stmt (gsi);
+ location_t loc = gimple_location (stmt);
+ gcc_assert (gimple_call_num_args (stmt) == 5);
+ tree op = gimple_call_arg (stmt, 0);
+ tree vptr = gimple_call_arg (stmt, 1);
+ tree str_hash = gimple_call_arg (stmt, 2);
+ tree ti_decl_addr = gimple_call_arg (stmt, 3);
+ tree ckind_tree = gimple_call_arg (stmt, 4);
+ ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
+ tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
+ gimple *g;
+ basic_block fallthru_bb = NULL;
+
+ if (ckind == UBSAN_DOWNCAST_POINTER)
+ {
+ /* Guard everything with if (op != NULL) { ... }. */
+ basic_block then_bb;
+ gimple_stmt_iterator cond_insert_point
+ = create_cond_insert_point (gsip, false, false, true,
+ &then_bb, &fallthru_bb);
+ g = gimple_build_cond (NE_EXPR, op, build_zero_cst (TREE_TYPE (op)),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
+ /* Move the UBSAN_VPTR stmt itself into the guarded block.  */
+ *gsip = gsi_after_labels (then_bb);
+ gsi_remove (&gsi, false);
+ gsi_insert_before (gsip, stmt, GSI_NEW_STMT);
+ gsi = *gsip;
+ }
+
+ /* Mix vptr with the static type hash.  NOTE(review): the constant
+    0x9ddfea08eb382d69 and the xor/mul/shift-47 rounds look like the
+    CityHash64 finalizer libubsan uses for its cache key — confirm
+    against the libubsan sources.  */
+ tree htype = TREE_TYPE (str_hash);
+ tree cst = wide_int_to_tree (htype,
+ wi::uhwi (((uint64_t) 0x9ddfea08 << 32)
+ | 0xeb382d69, 64));
+ g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
+ vptr, str_hash);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
+ gimple_assign_lhs (g), cst);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ tree t1 = gimple_assign_lhs (g);
+ g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
+ t1, build_int_cst (integer_type_node, 47));
+ gimple_set_location (g, loc);
+ tree t2 = gimple_assign_lhs (g);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
+ vptr, t1);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
+ t2, gimple_assign_lhs (g));
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
+ gimple_assign_lhs (g), cst);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ tree t3 = gimple_assign_lhs (g);
+ g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
+ t3, build_int_cst (integer_type_node, 47));
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
+ t3, gimple_assign_lhs (g));
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
+ gimple_assign_lhs (g), cst);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ /* Narrow/widen the hash to the pointer-sized type if needed.  */
+ if (!useless_type_conversion_p (pointer_sized_int_node, htype))
+ {
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, gimple_assign_lhs (g));
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+ }
+ tree hash = gimple_assign_lhs (g);
+
+ /* Lazily create the external declaration of the runtime's
+    128-entry type cache array.  */
+ if (ubsan_vptr_type_cache_decl == NULL_TREE)
+ {
+ tree atype = build_array_type_nelts (pointer_sized_int_node, 128);
+ tree array = build_decl (UNKNOWN_LOCATION, VAR_DECL,
+ get_identifier ("__ubsan_vptr_type_cache"),
+ atype);
+ DECL_ARTIFICIAL (array) = 1;
+ DECL_IGNORED_P (array) = 1;
+ TREE_PUBLIC (array) = 1;
+ TREE_STATIC (array) = 1;
+ DECL_EXTERNAL (array) = 1;
+ DECL_VISIBILITY (array) = VISIBILITY_DEFAULT;
+ DECL_VISIBILITY_SPECIFIED (array) = 1;
+ varpool_node::finalize_decl (array);
+ ubsan_vptr_type_cache_decl = array;
+ }
+
+ /* Index the cache with the low 7 bits of the hash.  */
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ BIT_AND_EXPR, hash,
+ build_int_cst (pointer_sized_int_node, 127));
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+
+ tree c = build4_loc (loc, ARRAY_REF, pointer_sized_int_node,
+ ubsan_vptr_type_cache_decl, gimple_assign_lhs (g),
+ NULL_TREE, NULL_TREE);
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ ARRAY_REF, c);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+
+ /* On a cache miss (cache slot != hash), branch to the handler.  */
+ basic_block then_bb, fallthru2_bb;
+ gimple_stmt_iterator cond_insert_point
+ = create_cond_insert_point (gsip, false, false, true,
+ &then_bb, &fallthru2_bb);
+ g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g), hash,
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
+ *gsip = gsi_after_labels (then_bb);
+ if (fallthru_bb == NULL)
+ fallthru_bb = fallthru2_bb;
+
+ tree data
+ = ubsan_create_data ("__ubsan_vptr_data", 1, &loc,
+ ubsan_type_descriptor (type), NULL_TREE, ti_decl_addr,
+ build_int_cst (unsigned_char_type_node, ckind),
+ NULL_TREE);
+ data = build_fold_addr_expr_loc (loc, data);
+ /* Recoverable vs. aborting handler, per -fsanitize-recover.  */
+ enum built_in_function bcode
+ = (flag_sanitize_recover & SANITIZE_VPTR)
+ ? BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS
+ : BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS_ABORT;
+
+ g = gimple_build_call (builtin_decl_explicit (bcode), 3, data, op, hash);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsip, g, GSI_SAME_STMT);
+
+ /* Point GSI to next logical statement. */
+ *gsip = gsi_start_bb (fallthru_bb);
+
+ /* Get rid of the UBSAN_VPTR call from the IR. */
+ unlink_stmt_vdef (stmt);
+ gsi_remove (&gsi, true);
+ return true;
}
/* Instrument a memory reference. BASE is the base of MEM, IS_LHS says
{
enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF;
unsigned int align = 0;
- if (flag_sanitize & SANITIZE_ALIGNMENT)
+ if (sanitize_flags_p (SANITIZE_ALIGNMENT))
{
align = min_align_of_type (TREE_TYPE (base));
if (align <= 1)
align = 0;
}
- if (align == 0 && (flag_sanitize & SANITIZE_NULL) == 0)
+ if (align == 0 && !sanitize_flags_p (SANITIZE_NULL))
return;
tree t = TREE_OPERAND (base, 0);
if (!POINTER_TYPE_P (TREE_TYPE (t)))
return;
- if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_TYPE (t))) && mem != base)
+ if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (base)) && mem != base)
ikind = UBSAN_MEMBER_ACCESS;
- tree kind = build_int_cst (TREE_TYPE (t), ikind);
+ tree kind = build_int_cst (build_pointer_type (TREE_TYPE (base)), ikind);
tree alignt = build_int_cst (pointer_sized_int_node, align);
gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
/* Perform the pointer instrumentation. */
static void
-instrument_null (gimple_stmt_iterator gsi, bool is_lhs)
+instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs)
{
- gimple stmt = gsi_stmt (gsi);
- tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
+ /* Handle also e.g. &s->i: strip the ADDR_EXPR so the underlying
+ reference itself gets inspected. */
+ if (TREE_CODE (t) == ADDR_EXPR)
+ t = TREE_OPERAND (t, 0);
tree base = get_base_address (t);
- const enum tree_code code = TREE_CODE (base);
- if (code == MEM_REF
+ /* Only dereferences of an SSA_NAME pointer through a MEM_REF are
+ instrumented; anything else has no pointer to null/align-check. */
+ if (base != NULL_TREE
+ && TREE_CODE (base) == MEM_REF
&& TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
instrument_mem_ref (t, base, &gsi, is_lhs);
}
+/* Instrument pointer arithmetics PTR p+ OFF by emitting an
+ IFN_UBSAN_PTR internal call before the current statement. */
+
+static void
+instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
+{
+ /* The check is only meaningful when sizetype offsets are exactly
+ pointer-sized; punt otherwise. */
+ if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
+ return;
+ gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
+ gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
+ gsi_insert_before (gsi, g, GSI_SAME_STMT);
+}
+
+/* Instrument pointer arithmetics if any.  If the memory reference T
+ embeds a nonzero offset from its base address, emit an IFN_UBSAN_PTR
+ check of BASE p+ OFFSET. */
+
+static void
+maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
+{
+ if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
+ return;
+
+ /* Handle also e.g. &s->i. */
+ if (TREE_CODE (t) == ADDR_EXPR)
+ t = TREE_OPERAND (t, 0);
+
+ /* Only component/array references and MEM_REFs can carry pointer
+ arithmetics worth checking. */
+ if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
+ return;
+
+ poly_int64 bitsize, bitpos, bytepos;
+ tree offset;
+ machine_mode mode;
+ int volatilep = 0, reversep, unsignedp = 0;
+ tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep);
+ /* MOFF, if set below, is the constant offset operand of a MEM_REF. */
+ tree moff = NULL_TREE;
+
+ bool decl_p = DECL_P (inner);
+ tree base;
+ if (decl_p)
+ {
+ /* Hard-register variables have no meaningful address. */
+ if (DECL_REGISTER (inner))
+ return;
+ base = inner;
+ /* If BASE is a fixed size automatic variable or
+ global variable defined in the current TU and bitpos
+ fits, don't instrument anything. */
+ poly_int64 base_size;
+ if (offset == NULL_TREE
+ && maybe_ne (bitpos, 0)
+ && (VAR_P (base)
+ || TREE_CODE (base) == PARM_DECL
+ || TREE_CODE (base) == RESULT_DECL)
+ && poly_int_tree_p (DECL_SIZE (base), &base_size)
+ && known_ge (base_size, bitpos)
+ && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
+ return;
+ }
+ else if (TREE_CODE (inner) == MEM_REF)
+ {
+ base = TREE_OPERAND (inner, 0);
+ /* Dereferencing the address of a non-addressable, non-global
+ decl cannot wrap; skip it. */
+ if (TREE_CODE (base) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (base, 0))
+ && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
+ && !is_global_var (TREE_OPERAND (base, 0)))
+ return;
+ moff = TREE_OPERAND (inner, 1);
+ if (integer_zerop (moff))
+ moff = NULL_TREE;
+ }
+ else
+ return;
+
+ if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
+ return;
+ /* With no variable offset, no byte position and no MEM_REF offset
+ there is nothing to instrument. */
+ bytepos = bits_to_bytes_round_down (bitpos);
+ if (offset == NULL_TREE && known_eq (bytepos, 0) && moff == NULL_TREE)
+ return;
+
+ tree base_addr = base;
+ if (decl_p)
+ base_addr = build1 (ADDR_EXPR,
+ build_pointer_type (TREE_TYPE (base)), base);
+ /* Accumulate OFFSET + BYTEPOS + MOFF into T, the total byte offset
+ applied to BASE_ADDR. */
+ t = offset;
+ if (maybe_ne (bytepos, 0))
+ {
+ if (t)
+ t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
+ build_int_cst (TREE_TYPE (t), bytepos));
+ else
+ t = size_int (bytepos);
+ }
+ if (moff)
+ {
+ if (t)
+ t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
+ fold_convert (TREE_TYPE (t), moff));
+ else
+ t = fold_convert (sizetype, moff);
+ }
+ t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
+ GSI_SAME_STMT);
+ base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true,
+ GSI_SAME_STMT);
+ instrument_pointer_overflow (gsi, base_addr, t);
+}
+
/* Build an ubsan builtin call for the signed-integer-overflow
sanitization. CODE says what kind of builtin are we building,
LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
tree
ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
- tree op0, tree op1)
+ tree op0, tree op1, tree *datap)
{
if (flag_sanitize_undefined_trap_on_error)
return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
- tree data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
- ubsan_type_descriptor (lhstype), NULL_TREE,
- NULL_TREE);
+ tree data;
+ if (datap && *datap)
+ data = *datap;
+ else
+ data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
+ ubsan_type_descriptor (lhstype), NULL_TREE,
+ NULL_TREE);
+ if (datap)
+ *datap = data;
enum built_in_function fn_code;
switch (code)
tree fn = builtin_decl_explicit (fn_code);
return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
build_fold_addr_expr_loc (loc, data),
- ubsan_encode_value (op0, true),
- op1 ? ubsan_encode_value (op1, true)
- : NULL_TREE);
+ ubsan_encode_value (op0, UBSAN_ENCODE_VALUE_RTL),
+ op1
+ ? ubsan_encode_value (op1,
+ UBSAN_ENCODE_VALUE_RTL)
+ : NULL_TREE);
}
/* Perform the signed integer instrumentation. GSI is the iterator
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree_code code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree lhstype = TREE_TYPE (lhs);
+ tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
tree a, b;
- gimple g;
+ gimple *g;
/* If this is not a signed operation, don't instrument anything here.
Also punt on bit-fields. */
- if (!INTEGRAL_TYPE_P (lhstype)
- || TYPE_OVERFLOW_WRAPS (lhstype)
- || GET_MODE_BITSIZE (TYPE_MODE (lhstype)) != TYPE_PRECISION (lhstype))
+ if (!INTEGRAL_TYPE_P (lhsinner)
+ || TYPE_OVERFLOW_WRAPS (lhsinner)
+ || maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (lhsinner)),
+ TYPE_PRECISION (lhsinner)))
return;
switch (code)
? IFN_UBSAN_CHECK_SUB
: IFN_UBSAN_CHECK_MUL, 2, a, b);
gimple_call_set_lhs (g, lhs);
- gsi_replace (&gsi, g, false);
+ gsi_replace (&gsi, g, true);
break;
case NEGATE_EXPR:
/* Represent i = -u;
as
i = UBSAN_CHECK_SUB (0, u); */
- a = build_int_cst (lhstype, 0);
+ a = build_zero_cst (lhstype);
b = gimple_assign_rhs1 (stmt);
g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
gimple_call_set_lhs (g, lhs);
- gsi_replace (&gsi, g, false);
+ gsi_replace (&gsi, g, true);
break;
case ABS_EXPR:
/* Transform i = ABS_EXPR<u>;
into
_N = UBSAN_CHECK_SUB (0, u);
i = ABS_EXPR<_N>; */
- a = build_int_cst (lhstype, 0);
+ a = build_zero_cst (lhstype);
b = gimple_assign_rhs1 (stmt);
g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
a = make_ssa_name (lhstype);
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree rhs = gimple_assign_rhs1 (stmt);
tree type = TREE_TYPE (rhs);
tree minv = NULL_TREE, maxv = NULL_TREE;
- if (TREE_CODE (type) == BOOLEAN_TYPE && (flag_sanitize & SANITIZE_BOOL))
+ if (TREE_CODE (type) == BOOLEAN_TYPE
+ && sanitize_flags_p (SANITIZE_BOOL))
{
minv = boolean_false_node;
maxv = boolean_true_node;
}
else if (TREE_CODE (type) == ENUMERAL_TYPE
- && (flag_sanitize & SANITIZE_ENUM)
+ && sanitize_flags_p (SANITIZE_ENUM)
&& TREE_TYPE (type) != NULL_TREE
&& TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
&& (TYPE_PRECISION (TREE_TYPE (type))
- < GET_MODE_PRECISION (TYPE_MODE (type))))
+ < GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (type))))
{
minv = TYPE_MIN_VALUE (TREE_TYPE (type));
maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
else
return;
- int modebitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
- HOST_WIDE_INT bitsize, bitpos;
+ int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
+ int volatilep = 0, reversep, unsignedp = 0;
tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
tree utype = build_nonstandard_integer_type (modebitsize, 1);
- if ((TREE_CODE (base) == VAR_DECL && DECL_HARD_REGISTER (base))
- || (bitpos % modebitsize) != 0
- || bitsize != modebitsize
- || GET_MODE_BITSIZE (TYPE_MODE (utype)) != modebitsize
+ if ((VAR_P (base) && DECL_HARD_REGISTER (base))
+ || !multiple_p (bitpos, modebitsize)
+ || maybe_ne (bitsize, modebitsize)
+ || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
|| TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
return;
- bool can_throw = stmt_could_throw_p (stmt);
+ bool ends_bb = stmt_ends_bb_p (stmt);
location_t loc = gimple_location (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree ptype = build_pointer_type (TREE_TYPE (rhs));
tree atype = reference_alias_ptr_type (rhs);
- gimple g = gimple_build_assign (make_ssa_name (ptype),
+ gimple *g = gimple_build_assign (make_ssa_name (ptype),
build_fold_addr_expr (rhs));
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
build_int_cst (atype, 0));
tree urhs = make_ssa_name (utype);
- if (can_throw)
+ if (ends_bb)
{
gimple_assign_set_lhs (stmt, urhs);
g = gimple_build_assign (lhs, NOP_EXPR, urhs);
gimple_set_location (g, loc);
gsi_insert_after (gsi, g, GSI_NEW_STMT);
- if (!can_throw)
+ if (!ends_bb)
{
gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs);
update_stmt (stmt);
: BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
tree fn = builtin_decl_explicit (bcode);
- tree val = force_gimple_operand_gsi (&gsi2, ubsan_encode_value (urhs),
- true, NULL_TREE, true,
- GSI_SAME_STMT);
+ tree val = ubsan_encode_value (urhs, UBSAN_ENCODE_VALUE_GIMPLE);
+ val = force_gimple_operand_gsi (&gsi2, val, true, NULL_TREE, true,
+ GSI_SAME_STMT);
g = gimple_build_call (fn, 2, data, val);
}
gimple_set_location (g, loc);
gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
+ ubsan_create_edge (g);
*gsi = gsi_for_stmt (stmt);
}
+/* Determine if we can propagate given LOCATION to ubsan_data descriptor to use
+ new style handlers. Libubsan uses heuristics to distinguish between old and
+ new styles and relies on these properties for filename:
+
+ a) Location's filename must not be NULL.
+ b) Location's filename must not be equal to "".
+ c) Location's filename must not be equal to "\1".
+ d) First two bytes of filename must not contain '\xff' symbol. */
+
+static bool
+ubsan_use_new_style_p (location_t loc)
+{
+ if (loc == UNKNOWN_LOCATION)
+ return false;
+
+ expanded_location xloc = expand_location (loc);
+ /* Reject filenames the runtime heuristics would misinterpret; these
+ checks implement properties a) through d) above. */
+ if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0
+ || xloc.file[0] == '\0' || xloc.file[0] == '\xff'
+ || xloc.file[1] == '\xff')
+ return false;
+
+ return true;
+}
+
/* Instrument float point-to-integer conversion. TYPE is an integer type of
destination, EXPR is floating-point expression. */
machine_mode mode = TYPE_MODE (expr_type);
int prec = TYPE_PRECISION (type);
bool uns_p = TYPE_UNSIGNED (type);
+ if (loc == UNKNOWN_LOCATION)
+ loc = input_location;
/* Float to integer conversion first truncates toward zero, so
even signed char c = 127.875f; is not problematic.
representable decimal number greater or equal than
1 << (prec - !uns_p). */
mpfr_init2 (m, prec + 2);
- mpfr_set_ui_2exp (m, 1, prec - !uns_p, GMP_RNDN);
+ mpfr_set_ui_2exp (m, 1, prec - !uns_p, MPFR_RNDN);
mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, m);
decimal_real_from_string (&maxval, buf);
max = build_real (expr_type, maxval);
/* Use mpfr_snprintf rounding to compute the largest
representable decimal number less or equal than
(-1 << (prec - 1)) - 1. */
- mpfr_set_si_2exp (m, -1, prec - 1, GMP_RNDN);
- mpfr_sub_ui (m, m, 1, GMP_RNDN);
+ mpfr_set_si_2exp (m, -1, prec - 1, MPFR_RNDN);
+ mpfr_sub_ui (m, m, 1, MPFR_RNDN);
mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, m);
decimal_real_from_string (&minval, buf);
min = build_real (expr_type, minval);
else
return NULL_TREE;
+ t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
+ tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
+ t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
+ if (integer_zerop (t))
+ return NULL_TREE;
+
if (flag_sanitize_undefined_trap_on_error)
fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
else
{
+ location_t *loc_ptr = NULL;
+ unsigned num_locations = 0;
+ /* Figure out if we can propagate location to ubsan_data and use new
+ style handlers in libubsan. */
+ if (ubsan_use_new_style_p (loc))
+ {
+ loc_ptr = &loc;
+ num_locations = 1;
+ }
/* Create the __ubsan_handle_float_cast_overflow fn call. */
- tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data", 0,
- NULL, ubsan_type_descriptor (expr_type),
+ tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data",
+ num_locations, loc_ptr,
+ ubsan_type_descriptor (expr_type),
ubsan_type_descriptor (type), NULL_TREE,
NULL_TREE);
enum built_in_function bcode
fn = builtin_decl_explicit (bcode);
fn = build_call_expr_loc (loc, fn, 2,
build_fold_addr_expr_loc (loc, data),
- ubsan_encode_value (expr, false));
+ ubsan_encode_value (expr));
}
- t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
- tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
- return fold_build3 (COND_EXPR, void_type_node,
- fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt),
- fn, integer_zero_node);
+ return fold_build3 (COND_EXPR, void_type_node, t, fn, integer_zero_node);
}
/* Instrument values passed to function arguments with nonnull attribute. */
static void
instrument_nonnull_arg (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc[2];
/* infer_nonnull_range needs flag_delete_null_pointer_checks set,
while for nonnull sanitization it is clear. */
{
tree arg = gimple_call_arg (stmt, i);
if (POINTER_TYPE_P (TREE_TYPE (arg))
- && infer_nonnull_range (stmt, arg, false, true))
+ && infer_nonnull_range_by_attribute (stmt, arg))
{
- gimple g;
+ gimple *g;
if (!is_gimple_val (arg))
{
g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
}
gimple_set_location (g, loc[0]);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ ubsan_create_edge (g);
}
*gsi = gsi_for_stmt (stmt);
}
if (arg
&& POINTER_TYPE_P (TREE_TYPE (arg))
&& is_gimple_val (arg)
- && infer_nonnull_range (stmt, arg, false, true))
+ && infer_nonnull_range_by_attribute (stmt, arg))
{
basic_block then_bb, fallthru_bb;
*gsi = create_cond_insert_point (gsi, true, false, true,
&then_bb, &fallthru_bb);
- gimple g = gimple_build_cond (EQ_EXPR, arg,
+ gimple *g = gimple_build_cond (EQ_EXPR, arg,
build_zero_cst (TREE_TYPE (arg)),
NULL_TREE, NULL_TREE);
gimple_set_location (g, loc[0]);
else
{
tree data = ubsan_create_data ("__ubsan_nonnull_return_data",
- 2, loc, NULL_TREE, NULL_TREE);
+ 1, &loc[1], NULL_TREE, NULL_TREE);
data = build_fold_addr_expr_loc (loc[0], data);
+ tree data2 = ubsan_create_data ("__ubsan_nonnull_return_data",
+ 1, &loc[0], NULL_TREE, NULL_TREE);
+ data2 = build_fold_addr_expr_loc (loc[0], data2);
enum built_in_function bcode
= (flag_sanitize_recover & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
- ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN
- : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_ABORT;
+ ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1
+ : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1_ABORT;
tree fn = builtin_decl_explicit (bcode);
- g = gimple_build_call (fn, 1, data);
+ g = gimple_build_call (fn, 2, data, data2);
}
gimple_set_location (g, loc[0]);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ ubsan_create_edge (g);
*gsi = gsi_for_stmt (stmt);
}
flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
points to an out-of-bounds location. */
static void
-instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
+instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
- tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
tree type;
tree index = NULL_TREE;
HOST_WIDE_INT size_in_bytes;
{
tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
t = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (t, 0),
- repr, NULL_TREE);
+ repr, TREE_OPERAND (t, 2));
}
break;
case ARRAY_REF:
if (size_in_bytes <= 0)
return;
- HOST_WIDE_INT bitsize, bitpos;
+ poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
+ int volatilep = 0, reversep, unsignedp = 0;
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, false);
+ &unsignedp, &reversep, &volatilep);
- if (bitpos % BITS_PER_UNIT != 0
- || bitsize != size_in_bytes * BITS_PER_UNIT)
+ if (!multiple_p (bitpos, BITS_PER_UNIT)
+ || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
return;
bool decl_p = DECL_P (inner);
tree base;
if (decl_p)
- base = inner;
+ {
+ if (DECL_REGISTER (inner))
+ return;
+ base = inner;
+ }
else if (TREE_CODE (inner) == MEM_REF)
base = TREE_OPERAND (inner, 0);
else
while (TREE_CODE (base) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (base);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (base);
if (gimple_assign_ssa_name_copy_p (def_stmt)
|| (gimple_assign_cast_p (def_stmt)
&& POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
{
tree rhs1 = gimple_assign_rhs1 (def_stmt);
if (TREE_CODE (rhs1) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
break;
else
base = rhs1;
tree sizet;
tree base_addr = base;
+ gimple *bos_stmt = NULL;
if (decl_p)
base_addr = build1 (ADDR_EXPR,
build_pointer_type (TREE_TYPE (base)), base);
- unsigned HOST_WIDE_INT size = compute_builtin_object_size (base_addr, 0);
- if (size != (unsigned HOST_WIDE_INT) -1)
+ unsigned HOST_WIDE_INT size;
+ if (compute_builtin_object_size (base_addr, 0, &size))
sizet = build_int_cst (sizetype, size);
else if (optimize)
{
integer_zero_node);
sizet = force_gimple_operand_gsi (gsi, sizet, false, NULL_TREE, true,
GSI_SAME_STMT);
+ /* If the call above didn't end up being an integer constant, go one
+ statement back and get the __builtin_object_size stmt. Save it,
+ we might need it later. */
+ if (SSA_VAR_P (sizet))
+ {
+ gsi_prev (gsi);
+ bos_stmt = gsi_stmt (*gsi);
+
+ /* Move on to where we were. */
+ gsi_next (gsi);
+ }
}
else
return;
&& TREE_CODE (index) == SSA_NAME
&& TREE_CODE (sizet) == INTEGER_CST)
{
- gimple def = SSA_NAME_DEF_STMT (index);
+ gimple *def = SSA_NAME_DEF_STMT (index);
if (is_gimple_assign (def)
&& gimple_assign_rhs_code (def) == BIT_AND_EXPR
&& TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
}
}
- /* Nope. Emit the check. */
+ if (bos_stmt && gimple_call_builtin_p (bos_stmt, BUILT_IN_OBJECT_SIZE))
+ ubsan_create_edge (bos_stmt);
+
+ /* We have to emit the check. */
t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
GSI_SAME_STMT);
ptr = force_gimple_operand_gsi (gsi, ptr, true, NULL_TREE, true,
GSI_SAME_STMT);
tree ckind = build_int_cst (unsigned_char_type_node,
is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
- gimple g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
+ gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
ptr, t, sizet, ckind);
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
}
+/* Instrument values passed to builtin functions.  Currently this checks
+ that the argument of __builtin_clz* / __builtin_ctz* variants is not
+ zero, calling the __ubsan_handle_invalid_builtin* handler (or trapping)
+ when it is. */
+
+static void
+instrument_builtin (gimple_stmt_iterator *gsi)
+{
+ gimple *stmt = gsi_stmt (*gsi);
+ location_t loc = gimple_location (stmt);
+ tree arg;
+ enum built_in_function fcode
+ = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
+ /* KIND recorded in the ubsan data: 1 for clz, 0 for ctz. */
+ int kind = 0;
+ switch (fcode)
+ {
+ CASE_INT_FN (BUILT_IN_CLZ):
+ kind = 1;
+ gcc_fallthrough ();
+ CASE_INT_FN (BUILT_IN_CTZ):
+ arg = gimple_call_arg (stmt, 0);
+ /* A compile-time nonzero argument needs no runtime check. */
+ if (!integer_nonzerop (arg))
+ {
+ gimple *g;
+ /* Force ARG into a GIMPLE value so it can be used in the
+ condition below. */
+ if (!is_gimple_val (arg))
+ {
+ g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ arg = gimple_assign_lhs (g);
+ }
+
+ /* Branch to THEN_BB (the handler) only when ARG == 0. */
+ basic_block then_bb, fallthru_bb;
+ *gsi = create_cond_insert_point (gsi, true, false, true,
+ &then_bb, &fallthru_bb);
+ g = gimple_build_cond (EQ_EXPR, arg,
+ build_zero_cst (TREE_TYPE (arg)),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (gsi, g, GSI_NEW_STMT);
+
+ *gsi = gsi_after_labels (then_bb);
+ if (flag_sanitize_undefined_trap_on_error)
+ g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
+ else
+ {
+ tree t = build_int_cst (unsigned_char_type_node, kind);
+ tree data = ubsan_create_data ("__ubsan_builtin_data",
+ 1, &loc, NULL_TREE, t, NULL_TREE);
+ data = build_fold_addr_expr_loc (loc, data);
+ /* Recoverable vs. aborting handler depends on
+ -fsanitize-recover=builtin. */
+ enum built_in_function bcode
+ = (flag_sanitize_recover & SANITIZE_BUILTIN)
+ ? BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN
+ : BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN_ABORT;
+ tree fn = builtin_decl_explicit (bcode);
+
+ g = gimple_build_call (fn, 1, data);
+ }
+ gimple_set_location (g, loc);
+ gsi_insert_before (gsi, g, GSI_SAME_STMT);
+ ubsan_create_edge (g);
+ }
+ *gsi = gsi_for_stmt (stmt);
+ break;
+ default:
+ break;
+ }
+}
+
namespace {
const pass_data pass_data_ubsan =
/* opt_pass methods: */
virtual bool gate (function *)
{
- return flag_sanitize & (SANITIZE_NULL | SANITIZE_SI_OVERFLOW
- | SANITIZE_BOOL | SANITIZE_ENUM
- | SANITIZE_ALIGNMENT
- | SANITIZE_NONNULL_ATTRIBUTE
- | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
- | SANITIZE_OBJECT_SIZE)
- && current_function_decl != NULL_TREE
- && !lookup_attribute ("no_sanitize_undefined",
- DECL_ATTRIBUTES (current_function_decl));
+ return sanitize_flags_p ((SANITIZE_NULL | SANITIZE_SI_OVERFLOW
+ | SANITIZE_BOOL | SANITIZE_ENUM
+ | SANITIZE_ALIGNMENT
+ | SANITIZE_NONNULL_ATTRIBUTE
+ | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
+ | SANITIZE_OBJECT_SIZE
+ | SANITIZE_POINTER_OVERFLOW
+ | SANITIZE_BUILTIN));
}
virtual unsigned int execute (function *);
{
basic_block bb;
gimple_stmt_iterator gsi;
+ unsigned int ret = 0;
initialize_sanitizer_builtins ();
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
{
gsi_next (&gsi);
continue;
}
- if ((flag_sanitize & SANITIZE_SI_OVERFLOW)
+ if ((sanitize_flags_p (SANITIZE_SI_OVERFLOW, fun->decl))
&& is_gimple_assign (stmt))
instrument_si_overflow (gsi);
- if (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
+ if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT, fun->decl))
{
if (gimple_store_p (stmt))
- instrument_null (gsi, true);
- if (gimple_assign_load_p (stmt))
- instrument_null (gsi, false);
+ instrument_null (gsi, gimple_get_lhs (stmt), true);
+ if (gimple_assign_single_p (stmt))
+ instrument_null (gsi, gimple_assign_rhs1 (stmt), false);
+ if (is_gimple_call (stmt))
+ {
+ unsigned args_num = gimple_call_num_args (stmt);
+ for (unsigned i = 0; i < args_num; ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
+ continue;
+ instrument_null (gsi, arg, false);
+ }
+ }
}
- if (flag_sanitize & (SANITIZE_BOOL | SANITIZE_ENUM)
+ if (sanitize_flags_p (SANITIZE_BOOL | SANITIZE_ENUM, fun->decl)
&& gimple_assign_load_p (stmt))
{
instrument_bool_enum_load (&gsi);
bb = gimple_bb (stmt);
}
- if ((flag_sanitize & SANITIZE_NONNULL_ATTRIBUTE)
+ if (sanitize_flags_p (SANITIZE_NONNULL_ATTRIBUTE, fun->decl)
&& is_gimple_call (stmt)
&& !gimple_call_internal_p (stmt))
{
bb = gimple_bb (stmt);
}
- if ((flag_sanitize & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
+ if (sanitize_flags_p (SANITIZE_BUILTIN, fun->decl)
+ && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
+ {
+ instrument_builtin (&gsi);
+ bb = gimple_bb (stmt);
+ }
+
+ if (sanitize_flags_p (SANITIZE_RETURNS_NONNULL_ATTRIBUTE, fun->decl)
&& gimple_code (stmt) == GIMPLE_RETURN)
{
instrument_nonnull_return (&gsi);
bb = gimple_bb (stmt);
}
- if (flag_sanitize & SANITIZE_OBJECT_SIZE)
+ if (sanitize_flags_p (SANITIZE_OBJECT_SIZE, fun->decl))
{
if (gimple_store_p (stmt))
- instrument_object_size (&gsi, true);
+ instrument_object_size (&gsi, gimple_get_lhs (stmt), true);
if (gimple_assign_load_p (stmt))
- instrument_object_size (&gsi, false);
+ instrument_object_size (&gsi, gimple_assign_rhs1 (stmt),
+ false);
+ if (is_gimple_call (stmt))
+ {
+ unsigned args_num = gimple_call_num_args (stmt);
+ for (unsigned i = 0; i < args_num; ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
+ continue;
+ instrument_object_size (&gsi, arg, false);
+ }
+ }
+ }
+
+ if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
+ {
+ if (is_gimple_assign (stmt)
+ && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ instrument_pointer_overflow (&gsi,
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
+ if (gimple_store_p (stmt))
+ maybe_instrument_pointer_overflow (&gsi,
+ gimple_get_lhs (stmt));
+ if (gimple_assign_single_p (stmt))
+ maybe_instrument_pointer_overflow (&gsi,
+ gimple_assign_rhs1 (stmt));
+ if (is_gimple_call (stmt))
+ {
+ unsigned args_num = gimple_call_num_args (stmt);
+ for (unsigned i = 0; i < args_num; ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (is_gimple_reg (arg))
+ continue;
+ maybe_instrument_pointer_overflow (&gsi, arg);
+ }
+ }
}
gsi_next (&gsi);
}
+ if (gimple_purge_dead_eh_edges (bb))
+ ret = TODO_cleanup_cfg;
}
- return 0;
+ return ret;
}
} // anon namespace