/* Decl for __asan_option_detect_stack_use_after_return. */
static GTY(()) tree asan_detect_stack_use_after_return;
+/* Number of instrumentations in current function so far. */
+
+static int asan_num_accesses;
+
+/* Check whether we should replace inline instrumentation with calls. */
+
+static inline bool
+use_calls_p ()
+{
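+ /* An ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD of INT_MAX keeps the
+ inline instrumentation unconditionally. */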
+ return ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
+ && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;
+}
+
/* Hashtable support for memory references used by gimple
statements. */
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
IS_STORE is either 1 (for a store) or 0 (for a load). */
static tree
-report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, bool slow_p)
+report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, int *nargs)
{
static enum built_in_function report[2][6]
= { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
- if ((size_in_bytes & (size_in_bytes - 1)) != 0
- || size_in_bytes > 16
- || slow_p)
- return builtin_decl_implicit (report[is_store][5]);
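+ /* A SIZE_IN_BYTES of -1 denotes a variable-length access; report it
+ through the _N variant, which takes the length as a second argument. */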
+ if (size_in_bytes == -1)
+ {
+ *nargs = 2;
+ return builtin_decl_implicit (report[is_store][5]);
+ }
+ *nargs = 1;
return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
+/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
+ IS_STORE is either 1 (for a store) or 0 (for a load). */
+
+static tree
+check_func (bool is_store, int size_in_bytes, int *nargs)
+{
+ static enum built_in_function check[2][6]
+ = { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
+ BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
+ BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
+ { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
+ BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
+ BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } };
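+ /* As in report_error_func, -1 selects the two-argument _N variant. */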
+ if (size_in_bytes == -1)
+ {
+ *nargs = 2;
+ return builtin_decl_implicit (check[is_store][5]);
+ }
+ *nargs = 1;
+ return builtin_decl_implicit (check[is_store][exact_log2 (size_in_bytes)]);
+}
+
/* Split the current basic block and create a condition statement
insertion point right before or after the statement pointed to by
ITER. Return an iterator to the point at which the caller might
safely insert the condition statement. */
return gimple_assign_lhs (g);
}
+/* BASE can already be an SSA_NAME; in that case, do not create a
+ new SSA_NAME for it. */
+
+static tree
+maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
+ bool before_p)
+{
+ if (TREE_CODE (base) == SSA_NAME)
+ return base;
+ gimple g
+ = gimple_build_assign_with_ops (TREE_CODE (base),
+ make_ssa_name (TREE_TYPE (base), NULL),
+ base, NULL_TREE);
+ gimple_set_location (g, loc);
+ if (before_p)
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ else
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+ return gimple_assign_lhs (g);
+}
+
+/* Instrument the memory access instruction using callbacks.
+ Parameters are similar to BUILD_CHECK_STMT. */
+
+static void
+build_check_stmt_with_calls (location_t loc, tree base, tree len,
+ HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
+ bool before_p, bool is_store, bool is_scalar_access)
+{
+ gimple_stmt_iterator gsi = *iter;
+ tree base_ssa = maybe_create_ssa_name (loc, base, &gsi, before_p);
+
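+ /* Pass the address to the callback as a pointer-sized integer. */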
+ gimple g
+ = gimple_build_assign_with_ops (NOP_EXPR,
+ make_ssa_name (pointer_sized_int_node, NULL),
+ base_ssa, NULL_TREE);
+ gimple_set_location (g, loc);
+ if (before_p)
+ gsi_insert_before (&gsi, g, GSI_NEW_STMT);
+ else
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree base_addr = gimple_assign_lhs (g);
+
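+ /* Emit __asan_{load,store}{1,2,4,8,16} (addr) for known sizes, or
+ __asan_{load,store}N (addr, size) otherwise. */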
+ int nargs;
+ tree fun
+ = check_func (is_store, is_scalar_access ? size_in_bytes : -1, &nargs);
+ if (nargs == 1)
+ g = gimple_build_call (fun, 1, base_addr);
+ else
+ {
+ gcc_assert (nargs == 2);
+ g = gimple_build_assign_with_ops (NOP_EXPR,
+ make_ssa_name (pointer_sized_int_node,
+ NULL),
+ len, NULL_TREE);
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree sz_arg = gimple_assign_lhs (g);
+ g = gimple_build_call (fun, nargs, base_addr, sz_arg);
+ }
+ gimple_set_location (g, loc);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+
+ if (!before_p)
+ {
+ gsi_next (&gsi);
+ *iter = gsi;
+ }
+}
+
/* Instrument the memory access instruction BASE. Insert new
statements before or after ITER.
Note that the memory access represented by BASE can be either an
SSA_NAME, or a non-SSA expression. LOCATION is the source code
location. IS_STORE is TRUE for a store, FALSE for a load.
BEFORE_P is TRUE for inserting the instrumentation code before
- ITER, FALSE for inserting it after ITER.
+ ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
+ for a scalar memory access and FALSE for a memory region access.
+ NON_ZERO_LEN_P is TRUE if the memory region is guaranteed to have
+ non-zero length. ALIGN gives the alignment of the accessed memory
+ object.
+
+ START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
+ memory region have already been instrumented.
If BEFORE_P is TRUE, *ITER is arranged to still point to the
statement it was pointing to prior to calling this function,
otherwise, it points to the statement logically following it. */
static void
-build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
- bool before_p, bool is_store, HOST_WIDE_INT size_in_bytes,
- bool slow_p = false)
+build_check_stmt (location_t location, tree base, tree len,
+ HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
+ bool non_zero_len_p, bool before_p, bool is_store,
+ bool is_scalar_access, unsigned int align = 0,
+ bool start_instrumented = false,
+ bool end_instrumented = false)
{
- gimple_stmt_iterator gsi;
- basic_block then_bb, else_bb;
- tree t, base_addr, shadow;
+ gimple_stmt_iterator gsi = *iter;
gimple g;
- tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
- tree shadow_type = TREE_TYPE (shadow_ptr_type);
tree uintptr_type
= build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
- tree base_ssa = base;
- HOST_WIDE_INT real_size_in_bytes = size_in_bytes;
- tree sz_arg = NULL_TREE;
-
- if (size_in_bytes == 1)
- slow_p = false;
- else if ((size_in_bytes & (size_in_bytes - 1)) != 0
- || size_in_bytes > 16
- || slow_p)
+
+ gcc_assert (!(size_in_bytes > 0 && !non_zero_len_p));
+
+ if (len)
+ len = unshare_expr (len);
+ else
+ {
+ gcc_assert (size_in_bytes != -1);
+ len = build_int_cst (pointer_sized_int_node, size_in_bytes);
+ }
+
+ if (size_in_bytes > 1)
{
- real_size_in_bytes = 1;
- slow_p = true;
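+ /* Accesses of non-power-of-two or more than 16 bytes have no
+ dedicated shadow check; handle them as variable-length. */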
+ if ((size_in_bytes & (size_in_bytes - 1)) != 0
+ || size_in_bytes > 16)
+ size_in_bytes = -1;
+ else if (align && align < size_in_bytes * BITS_PER_UNIT)
+ {
+ /* On non-strict alignment targets, if
+ 16-byte access is just 8-byte aligned,
+ this will result in misaligned shadow
+ memory 2 byte load, but otherwise can
+ be handled using one read. */
+ if (size_in_bytes != 16
+ || STRICT_ALIGNMENT
+ || align < 8 * BITS_PER_UNIT)
+ size_in_bytes = -1;
+ }
+ }
+
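+ /* For variable-length accesses only the first and last bytes of the
+ region are checked inline, so the shadow is read at byte
+ granularity. */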
+ HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
+
+ tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
+ tree shadow_type = TREE_TYPE (shadow_ptr_type);
+
+ base = unshare_expr (base);
+
+ if (use_calls_p ())
+ {
+ build_check_stmt_with_calls (location, base, len, size_in_bytes, iter,
+ before_p, is_store, is_scalar_access);
+ return;
+ }
+
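+ /* Only inline checks count toward the callback threshold. */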
+ ++asan_num_accesses;
+
+ if (!non_zero_len_p)
+ {
+ gcc_assert (before_p);
+
+ /* The length of the memory area to asan-protect is not known to
+ be non-zero at compile time. Guard the generated instrumentation
+ code like:
+
+ if (len != 0)
+ {
+ // asan instrumentation code goes here.
+ }
+ // fallthrough instructions, starting with *ITER. */
+
+ g = gimple_build_cond (NE_EXPR,
+ len,
+ build_int_cst (TREE_TYPE (len), 0),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, location);
+
+ basic_block then_bb, fallthrough_bb;
+ insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
+ &then_bb, &fallthrough_bb);
+ /* Note that fallthrough_bb starts with the statement that was
+ pointed to by ITER. */
+
+ /* The 'then block' of the 'if (len != 0)' condition is where
+ we'll generate the asan instrumentation code now. */
+ gsi = gsi_last_bb (then_bb);
+ build_check_stmt (location, base, len, size_in_bytes, &gsi,
+ /*non_zero_len_p*/true, /*before_p*/true, is_store,
+ is_scalar_access, align,
+ start_instrumented, end_instrumented);
+ return;
}
/* Get an iterator on the point where we can add the condition
statement for the instrumentation. */
- gsi = create_cond_insert_point (iter, before_p,
+ basic_block then_bb, else_bb;
+ gsi = create_cond_insert_point (&gsi, before_p,
/*then_more_likely_p=*/false,
/*create_then_fallthru_edge=*/false,
&then_bb,
&else_bb);
- base = unshare_expr (base);
-
- /* BASE can already be an SSA_NAME; in that case, do not create a
- new SSA_NAME for it. */
- if (TREE_CODE (base) != SSA_NAME)
- {
- g = gimple_build_assign_with_ops (TREE_CODE (base),
- make_ssa_name (TREE_TYPE (base), NULL),
- base, NULL_TREE);
- gimple_set_location (g, location);
- gsi_insert_after (&gsi, g, GSI_NEW_STMT);
- base_ssa = gimple_assign_lhs (g);
- }
+ tree base_ssa = maybe_create_ssa_name (location, base, &gsi,
+ /*before_p*/false);
g = gimple_build_assign_with_ops (NOP_EXPR,
make_ssa_name (uintptr_type, NULL),
base_ssa, NULL_TREE);
gimple_set_location (g, location);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
- base_addr = gimple_assign_lhs (g);
-
- /* Build
- (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
- shadow = build_shadow_mem_access (&gsi, location, base_addr,
- shadow_ptr_type);
+ tree base_addr = gimple_assign_lhs (g);
- if (real_size_in_bytes < 8)
+ tree t;
+ if (real_size_in_bytes >= 8)
+ {
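+ /* An access of 8 or 16 bytes covers whole shadow memory granules,
+ so any non-zero shadow value signals an invalid access. */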
+ tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
+ shadow_ptr_type);
+ t = shadow;
+ }
+ else
{
- /* Slow path for 1, 2 and 4 byte accesses.
- Test (shadow != 0)
- & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
- gimple_seq seq = NULL;
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
- gimple_seq_add_stmt (&seq, shadow_test);
- gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
- gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
- gimple_seq_last (seq)));
- if (real_size_in_bytes > 1)
- gimple_seq_add_stmt (&seq,
- build_assign (PLUS_EXPR, gimple_seq_last (seq),
- real_size_in_bytes - 1));
- gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
- shadow));
- gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
- gimple_seq_last (seq)));
- t = gimple_assign_lhs (gimple_seq_last (seq));
- gimple_seq_set_location (seq, location);
- gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
- /* For weird access sizes or misaligned, check first and last byte. */
- if (slow_p)
+ /* Slow path for 1, 2 and 4 byte accesses. */
+
+ if (!start_instrumented)
{
+ /* Test (shadow != 0)
+ & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
+ tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
+ shadow_ptr_type);
+ gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple_seq seq = NULL;
+ gimple_seq_add_stmt (&seq, shadow_test);
+ gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
+ gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
+ gimple_seq_last (seq)));
+ if (real_size_in_bytes > 1)
+ gimple_seq_add_stmt (&seq,
+ build_assign (PLUS_EXPR, gimple_seq_last (seq),
+ real_size_in_bytes - 1));
+ gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
+ gimple_seq_last (seq),
+ shadow));
+ gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
+ gimple_seq_last (seq)));
+ t = gimple_assign_lhs (gimple_seq_last (seq));
+ gimple_seq_set_location (seq, location);
+ gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
+ }
+
+ /* For non-constant, misaligned or otherwise weird access sizes,
+ check first and last byte. */
+ if (size_in_bytes == -1 && !end_instrumented)
+ {
+ g = gimple_build_assign_with_ops (MINUS_EXPR,
+ make_ssa_name (uintptr_type, NULL),
+ len,
+ build_int_cst (uintptr_type, 1));
+ gimple_set_location (g, location);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree last = gimple_assign_lhs (g);
g = gimple_build_assign_with_ops (PLUS_EXPR,
make_ssa_name (uintptr_type, NULL),
base_addr,
- build_int_cst (uintptr_type,
- size_in_bytes - 1));
+ last);
gimple_set_location (g, location);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
tree base_end_addr = gimple_assign_lhs (g);
- shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
- shadow_ptr_type);
- seq = NULL;
- shadow_test = build_assign (NE_EXPR, shadow, 0);
+ tree shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
+ shadow_ptr_type);
+ gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
base_end_addr, 7));
gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
gimple_seq_last (seq)));
gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
gimple_seq_last (seq),
shadow));
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
gimple_seq_last (seq)));
- gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
- gimple_seq_last (seq)));
+ if (!start_instrumented)
+ gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
+ gimple_seq_last (seq)));
t = gimple_assign_lhs (gimple_seq_last (seq));
gimple_seq_set_location (seq, location);
gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
- sz_arg = build_int_cst (pointer_sized_int_node, size_in_bytes);
}
}
- else
- t = shadow;
g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
NULL_TREE, NULL_TREE);
gimple_set_location (g, location);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);

/* Generate call to the run-time library (e.g. __asan_report_load8). */
gsi = gsi_start_bb (then_bb);
- g = gimple_build_call (report_error_func (is_store, size_in_bytes, slow_p),
- sz_arg ? 2 : 1, base_addr, sz_arg);
+ int nargs;
+ tree fun = report_error_func (is_store, is_scalar_access ? size_in_bytes : -1,
+ &nargs);
+ if (nargs == 1)
+ g = gimple_build_call (fun, 1, base_addr);
+ else
+ {
+ gcc_assert (nargs == 2);
+ g = gimple_build_assign_with_ops (NOP_EXPR,
+ make_ssa_name (pointer_sized_int_node,
+ NULL),
+ len, NULL_TREE);
+ gimple_set_location (g, location);
+ gsi_insert_after (&gsi, g, GSI_NEW_STMT);
+ tree sz_arg = gimple_assign_lhs (g);
+ g = gimple_build_call (fun, nargs, base_addr, sz_arg);
+ }
gimple_set_location (g, location);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
base = build_fold_addr_expr (t);
if (!has_mem_ref_been_instrumented (base, size_in_bytes))
{
- bool slow_p = false;
- if (size_in_bytes > 1)
- {
- if ((size_in_bytes & (size_in_bytes - 1)) != 0
- || size_in_bytes > 16)
- slow_p = true;
- else
- {
- unsigned int align = get_object_alignment (t);
- if (align < size_in_bytes * BITS_PER_UNIT)
- {
- /* On non-strict alignment targets, if
- 16-byte access is just 8-byte aligned,
- this will result in misaligned shadow
- memory 2 byte load, but otherwise can
- be handled using one read. */
- if (size_in_bytes != 16
- || STRICT_ALIGNMENT
- || align < 8 * BITS_PER_UNIT)
- slow_p = true;
- }
- }
- }
- build_check_stmt (location, base, iter, /*before_p=*/true,
- is_store, size_in_bytes, slow_p);
+ unsigned int align = get_object_alignment (t);
+ build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
+ /*non_zero_len_p*/size_in_bytes > 0, /*before_p=*/true,
+ is_store, /*is_scalar_access*/true, align);
update_mem_ref_hash_table (base, size_in_bytes);
update_mem_ref_hash_table (t, size_in_bytes);
}
|| integer_zerop (len))
return;
- gimple_stmt_iterator gsi = *iter;
-
- basic_block fallthrough_bb = NULL, then_bb = NULL;
-
/* If the beginning of the memory region has already been
instrumented, do not instrument it. */
bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
/* If the end of the memory region has already been instrumented, do
- not instrument it. */
+ not instrument it. */
tree end = asan_mem_ref_get_end (base, len);
bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
- if (start_instrumented && end_instrumented)
- return;
-
- if (!is_gimple_constant (len))
- {
- /* So, the length of the memory area to asan-protect is
- non-constant. Let's guard the generated instrumentation code
- like:
-
- if (len != 0)
- {
- //asan instrumentation code goes here.
- }
- // falltrough instructions, starting with *ITER. */
-
- gimple g = gimple_build_cond (NE_EXPR,
- len,
- build_int_cst (TREE_TYPE (len), 0),
- NULL_TREE, NULL_TREE);
- gimple_set_location (g, location);
- insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
- &then_bb, &fallthrough_bb);
- /* Note that fallthrough_bb starts with the statement that was
- pointed to by ITER. */
-
- /* The 'then block' of the 'if (len != 0) condition is where
- we'll generate the asan instrumentation code now. */
- gsi = gsi_last_bb (then_bb);
- }
-
- if (!start_instrumented)
- {
- /* Instrument the beginning of the memory region to be accessed,
- and arrange for the rest of the intrumentation code to be
- inserted in the then block *after* the current gsi. */
- build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
-
- if (then_bb)
- /* We are in the case where the length of the region is not
- constant; so instrumentation code is being generated in the
- 'then block' of the 'if (len != 0) condition. Let's arrange
- for the subsequent instrumentation statements to go in the
- 'then block'. */
- gsi = gsi_last_bb (then_bb);
- else
- {
- *iter = gsi;
- /* Don't remember this access as instrumented, if length
- is unknown. It might be zero and not being actually
- instrumented, so we can't rely on it being instrumented. */
- update_mem_ref_hash_table (base, 1);
- }
- }
-
- if (end_instrumented)
- return;
+ HOST_WIDE_INT size_in_bytes
+ = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
- /* We want to instrument the access at the end of the memory region,
- which is at (base + len - 1). */
+ build_check_stmt (location, base, len, size_in_bytes, iter,
+ /*non_zero_len_p*/size_in_bytes > 0, /*before_p*/true,
+ is_store, /*is_scalar_access*/false, /*align*/0,
+ start_instrumented, end_instrumented);
- /* offset = len - 1; */
- len = unshare_expr (len);
- tree offset;
- gimple_seq seq = NULL;
- if (TREE_CODE (len) == INTEGER_CST)
- offset = fold_build2 (MINUS_EXPR, size_type_node,
- fold_convert (size_type_node, len),
- build_int_cst (size_type_node, 1));
- else
- {
- gimple g;
- tree t;
-
- if (TREE_CODE (len) != SSA_NAME)
- {
- t = make_ssa_name (TREE_TYPE (len), NULL);
- g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
- gimple_set_location (g, location);
- gimple_seq_add_stmt_without_update (&seq, g);
- len = t;
- }
- if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
- {
- t = make_ssa_name (size_type_node, NULL);
- g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
- gimple_set_location (g, location);
- gimple_seq_add_stmt_without_update (&seq, g);
- len = t;
- }
-
- t = make_ssa_name (size_type_node, NULL);
- g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
- build_int_cst (size_type_node, 1));
- gimple_set_location (g, location);
- gimple_seq_add_stmt_without_update (&seq, g);
- offset = gimple_assign_lhs (g);
- }
-
- /* _1 = base; */
- base = unshare_expr (base);
- gimple region_end =
- gimple_build_assign_with_ops (TREE_CODE (base),
- make_ssa_name (TREE_TYPE (base), NULL),
- base, NULL);
- gimple_set_location (region_end, location);
- gimple_seq_add_stmt_without_update (&seq, region_end);
-
- /* _2 = _1 + offset; */
- region_end =
- gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
- make_ssa_name (TREE_TYPE (base), NULL),
- gimple_assign_lhs (region_end),
- offset);
- gimple_set_location (region_end, location);
- gimple_seq_add_stmt_without_update (&seq, region_end);
- gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
-
- /* instrument access at _2; */
- gsi = gsi_for_stmt (region_end);
- build_check_stmt (location, gimple_assign_lhs (region_end),
- &gsi, /*before_p=*/false, is_store, 1);
-
- if (then_bb == NULL)
+ update_mem_ref_hash_table (base, 1);
+ if (size_in_bytes != -1)
update_mem_ref_hash_table (end, 1);
*iter = gsi_for_stmt (gsi_stmt (*iter));
location_t loc = gimple_location (call);
tree str_arg = gimple_call_arg (call, 0);
- /* Instrument the access to the first byte of str_arg. i.e:
-
- _1 = str_arg; instrument (_1); */
tree cptr_type = build_pointer_type (char_type_node);
gimple str_arg_ssa =
gimple_build_assign_with_ops (NOP_EXPR,
make_ssa_name (cptr_type, NULL),
str_arg, NULL);
gimple_set_location (str_arg_ssa, loc);
- gimple_stmt_iterator gsi = *iter;
- gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
- build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
- /*before_p=*/false, /*is_store=*/false, 1);
+ gsi_insert_before (iter, str_arg_ssa, GSI_SAME_STMT);
- /* If we initially had an instruction like:
+ build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), NULL_TREE, 1, iter,
+ /*non_zero_len_p*/true, /*before_p=*/true,
+ /*is_store=*/false, /*is_scalar_access*/false, /*align*/0);
- int n = strlen (str)
-
- we now want to instrument the access to str[n], after the
- instruction above.*/
-
- /* So let's build the access to str[n] that is, access through the
- pointer_plus expr: (_1 + len). */
gimple stmt =
gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
make_ssa_name (cptr_type, NULL),
gimple_assign_lhs (str_arg_ssa),
len);
gimple_set_location (stmt, loc);
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
-
- build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
- /*before_p=*/false, /*is_store=*/false, 1);
-
- /* Ensure that iter points to the statement logically following the
- one it was initially pointing to. */
- *iter = gsi;
- /* As *ITER has been advanced to point to the next statement, let's
- return true to inform transform_statements that it shouldn't
- advance *ITER anymore; otherwises it will skip that next
- statement, which wouldn't be instrumented. */
+ gsi_insert_after (iter, stmt, GSI_NEW_STMT);
+
+ build_check_stmt (loc, gimple_assign_lhs (stmt), NULL_TREE, 1, iter,
+ /*non_zero_len_p*/true, /*before_p=*/false,
+ /*is_store=*/false, /*is_scalar_access*/false, /*align*/0);
+
return true;
}
{
if (shadow_ptr_types[0] == NULL_TREE)
asan_init_shadow_ptr_types ();
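+ /* Restart the count of instrumented accesses for this function. */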
+ asan_num_accesses = 0;
transform_statements ();
return 0;
}