1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
36 #include "tree-iterator.h"
38 #include "langhooks.h"
39 #include "tree-pass.h"
40 #include "diagnostic.h"
44 #include "stringpool.h"
45 #include "tree-ssa-alias.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-operands.h"
48 #include "tree-ssa-address.h"
51 #include "dominance.h"
53 #include "basic-block.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "gimple-expr.h"
57 #include "tree-phinodes.h"
58 #include "gimple-ssa.h"
59 #include "ssa-iterators.h"
60 #include "gimple-pretty-print.h"
61 #include "gimple-iterator.h"
63 #include "gimplify-me.h"
64 #include "print-tree.h"
67 #include "hard-reg-set.h"
71 #include "statistics.h"
72 #include "insn-config.h"
80 #include "tree-ssa-propagate.h"
81 #include "gimple-fold.h"
82 #include "tree-chkp.h"
83 #include "gimple-walk.h"
84 #include "rtl.h" /* For MEM_P, assign_temp. */
87 #include "lto-streamer.h"
92 /* Pointer Bounds Checker instruments code with memory checks to find
93 out-of-bounds memory accesses. Checks are performed by computing
94 bounds for each pointer and then comparing address of accessed
95 memory before pointer dereferencing.
103 There are few things to instrument:
105 a) Memory accesses - add checker calls to check address of accessed memory
106 against bounds of dereferenced pointer. Obviously safe memory
107 accesses like static variable access does not have to be instrumented
114 with 4 bytes access is transformed into:
116 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
118 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
121 where __bound_tmp.1_3 are bounds computed for pointer p_1,
122 __builtin___chkp_bndcl is a lower bound check and
123 __builtin___chkp_bndcu is an upper bound check.
127 When pointer is stored in memory we need to store its bounds. To
128 achieve compatibility of instrumented code with regular codes
129 we have to keep data layout and store bounds in special bound tables
130 via special checker call. Implementation of bounds table may vary for
131 different platforms. It has to associate pointer value and its
132 location (it is required because we may have two equal pointers
133 with different bounds stored in different places) with bounds.
134 Another checker builtin allows to get bounds for specified pointer
135 loaded from specified location.
145 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
147 where __bound_tmp.1_2 are bounds of &buf2.
149 c) Static initialization.
151 The special case of pointer store is static pointer initialization.
152 Bounds initialization is performed in a few steps:
153 - register all static initializations in front-end using
154 chkp_register_var_initializer
155 - when file compilation finishes we create functions with special
156 attribute 'chkp ctor' and put explicit initialization code
157 (assignments) for all statically initialized pointers.
158 - when checker constructor is compiled checker pass adds required
159 bounds initialization for all statically initialized pointers
160 - since we do not actually need excess pointers initialization
161 in checker constructor we remove such assignments from them
165 For each call in the code we add additional arguments to pass
166 bounds for pointer arguments. We determine type of call arguments
167 using arguments list from function declaration; if function
168 declaration is not available we use function type; otherwise
169 (e.g. for unnamed arguments) we use type of passed value. Function
170 declaration/type is replaced with the instrumented one.
174 val_1 = foo (&buf1, &buf2, &buf1, 0);
178 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
179 &buf1, __bound_tmp.1_2, 0);
183 If function returns a pointer value we have to return bounds also.
184 A new operand was added for return statement to hold returned bounds.
192 return &_buf1, __bound_tmp.1_1;
194 3. Bounds computation.
196 Compiler is fully responsible for computing bounds to be used for each
197 memory access. The first step for bounds computation is to find the
198 origin of pointer dereferenced for memory access. Basing on pointer
199 origin we define a way to compute its bounds. There are just few
202 a) Pointer is returned by call.
204 In this case we use corresponding checker builtin method to obtain returned
209 buf_1 = malloc (size_2);
214 buf_1 = malloc (size_2);
215 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
216 foo (buf_1, __bound_tmp.1_3);
218 b) Pointer is an address of an object.
220 In this case compiler tries to compute objects size and create corresponding
221 bounds. If object has incomplete type then special checker builtin is used to
222 obtain its size at runtime.
228 <unnamed type> __bound_tmp.3;
232 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
235 return &buf, __bound_tmp.3_2;
240 Address of an object 'extern int buf[]' with incomplete type is
245 <unnamed type> __bound_tmp.4;
246 long unsigned int __size_tmp.3;
249 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
250 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
253 return &buf, __bound_tmp.4_3;
256 c) Pointer is the result of object narrowing.
258 It happens when we use pointer to an object to compute pointer to a part
259 of an object. E.g. we take pointer to a field of a structure. In this
260 case we perform bounds intersection using bounds of original object and
261 bounds of object's part (which are computed basing on its type).
263 There may be some debatable questions about when narrowing should occur
264 and when it should not. To avoid false bound violations in correct
265 programs we do not perform narrowing when address of an array element is
266 obtained (it has address of the whole array) and when address of the first
267 structure field is obtained (because it is guaranteed to be equal to
268 address of the whole structure and it is legal to cast it back to structure).
270 Default narrowing behavior may be changed using compiler flags.
274 In this example address of the second structure field is returned.
276 foo (struct A * p, __bounds_type __bounds_of_p)
278 <unnamed type> __bound_tmp.3;
283 _5 = &p_1(D)->second_field;
284 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
285 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
287 _2 = &p_1(D)->second_field;
288 return _2, __bound_tmp.3_8;
293 In this example address of the first field of array element is returned.
295 foo (struct A * p, __bounds_type __bounds_of_p, int i)
297 long unsigned int _3;
298 long unsigned int _4;
303 _3 = (long unsigned int) i_1(D);
306 _7 = &_6->first_field;
307 return _7, __bounds_of_p_2(D);
311 d) Pointer is the result of pointer arithmetic or type cast.
313 In this case bounds of the base pointer are used. In case of binary
314 operation producing a pointer we are analyzing data flow further
315 looking for operand's bounds. One operand is considered as a base
316 if it has some valid bounds. If we fall into a case when none of
317 operands (or both of them) has valid bounds, a default bounds value
320 Trying to find out bounds for binary operations we may fall into
321 cyclic dependencies for pointers. To avoid infinite recursion all
322 walked phi nodes instantly obtain corresponding bounds but created
323 bounds are marked as incomplete. It helps us to stop DF walk during
326 When we reach pointer source, some args of incomplete bounds phi obtain
327 valid bounds and those values are propagated further through phi nodes.
328 If no valid bounds were found for phi node then we mark its result as
329 invalid bounds. Process stops when all incomplete bounds become either
330 valid or invalid and we are able to choose a pointer base.
332 e) Pointer is loaded from the memory.
334 In this case we just need to load bounds from the bounds table.
340 <unnamed type> __bound_tmp.3;
346 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
347 return _2, __bound_tmp.3_4;
352 typedef void (*assign_handler
)(tree
, tree
, void *);
354 static tree
chkp_get_zero_bounds ();
355 static tree
chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
);
356 static tree
chkp_find_bounds_loaded (tree ptr
, tree ptr_src
,
357 gimple_stmt_iterator
*iter
);
358 static void chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
359 tree
*elt
, bool *safe
,
362 gimple_stmt_iterator
*iter
,
363 bool innermost_bounds
);
365 #define chkp_bndldx_fndecl \
366 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
367 #define chkp_bndstx_fndecl \
368 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
369 #define chkp_checkl_fndecl \
370 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
371 #define chkp_checku_fndecl \
372 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
373 #define chkp_bndmk_fndecl \
374 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
375 #define chkp_ret_bnd_fndecl \
376 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
377 #define chkp_intersect_fndecl \
378 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
379 #define chkp_narrow_bounds_fndecl \
380 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
381 #define chkp_sizeof_fndecl \
382 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
383 #define chkp_extract_lower_fndecl \
384 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
385 #define chkp_extract_upper_fndecl \
386 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
388 static GTY (()) tree chkp_uintptr_type
;
390 static GTY (()) tree chkp_zero_bounds_var
;
391 static GTY (()) tree chkp_none_bounds_var
;
393 static GTY (()) basic_block entry_block
;
394 static GTY (()) tree zero_bounds
;
395 static GTY (()) tree none_bounds
;
396 static GTY (()) tree incomplete_bounds
;
397 static GTY (()) tree tmp_var
;
398 static GTY (()) tree size_tmp_var
;
399 static GTY (()) bitmap chkp_abnormal_copies
;
401 struct hash_set
<tree
> *chkp_invalid_bounds
;
402 struct hash_set
<tree
> *chkp_completed_bounds_set
;
403 struct hash_map
<tree
, tree
> *chkp_reg_bounds
;
404 struct hash_map
<tree
, tree
> *chkp_bound_vars
;
405 struct hash_map
<tree
, tree
> *chkp_reg_addr_bounds
;
406 struct hash_map
<tree
, tree
> *chkp_incomplete_bounds_map
;
407 struct hash_map
<tree
, tree
> *chkp_bounds_map
;
408 struct hash_map
<tree
, tree
> *chkp_static_var_bounds
;
410 static bool in_chkp_pass
;
412 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
413 #define CHKP_SIZE_TMP_NAME "__size_tmp"
414 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
415 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
416 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
417 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
418 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
420 /* Static checker constructors may become very large and their
421 compilation with optimization may take too much time.
422 Therefore we put a limit to number of statements in one
423 constructor. Tests with 100 000 statically initialized
424 pointers showed following compilation times on Sandy Bridge
426 limit 100 => ~18 sec.
427 limit 300 => ~22 sec.
428 limit 1000 => ~30 sec.
429 limit 3000 => ~49 sec.
430 limit 5000 => ~55 sec.
431 limit 10000 => ~76 sec.
432 limit 100000 => ~532 sec. */
433 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
435 struct chkp_ctor_stmt_list
441 /* Return 1 if function FNDECL is instrumented by Pointer
444 chkp_function_instrumented_p (tree fndecl
)
447 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl
));
450 /* Mark function FNDECL as instrumented. */
452 chkp_function_mark_instrumented (tree fndecl
)
454 if (chkp_function_instrumented_p (fndecl
))
457 DECL_ATTRIBUTES (fndecl
)
458 = tree_cons (get_identifier ("chkp instrumented"), NULL
,
459 DECL_ATTRIBUTES (fndecl
));
462 /* Return true when STMT is builtin call to instrumentation function
463 corresponding to CODE. */
466 chkp_gimple_call_builtin_p (gimple call
,
467 enum built_in_function code
)
470 if (is_gimple_call (call
)
471 && (fndecl
= targetm
.builtin_chkp_function (code
))
472 && gimple_call_fndecl (call
) == fndecl
)
477 /* Emit code to store zero bounds for PTR located at MEM. */
479 chkp_expand_bounds_reset_for_mem (tree mem
, tree ptr
)
481 tree zero_bnd
, bnd
, addr
, bndstx
;
483 if (flag_chkp_use_static_const_bounds
)
484 zero_bnd
= chkp_get_zero_bounds_var ();
486 zero_bnd
= chkp_build_make_bounds_call (integer_zero_node
,
488 bnd
= make_tree (pointer_bounds_type_node
,
489 assign_temp (pointer_bounds_type_node
, 0, 1));
490 addr
= build1 (ADDR_EXPR
,
491 build_pointer_type (TREE_TYPE (mem
)), mem
);
492 bndstx
= chkp_build_bndstx_call (addr
, ptr
, bnd
);
494 expand_assignment (bnd
, zero_bnd
, false);
495 expand_normal (bndstx
);
498 /* Build retbnd call for returned value RETVAL.
500 If BNDVAL is not NULL then result is stored
501 in it. Otherwise a temporary is created to
504 GSI points to a position for a retbnd call
505 and is set to created stmt.
507 Cgraph edge is created for a new call if
510 Obtained bounds are returned. */
512 chkp_insert_retbnd_call (tree bndval
, tree retval
,
513 gimple_stmt_iterator
*gsi
)
518 bndval
= create_tmp_reg (pointer_bounds_type_node
, "retbnd");
520 call
= gimple_build_call (chkp_ret_bnd_fndecl
, 1, retval
);
521 gimple_call_set_lhs (call
, bndval
);
522 gsi_insert_after (gsi
, call
, GSI_CONTINUE_LINKING
);
527 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
531 chkp_copy_call_skip_bounds (gcall
*call
)
536 bitmap_obstack_initialize (NULL
);
537 bounds
= BITMAP_ALLOC (NULL
);
539 for (i
= 0; i
< gimple_call_num_args (call
); i
++)
540 if (POINTER_BOUNDS_P (gimple_call_arg (call
, i
)))
541 bitmap_set_bit (bounds
, i
);
543 if (!bitmap_empty_p (bounds
))
544 call
= gimple_call_copy_skip_args (call
, bounds
);
545 gimple_call_set_with_bounds (call
, false);
547 BITMAP_FREE (bounds
);
548 bitmap_obstack_release (NULL
);
553 /* Redirect edge E to the correct node according to call_stmt.
554 Return 1 if bounds removal from call_stmt should be done
555 instead of redirection. */
558 chkp_redirect_edge (cgraph_edge
*e
)
560 bool instrumented
= false;
561 tree decl
= e
->callee
->decl
;
563 if (e
->callee
->instrumentation_clone
564 || chkp_function_instrumented_p (decl
))
568 && !gimple_call_with_bounds_p (e
->call_stmt
))
569 e
->redirect_callee (cgraph_node::get_create (e
->callee
->orig_decl
));
570 else if (!instrumented
571 && gimple_call_with_bounds_p (e
->call_stmt
)
572 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDCL
)
573 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDCU
)
574 && !chkp_gimple_call_builtin_p (e
->call_stmt
, BUILT_IN_CHKP_BNDSTX
))
576 if (e
->callee
->instrumented_version
)
577 e
->redirect_callee (e
->callee
->instrumented_version
);
580 tree args
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
581 /* Avoid bounds removal if all args will be removed. */
582 if (!args
|| TREE_VALUE (args
) != void_type_node
)
585 gimple_call_set_with_bounds (e
->call_stmt
, false);
592 /* Mark statement S to not be instrumented. */
594 chkp_mark_stmt (gimple s
)
596 gimple_set_plf (s
, GF_PLF_1
, true);
599 /* Mark statement S to be instrumented. */
601 chkp_unmark_stmt (gimple s
)
603 gimple_set_plf (s
, GF_PLF_1
, false);
606 /* Return 1 if statement S should not be instrumented. */
608 chkp_marked_stmt_p (gimple s
)
610 return gimple_plf (s
, GF_PLF_1
);
613 /* Get var to be used for bound temps. */
615 chkp_get_tmp_var (void)
618 tmp_var
= create_tmp_reg (pointer_bounds_type_node
, CHKP_BOUND_TMP_NAME
);
623 /* Get SSA_NAME to be used as temp. */
625 chkp_get_tmp_reg (gimple stmt
)
628 return make_ssa_name (chkp_get_tmp_var (), stmt
);
630 return make_temp_ssa_name (pointer_bounds_type_node
, stmt
,
631 CHKP_BOUND_TMP_NAME
);
634 /* Get var to be used for size temps. */
636 chkp_get_size_tmp_var (void)
639 size_tmp_var
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
644 /* Register bounds BND for address of OBJ. */
646 chkp_register_addr_bounds (tree obj
, tree bnd
)
648 if (bnd
== incomplete_bounds
)
651 chkp_reg_addr_bounds
->put (obj
, bnd
);
653 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
655 fprintf (dump_file
, "Regsitered bound ");
656 print_generic_expr (dump_file
, bnd
, 0);
657 fprintf (dump_file
, " for address of ");
658 print_generic_expr (dump_file
, obj
, 0);
659 fprintf (dump_file
, "\n");
663 /* Return bounds registered for address of OBJ. */
665 chkp_get_registered_addr_bounds (tree obj
)
667 tree
*slot
= chkp_reg_addr_bounds
->get (obj
);
668 return slot
? *slot
: NULL_TREE
;
671 /* Mark BOUNDS as completed. */
673 chkp_mark_completed_bounds (tree bounds
)
675 chkp_completed_bounds_set
->add (bounds
);
677 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
679 fprintf (dump_file
, "Marked bounds ");
680 print_generic_expr (dump_file
, bounds
, 0);
681 fprintf (dump_file
, " as completed\n");
685 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
687 chkp_completed_bounds (tree bounds
)
689 return chkp_completed_bounds_set
->contains (bounds
);
692 /* Clear comleted bound marks. */
694 chkp_erase_completed_bounds (void)
696 delete chkp_completed_bounds_set
;
697 chkp_completed_bounds_set
= new hash_set
<tree
>;
700 /* Mark BOUNDS associated with PTR as incomplete. */
702 chkp_register_incomplete_bounds (tree bounds
, tree ptr
)
704 chkp_incomplete_bounds_map
->put (bounds
, ptr
);
706 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
708 fprintf (dump_file
, "Regsitered incomplete bounds ");
709 print_generic_expr (dump_file
, bounds
, 0);
710 fprintf (dump_file
, " for ");
711 print_generic_expr (dump_file
, ptr
, 0);
712 fprintf (dump_file
, "\n");
716 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
718 chkp_incomplete_bounds (tree bounds
)
720 if (bounds
== incomplete_bounds
)
723 if (chkp_completed_bounds (bounds
))
726 return chkp_incomplete_bounds_map
->get (bounds
) != NULL
;
729 /* Clear incomleted bound marks. */
731 chkp_erase_incomplete_bounds (void)
733 delete chkp_incomplete_bounds_map
;
734 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
737 /* Build and return bndmk call which creates bounds for structure
738 pointed by PTR. Structure should have complete type. */
740 chkp_make_bounds_for_struct_addr (tree ptr
)
742 tree type
= TREE_TYPE (ptr
);
745 gcc_assert (POINTER_TYPE_P (type
));
747 size
= TYPE_SIZE (TREE_TYPE (type
));
751 return build_call_nary (pointer_bounds_type_node
,
752 build_fold_addr_expr (chkp_bndmk_fndecl
),
756 /* Traversal function for chkp_may_finish_incomplete_bounds.
757 Set RES to 0 if at least one argument of phi statement
758 defining bounds (passed in KEY arg) is unknown.
759 Traversal stops when first unknown phi argument is found. */
761 chkp_may_complete_phi_bounds (tree
const &bounds
, tree
*slot ATTRIBUTE_UNUSED
,
767 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
769 phi
= SSA_NAME_DEF_STMT (bounds
);
771 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
773 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
775 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
779 /* Do not need to traverse further. */
787 /* Return 1 if all phi nodes created for bounds have their
788 arguments computed. */
790 chkp_may_finish_incomplete_bounds (void)
794 chkp_incomplete_bounds_map
795 ->traverse
<bool *, chkp_may_complete_phi_bounds
> (&res
);
800 /* Helper function for chkp_finish_incomplete_bounds.
801 Recompute args for bounds phi node. */
803 chkp_recompute_phi_bounds (tree
const &bounds
, tree
*slot
,
804 void *res ATTRIBUTE_UNUSED
)
811 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
812 gcc_assert (TREE_CODE (ptr
) == SSA_NAME
);
814 bounds_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (bounds
));
815 ptr_phi
= as_a
<gphi
*> (SSA_NAME_DEF_STMT (ptr
));
817 for (i
= 0; i
< gimple_phi_num_args (bounds_phi
); i
++)
819 tree ptr_arg
= gimple_phi_arg_def (ptr_phi
, i
);
820 tree bound_arg
= chkp_find_bounds (ptr_arg
, NULL
);
822 add_phi_arg (bounds_phi
, bound_arg
,
823 gimple_phi_arg_edge (ptr_phi
, i
),
830 /* Mark BOUNDS as invalid. */
832 chkp_mark_invalid_bounds (tree bounds
)
834 chkp_invalid_bounds
->add (bounds
);
836 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
838 fprintf (dump_file
, "Marked bounds ");
839 print_generic_expr (dump_file
, bounds
, 0);
840 fprintf (dump_file
, " as invalid\n");
844 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
846 chkp_valid_bounds (tree bounds
)
848 if (bounds
== zero_bounds
|| bounds
== none_bounds
)
851 return !chkp_invalid_bounds
->contains (bounds
);
854 /* Helper function for chkp_finish_incomplete_bounds.
855 Check all arguments of phi nodes trying to find
856 valid completed bounds. If there is at least one
857 such arg then bounds produced by phi node are marked
858 as valid completed bounds and all phi args are
861 chkp_find_valid_phi_bounds (tree
const &bounds
, tree
*slot
, bool *res
)
866 gcc_assert (TREE_CODE (bounds
) == SSA_NAME
);
868 if (chkp_completed_bounds (bounds
))
871 phi
= SSA_NAME_DEF_STMT (bounds
);
873 gcc_assert (phi
&& gimple_code (phi
) == GIMPLE_PHI
);
875 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
877 tree phi_arg
= gimple_phi_arg_def (phi
, i
);
879 gcc_assert (phi_arg
);
881 if (chkp_valid_bounds (phi_arg
) && !chkp_incomplete_bounds (phi_arg
))
884 chkp_mark_completed_bounds (bounds
);
885 chkp_recompute_phi_bounds (bounds
, slot
, NULL
);
893 /* Helper function for chkp_finish_incomplete_bounds.
894 Marks all incompleted bounds as invalid. */
896 chkp_mark_invalid_bounds_walker (tree
const &bounds
,
897 tree
*slot ATTRIBUTE_UNUSED
,
898 void *res ATTRIBUTE_UNUSED
)
900 if (!chkp_completed_bounds (bounds
))
902 chkp_mark_invalid_bounds (bounds
);
903 chkp_mark_completed_bounds (bounds
);
908 /* When all bound phi nodes have all their args computed
909 we have enough info to find valid bounds. We iterate
910 through all incompleted bounds searching for valid
911 bounds. Found valid bounds are marked as completed
912 and all remaining incompleted bounds are recomputed.
913 Process continues until no new valid bounds may be
914 found. All remained incompleted bounds are marked as
915 invalid (i.e. have no valid source of bounds). */
917 chkp_finish_incomplete_bounds (void)
925 chkp_incomplete_bounds_map
->
926 traverse
<bool *, chkp_find_valid_phi_bounds
> (&found_valid
);
929 chkp_incomplete_bounds_map
->
930 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
933 chkp_incomplete_bounds_map
->
934 traverse
<void *, chkp_mark_invalid_bounds_walker
> (NULL
);
935 chkp_incomplete_bounds_map
->
936 traverse
<void *, chkp_recompute_phi_bounds
> (NULL
);
938 chkp_erase_completed_bounds ();
939 chkp_erase_incomplete_bounds ();
942 /* Return 1 if type TYPE is a pointer type or a
943 structure having a pointer type as one of its fields.
944 Otherwise return 0. */
946 chkp_type_has_pointer (const_tree type
)
950 if (BOUNDED_TYPE_P (type
))
952 else if (RECORD_OR_UNION_TYPE_P (type
))
956 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
957 if (TREE_CODE (field
) == FIELD_DECL
)
958 res
= res
|| chkp_type_has_pointer (TREE_TYPE (field
));
960 else if (TREE_CODE (type
) == ARRAY_TYPE
)
961 res
= chkp_type_has_pointer (TREE_TYPE (type
));
967 chkp_type_bounds_count (const_tree type
)
973 else if (BOUNDED_TYPE_P (type
))
975 else if (RECORD_OR_UNION_TYPE_P (type
))
979 bitmap_obstack_initialize (NULL
);
980 have_bound
= BITMAP_ALLOC (NULL
);
981 chkp_find_bound_slots (type
, have_bound
);
982 res
= bitmap_count_bits (have_bound
);
983 BITMAP_FREE (have_bound
);
984 bitmap_obstack_release (NULL
);
990 /* Get bounds associated with NODE via
991 chkp_set_bounds call. */
993 chkp_get_bounds (tree node
)
997 if (!chkp_bounds_map
)
1000 slot
= chkp_bounds_map
->get (node
);
1001 return slot
? *slot
: NULL_TREE
;
1004 /* Associate bounds VAL with NODE. */
1006 chkp_set_bounds (tree node
, tree val
)
1008 if (!chkp_bounds_map
)
1009 chkp_bounds_map
= new hash_map
<tree
, tree
>;
1011 chkp_bounds_map
->put (node
, val
);
1014 /* Check if statically initialized variable VAR require
1015 static bounds initialization. If VAR is added into
1016 bounds initlization list then 1 is returned. Otherwise
1019 chkp_register_var_initializer (tree var
)
1021 if (!flag_check_pointer_bounds
1022 || DECL_INITIAL (var
) == error_mark_node
)
1025 gcc_assert (TREE_CODE (var
) == VAR_DECL
);
1026 gcc_assert (DECL_INITIAL (var
));
1028 if (TREE_STATIC (var
)
1029 && chkp_type_has_pointer (TREE_TYPE (var
)))
1031 varpool_node::get_create (var
)->need_bounds_init
= 1;
1038 /* Helper function for chkp_finish_file.
1040 Add new modification statement (RHS is assigned to LHS)
1041 into list of static initializer statementes (passed in ARG).
1042 If statements list becomes too big, emit checker constructor
1043 and start the new one. */
1045 chkp_add_modification_to_stmt_list (tree lhs
,
1049 struct chkp_ctor_stmt_list
*stmts
= (struct chkp_ctor_stmt_list
*)arg
;
1052 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (rhs
)))
1053 rhs
= build1 (CONVERT_EXPR
, TREE_TYPE (lhs
), rhs
);
1055 modify
= build2 (MODIFY_EXPR
, TREE_TYPE (lhs
), lhs
, rhs
);
1056 append_to_statement_list (modify
, &stmts
->stmts
);
1061 /* Build and return ADDR_EXPR for specified object OBJ. */
1063 chkp_build_addr_expr (tree obj
)
1065 return TREE_CODE (obj
) == TARGET_MEM_REF
1066 ? tree_mem_ref_addr (ptr_type_node
, obj
)
1067 : build_fold_addr_expr (obj
);
1070 /* Helper function for chkp_finish_file.
1071 Initialize bound variable BND_VAR with bounds of variable
1072 VAR to statements list STMTS. If statements list becomes
1073 too big, emit checker constructor and start the new one. */
1075 chkp_output_static_bounds (tree bnd_var
, tree var
,
1076 struct chkp_ctor_stmt_list
*stmts
)
1080 if (TREE_CODE (var
) == STRING_CST
)
1082 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1083 size
= build_int_cst (size_type_node
, TREE_STRING_LENGTH (var
) - 1);
1085 else if (DECL_SIZE (var
)
1086 && !chkp_variable_size_type (TREE_TYPE (var
)))
1088 /* Compute bounds using statically known size. */
1089 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1090 size
= size_binop (MINUS_EXPR
, DECL_SIZE_UNIT (var
), size_one_node
);
1094 /* Compute bounds using dynamic size. */
1097 lb
= build1 (CONVERT_EXPR
, size_type_node
, chkp_build_addr_expr (var
));
1098 call
= build1 (ADDR_EXPR
,
1099 build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl
)),
1100 chkp_sizeof_fndecl
);
1101 size
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl
)),
1104 if (flag_chkp_zero_dynamic_size_as_infinite
)
1106 tree max_size
, cond
;
1108 max_size
= build2 (MINUS_EXPR
, size_type_node
, size_zero_node
, lb
);
1109 cond
= build2 (NE_EXPR
, boolean_type_node
, size
, size_zero_node
);
1110 size
= build3 (COND_EXPR
, size_type_node
, cond
, size
, max_size
);
1113 size
= size_binop (MINUS_EXPR
, size
, size_one_node
);
1116 ub
= size_binop (PLUS_EXPR
, lb
, size
);
1117 stmts
->avail
-= targetm
.chkp_initialize_bounds (bnd_var
, lb
, ub
,
1119 if (stmts
->avail
<= 0)
1121 cgraph_build_static_cdtor ('B', stmts
->stmts
,
1122 MAX_RESERVED_INIT_PRIORITY
+ 2);
1123 stmts
->avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
1124 stmts
->stmts
= NULL
;
1128 /* Return entry block to be used for checker initilization code.
1129 Create new block if required. */
1131 chkp_get_entry_block (void)
1135 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->dest
;
1140 /* Return a bounds var to be used for pointer var PTR_VAR. */
1142 chkp_get_bounds_var (tree ptr_var
)
1147 slot
= chkp_bound_vars
->get (ptr_var
);
1152 bnd_var
= create_tmp_reg (pointer_bounds_type_node
,
1153 CHKP_BOUND_TMP_NAME
);
1154 chkp_bound_vars
->put (ptr_var
, bnd_var
);
1160 /* If BND is an abnormal bounds copy, return a copied value.
1161 Otherwise return BND. */
1163 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd
)
1165 if (bitmap_bit_p (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
)))
1167 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1168 gcc_checking_assert (gimple_code (bnd_def
) == GIMPLE_ASSIGN
);
1169 bnd
= gimple_assign_rhs1 (bnd_def
);
1175 /* Register bounds BND for object PTR in global bounds table.
1176 A copy of bounds may be created for abnormal ssa names.
1177 Returns bounds to use for PTR. */
1179 chkp_maybe_copy_and_register_bounds (tree ptr
, tree bnd
)
1183 if (!chkp_reg_bounds
)
1186 /* Do nothing if bounds are incomplete_bounds
1187 because it means bounds will be recomputed. */
1188 if (bnd
== incomplete_bounds
)
1191 abnormal_ptr
= (TREE_CODE (ptr
) == SSA_NAME
1192 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr
)
1193 && gimple_code (SSA_NAME_DEF_STMT (ptr
)) != GIMPLE_PHI
);
1195 /* A single bounds value may be reused multiple times for
1196 different pointer values. It may cause coalescing issues
1197 for abnormal SSA names. To avoid it we create a bounds
1198 copy in case it is computed for abnormal SSA name.
1200 We also cannot reuse such created copies for other pointers */
1202 || bitmap_bit_p (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
)))
1204 tree bnd_var
= NULL_TREE
;
1208 if (SSA_NAME_VAR (ptr
))
1209 bnd_var
= chkp_get_bounds_var (SSA_NAME_VAR (ptr
));
1212 bnd_var
= chkp_get_tmp_var ();
1214 /* For abnormal copies we may just find original
1215 bounds and use them. */
1216 if (!abnormal_ptr
&& !SSA_NAME_IS_DEFAULT_DEF (bnd
))
1217 bnd
= chkp_get_orginal_bounds_for_abnormal_copy (bnd
);
1218 /* For undefined values we usually use none bounds
1219 value but in case of abnormal edge it may cause
1220 coalescing failures. Use default definition of
1221 bounds variable instead to avoid it. */
1222 else if (SSA_NAME_IS_DEFAULT_DEF (ptr
)
1223 && TREE_CODE (SSA_NAME_VAR (ptr
)) != PARM_DECL
)
1225 bnd
= get_or_create_ssa_default_def (cfun
, bnd_var
);
1227 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1229 fprintf (dump_file
, "Using default def bounds ");
1230 print_generic_expr (dump_file
, bnd
, 0);
1231 fprintf (dump_file
, " for abnormal default def SSA name ");
1232 print_generic_expr (dump_file
, ptr
, 0);
1233 fprintf (dump_file
, "\n");
1239 gimple def
= SSA_NAME_DEF_STMT (ptr
);
1241 gimple_stmt_iterator gsi
;
1244 copy
= make_ssa_name (bnd_var
);
1246 copy
= make_temp_ssa_name (pointer_bounds_type_node
,
1248 CHKP_BOUND_TMP_NAME
);
1249 bnd
= chkp_get_orginal_bounds_for_abnormal_copy (bnd
);
1250 assign
= gimple_build_assign (copy
, bnd
);
1252 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1254 fprintf (dump_file
, "Creating a copy of bounds ");
1255 print_generic_expr (dump_file
, bnd
, 0);
1256 fprintf (dump_file
, " for abnormal SSA name ");
1257 print_generic_expr (dump_file
, ptr
, 0);
1258 fprintf (dump_file
, "\n");
1261 if (gimple_code (def
) == GIMPLE_NOP
)
1263 gsi
= gsi_last_bb (chkp_get_entry_block ());
1264 if (!gsi_end_p (gsi
) && is_ctrl_stmt (gsi_stmt (gsi
)))
1265 gsi_insert_before (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1267 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1271 gimple bnd_def
= SSA_NAME_DEF_STMT (bnd
);
1272 /* Sometimes (e.g. when we load a pointer from a
1273 memory) bounds are produced later than a pointer.
1274 We need to insert bounds copy appropriately. */
1275 if (gimple_code (bnd_def
) != GIMPLE_NOP
1276 && stmt_dominates_stmt_p (def
, bnd_def
))
1277 gsi
= gsi_for_stmt (bnd_def
);
1279 gsi
= gsi_for_stmt (def
);
1280 gsi_insert_after (&gsi
, assign
, GSI_CONTINUE_LINKING
);
1287 bitmap_set_bit (chkp_abnormal_copies
, SSA_NAME_VERSION (bnd
));
1290 chkp_reg_bounds
->put (ptr
, bnd
);
1292 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1294 fprintf (dump_file
, "Regsitered bound ");
1295 print_generic_expr (dump_file
, bnd
, 0);
1296 fprintf (dump_file
, " for pointer ");
1297 print_generic_expr (dump_file
, ptr
, 0);
1298 fprintf (dump_file
, "\n");
1304 /* Get bounds registered for object PTR in global bounds table. */
1306 chkp_get_registered_bounds (tree ptr
)
1310 if (!chkp_reg_bounds
)
1313 slot
= chkp_reg_bounds
->get (ptr
);
1314 return slot
? *slot
: NULL_TREE
;
1317 /* Add bound retvals to return statement pointed by GSI. */
1320 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator
*gsi
)
1322 greturn
*ret
= as_a
<greturn
*> (gsi_stmt (*gsi
));
1323 tree retval
= gimple_return_retval (ret
);
1324 tree ret_decl
= DECL_RESULT (cfun
->decl
);
1330 if (BOUNDED_P (ret_decl
))
1332 bounds
= chkp_find_bounds (retval
, gsi
);
1333 bounds
= chkp_maybe_copy_and_register_bounds (ret_decl
, bounds
);
1334 gimple_return_set_retbnd (ret
, bounds
);
1340 /* Force OP to be suitable for using as an argument for call.
1341 New statements (if any) go to SEQ. */
1343 chkp_force_gimple_call_op (tree op
, gimple_seq
*seq
)
1346 gimple_stmt_iterator si
;
1348 op
= force_gimple_operand (unshare_expr (op
), &stmts
, true, NULL_TREE
);
1350 for (si
= gsi_start (stmts
); !gsi_end_p (si
); gsi_next (&si
))
1351 chkp_mark_stmt (gsi_stmt (si
));
1353 gimple_seq_add_seq (seq
, stmts
);
1358 /* Generate lower bound check for memory access by ADDR.
1359 Check is inserted before the position pointed by ITER.
1360 DIRFLAG indicates whether memory access is load or store. */
1362 chkp_check_lower (tree addr
, tree bounds
,
1363 gimple_stmt_iterator iter
,
1364 location_t location
,
1371 if (!chkp_function_instrumented_p (current_function_decl
)
1372 && bounds
== chkp_get_zero_bounds ())
1375 if (dirflag
== integer_zero_node
1376 && !flag_chkp_check_read
)
1379 if (dirflag
== integer_one_node
1380 && !flag_chkp_check_write
)
1385 node
= chkp_force_gimple_call_op (addr
, &seq
);
1387 check
= gimple_build_call (chkp_checkl_fndecl
, 2, node
, bounds
);
1388 chkp_mark_stmt (check
);
1389 gimple_call_set_with_bounds (check
, true);
1390 gimple_set_location (check
, location
);
1391 gimple_seq_add_stmt (&seq
, check
);
1393 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1395 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1397 gimple before
= gsi_stmt (iter
);
1398 fprintf (dump_file
, "Generated lower bound check for statement ");
1399 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1400 fprintf (dump_file
, " ");
1401 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1405 /* Generate upper bound check for memory access by ADDR.
1406 Check is inserted before the position pointed by ITER.
1407 DIRFLAG indicates whether memory access is load or store. */
1409 chkp_check_upper (tree addr
, tree bounds
,
1410 gimple_stmt_iterator iter
,
1411 location_t location
,
1418 if (!chkp_function_instrumented_p (current_function_decl
)
1419 && bounds
== chkp_get_zero_bounds ())
1422 if (dirflag
== integer_zero_node
1423 && !flag_chkp_check_read
)
1426 if (dirflag
== integer_one_node
1427 && !flag_chkp_check_write
)
1432 node
= chkp_force_gimple_call_op (addr
, &seq
);
1434 check
= gimple_build_call (chkp_checku_fndecl
, 2, node
, bounds
);
1435 chkp_mark_stmt (check
);
1436 gimple_call_set_with_bounds (check
, true);
1437 gimple_set_location (check
, location
);
1438 gimple_seq_add_stmt (&seq
, check
);
1440 gsi_insert_seq_before (&iter
, seq
, GSI_SAME_STMT
);
1442 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1444 gimple before
= gsi_stmt (iter
);
1445 fprintf (dump_file
, "Generated upper bound check for statement ");
1446 print_gimple_stmt (dump_file
, before
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1447 fprintf (dump_file
, " ");
1448 print_gimple_stmt (dump_file
, check
, 0, TDF_VOPS
|TDF_MEMSYMS
);
1452 /* Generate lower and upper bound checks for memory access
1453 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1454 are inserted before the position pointed by ITER.
1455 DIRFLAG indicates whether memory access is load or store. */
1457 chkp_check_mem_access (tree first
, tree last
, tree bounds
,
1458 gimple_stmt_iterator iter
,
1459 location_t location
,
1462 chkp_check_lower (first
, bounds
, iter
, location
, dirflag
);
1463 chkp_check_upper (last
, bounds
, iter
, location
, dirflag
);
1466 /* Replace call to _bnd_chk_* pointed by GSI with
1467 bndcu and bndcl calls. DIRFLAG determines whether
1468 check is for read or write. */
1471 chkp_replace_address_check_builtin (gimple_stmt_iterator
*gsi
,
1474 gimple_stmt_iterator call_iter
= *gsi
;
1475 gimple call
= gsi_stmt (*gsi
);
1476 tree fndecl
= gimple_call_fndecl (call
);
1477 tree addr
= gimple_call_arg (call
, 0);
1478 tree bounds
= chkp_find_bounds (addr
, gsi
);
1480 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1481 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1482 chkp_check_lower (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1484 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
)
1485 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1487 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
)
1489 tree size
= gimple_call_arg (call
, 1);
1490 addr
= fold_build_pointer_plus (addr
, size
);
1491 addr
= fold_build_pointer_plus_hwi (addr
, -1);
1492 chkp_check_upper (addr
, bounds
, *gsi
, gimple_location (call
), dirflag
);
1495 gsi_remove (&call_iter
, true);
1498 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1499 corresponding bounds extract call. */
1502 chkp_replace_extract_builtin (gimple_stmt_iterator
*gsi
)
1504 gimple call
= gsi_stmt (*gsi
);
1505 tree fndecl
= gimple_call_fndecl (call
);
1506 tree addr
= gimple_call_arg (call
, 0);
1507 tree bounds
= chkp_find_bounds (addr
, gsi
);
1510 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
)
1511 fndecl
= chkp_extract_lower_fndecl
;
1512 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
)
1513 fndecl
= chkp_extract_upper_fndecl
;
1517 extract
= gimple_build_call (fndecl
, 1, bounds
);
1518 gimple_call_set_lhs (extract
, gimple_call_lhs (call
));
1519 chkp_mark_stmt (extract
);
1521 gsi_replace (gsi
, extract
, false);
1524 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1526 chkp_build_component_ref (tree obj
, tree field
)
1530 /* If object is TMR then we do not use component_ref but
1531 add offset instead. We need it to be able to get addr
1532 of the reasult later. */
1533 if (TREE_CODE (obj
) == TARGET_MEM_REF
)
1535 tree offs
= TMR_OFFSET (obj
);
1536 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1537 offs
, DECL_FIELD_OFFSET (field
));
1541 res
= copy_node (obj
);
1542 TREE_TYPE (res
) = TREE_TYPE (field
);
1543 TMR_OFFSET (res
) = offs
;
1546 res
= build3 (COMPONENT_REF
, TREE_TYPE (field
), obj
, field
, NULL_TREE
);
1551 /* Return ARRAY_REF for array ARR and index IDX with
1552 specified element type ETYPE and element size ESIZE. */
1554 chkp_build_array_ref (tree arr
, tree etype
, tree esize
,
1555 unsigned HOST_WIDE_INT idx
)
1557 tree index
= build_int_cst (size_type_node
, idx
);
1560 /* If object is TMR then we do not use array_ref but
1561 add offset instead. We need it to be able to get addr
1562 of the reasult later. */
1563 if (TREE_CODE (arr
) == TARGET_MEM_REF
)
1565 tree offs
= TMR_OFFSET (arr
);
1567 esize
= fold_binary_to_constant (MULT_EXPR
, TREE_TYPE (esize
),
1571 offs
= fold_binary_to_constant (PLUS_EXPR
, TREE_TYPE (offs
),
1575 res
= copy_node (arr
);
1576 TREE_TYPE (res
) = etype
;
1577 TMR_OFFSET (res
) = offs
;
1580 res
= build4 (ARRAY_REF
, etype
, arr
, index
, NULL_TREE
, NULL_TREE
);
1585 /* Helper function for chkp_add_bounds_to_call_stmt.
1586 Fill ALL_BOUNDS output array with created bounds.
1588 OFFS is used for recursive calls and holds basic
1589 offset of TYPE in outer structure in bits.
1591 ITER points a position where bounds are searched.
1593 ALL_BOUNDS[i] is filled with elem bounds if there
1594 is a field in TYPE which has pointer type and offset
1595 equal to i * POINTER_SIZE in bits. */
1597 chkp_find_bounds_for_elem (tree elem
, tree
*all_bounds
,
1599 gimple_stmt_iterator
*iter
)
1601 tree type
= TREE_TYPE (elem
);
1603 if (BOUNDED_TYPE_P (type
))
1605 if (!all_bounds
[offs
/ POINTER_SIZE
])
1607 tree temp
= make_temp_ssa_name (type
, NULL
, "");
1608 gimple assign
= gimple_build_assign (temp
, elem
);
1609 gimple_stmt_iterator gsi
;
1611 gsi_insert_before (iter
, assign
, GSI_SAME_STMT
);
1612 gsi
= gsi_for_stmt (assign
);
1614 all_bounds
[offs
/ POINTER_SIZE
] = chkp_find_bounds (temp
, &gsi
);
1617 else if (RECORD_OR_UNION_TYPE_P (type
))
1621 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1622 if (TREE_CODE (field
) == FIELD_DECL
)
1624 tree base
= unshare_expr (elem
);
1625 tree field_ref
= chkp_build_component_ref (base
, field
);
1626 HOST_WIDE_INT field_offs
1627 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1628 if (DECL_FIELD_OFFSET (field
))
1629 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1631 chkp_find_bounds_for_elem (field_ref
, all_bounds
,
1632 offs
+ field_offs
, iter
);
1635 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1637 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1638 tree etype
= TREE_TYPE (type
);
1639 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1640 unsigned HOST_WIDE_INT cur
;
1642 if (!maxval
|| integer_minus_onep (maxval
))
1645 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1647 tree base
= unshare_expr (elem
);
1648 tree arr_elem
= chkp_build_array_ref (base
, etype
,
1651 chkp_find_bounds_for_elem (arr_elem
, all_bounds
, offs
+ cur
* esize
,
1657 /* Fill HAVE_BOUND output bitmap with information about
1658 bounds requred for object of type TYPE.
1660 OFFS is used for recursive calls and holds basic
1661 offset of TYPE in outer structure in bits.
1663 HAVE_BOUND[i] is set to 1 if there is a field
1664 in TYPE which has pointer type and offset
1665 equal to i * POINTER_SIZE - OFFS in bits. */
1667 chkp_find_bound_slots_1 (const_tree type
, bitmap have_bound
,
1670 if (BOUNDED_TYPE_P (type
))
1671 bitmap_set_bit (have_bound
, offs
/ POINTER_SIZE
);
1672 else if (RECORD_OR_UNION_TYPE_P (type
))
1676 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
1677 if (TREE_CODE (field
) == FIELD_DECL
)
1679 HOST_WIDE_INT field_offs
1680 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field
));
1681 if (DECL_FIELD_OFFSET (field
))
1682 field_offs
+= TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field
)) * 8;
1683 chkp_find_bound_slots_1 (TREE_TYPE (field
), have_bound
,
1687 else if (TREE_CODE (type
) == ARRAY_TYPE
)
1689 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
1690 tree etype
= TREE_TYPE (type
);
1691 HOST_WIDE_INT esize
= TREE_INT_CST_LOW (TYPE_SIZE (etype
));
1692 unsigned HOST_WIDE_INT cur
;
1695 || TREE_CODE (maxval
) != INTEGER_CST
1696 || integer_minus_onep (maxval
))
1699 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
1700 chkp_find_bound_slots_1 (etype
, have_bound
, offs
+ cur
* esize
);
1704 /* Fill bitmap RES with information about bounds for
1705 type TYPE. See chkp_find_bound_slots_1 for more
1708 chkp_find_bound_slots (const_tree type
, bitmap res
)
1711 chkp_find_bound_slots_1 (type
, res
, 0);
1714 /* Return 1 if call to FNDECL should be instrumented
1718 chkp_instrument_normal_builtin (tree fndecl
)
1720 switch (DECL_FUNCTION_CODE (fndecl
))
1722 case BUILT_IN_STRLEN
:
1723 case BUILT_IN_STRCPY
:
1724 case BUILT_IN_STRNCPY
:
1725 case BUILT_IN_STPCPY
:
1726 case BUILT_IN_STPNCPY
:
1727 case BUILT_IN_STRCAT
:
1728 case BUILT_IN_STRNCAT
:
1729 case BUILT_IN_MEMCPY
:
1730 case BUILT_IN_MEMPCPY
:
1731 case BUILT_IN_MEMSET
:
1732 case BUILT_IN_MEMMOVE
:
1733 case BUILT_IN_BZERO
:
1734 case BUILT_IN_STRCMP
:
1735 case BUILT_IN_STRNCMP
:
1737 case BUILT_IN_MEMCMP
:
1738 case BUILT_IN_MEMCPY_CHK
:
1739 case BUILT_IN_MEMPCPY_CHK
:
1740 case BUILT_IN_MEMMOVE_CHK
:
1741 case BUILT_IN_MEMSET_CHK
:
1742 case BUILT_IN_STRCPY_CHK
:
1743 case BUILT_IN_STRNCPY_CHK
:
1744 case BUILT_IN_STPCPY_CHK
:
1745 case BUILT_IN_STPNCPY_CHK
:
1746 case BUILT_IN_STRCAT_CHK
:
1747 case BUILT_IN_STRNCAT_CHK
:
1748 case BUILT_IN_MALLOC
:
1749 case BUILT_IN_CALLOC
:
1750 case BUILT_IN_REALLOC
:
1758 /* Add bound arguments to call statement pointed by GSI.
1759 Also performs a replacement of user checker builtins calls
1760 with internal ones. */
1763 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator
*gsi
)
1765 gcall
*call
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1766 unsigned arg_no
= 0;
1767 tree fndecl
= gimple_call_fndecl (call
);
1769 tree first_formal_arg
;
1771 bool use_fntype
= false;
1776 /* Do nothing for internal functions. */
1777 if (gimple_call_internal_p (call
))
1780 fntype
= TREE_TYPE (TREE_TYPE (gimple_call_fn (call
)));
1782 /* Do nothing if back-end builtin is called. */
1783 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
1786 /* Do nothing for some middle-end builtins. */
1787 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1788 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_OBJECT_SIZE
)
1791 /* Do nothing for calls to not instrumentable functions. */
1792 if (fndecl
&& !chkp_instrumentable_p (fndecl
))
1795 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1796 and CHKP_COPY_PTR_BOUNDS. */
1797 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1798 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1799 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1800 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1801 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
))
1804 /* Check user builtins are replaced with checks. */
1805 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1806 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1807 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1808 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS
))
1810 chkp_replace_address_check_builtin (gsi
, integer_minus_one_node
);
1814 /* Check user builtins are replaced with bound extract. */
1815 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1816 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_LBOUND
1817 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_GET_PTR_UBOUND
))
1819 chkp_replace_extract_builtin (gsi
);
1823 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1824 target narrow bounds call. */
1825 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1826 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
1828 tree arg
= gimple_call_arg (call
, 1);
1829 tree bounds
= chkp_find_bounds (arg
, gsi
);
1831 gimple_call_set_fndecl (call
, chkp_narrow_bounds_fndecl
);
1832 gimple_call_set_arg (call
, 1, bounds
);
1838 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1840 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1841 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_STORE_PTR_BOUNDS
)
1843 tree addr
= gimple_call_arg (call
, 0);
1844 tree ptr
= gimple_call_arg (call
, 1);
1845 tree bounds
= chkp_find_bounds (ptr
, gsi
);
1846 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
1848 chkp_build_bndstx (addr
, ptr
, bounds
, gsi
);
1849 gsi_remove (&iter
, true);
1854 if (!flag_chkp_instrument_calls
)
1857 /* We instrument only some subset of builtins. We also instrument
1858 builtin calls to be inlined. */
1860 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
1861 && !chkp_instrument_normal_builtin (fndecl
))
1863 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
1866 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
1868 || !gimple_has_body_p (clone
->decl
))
1872 /* If function decl is available then use it for
1873 formal arguments list. Otherwise use function type. */
1874 if (fndecl
&& DECL_ARGUMENTS (fndecl
))
1875 first_formal_arg
= DECL_ARGUMENTS (fndecl
);
1878 first_formal_arg
= TYPE_ARG_TYPES (fntype
);
1882 /* Fill vector of new call args. */
1883 vec
<tree
> new_args
= vNULL
;
1884 new_args
.create (gimple_call_num_args (call
));
1885 arg
= first_formal_arg
;
1886 for (arg_no
= 0; arg_no
< gimple_call_num_args (call
); arg_no
++)
1888 tree call_arg
= gimple_call_arg (call
, arg_no
);
1891 /* Get arg type using formal argument description
1892 or actual argument type. */
1895 if (TREE_VALUE (arg
) != void_type_node
)
1897 type
= TREE_VALUE (arg
);
1898 arg
= TREE_CHAIN (arg
);
1901 type
= TREE_TYPE (call_arg
);
1904 type
= TREE_TYPE (arg
);
1905 arg
= TREE_CHAIN (arg
);
1908 type
= TREE_TYPE (call_arg
);
1910 new_args
.safe_push (call_arg
);
1912 if (BOUNDED_TYPE_P (type
)
1913 || pass_by_reference (NULL
, TYPE_MODE (type
), type
, true))
1914 new_args
.safe_push (chkp_find_bounds (call_arg
, gsi
));
1915 else if (chkp_type_has_pointer (type
))
1917 HOST_WIDE_INT max_bounds
1918 = TREE_INT_CST_LOW (TYPE_SIZE (type
)) / POINTER_SIZE
;
1919 tree
*all_bounds
= (tree
*)xmalloc (sizeof (tree
) * max_bounds
);
1920 HOST_WIDE_INT bnd_no
;
1922 memset (all_bounds
, 0, sizeof (tree
) * max_bounds
);
1924 chkp_find_bounds_for_elem (call_arg
, all_bounds
, 0, gsi
);
1926 for (bnd_no
= 0; bnd_no
< max_bounds
; bnd_no
++)
1927 if (all_bounds
[bnd_no
])
1928 new_args
.safe_push (all_bounds
[bnd_no
]);
1934 if (new_args
.length () == gimple_call_num_args (call
))
1938 new_call
= gimple_build_call_vec (gimple_op (call
, 1), new_args
);
1939 gimple_call_set_lhs (new_call
, gimple_call_lhs (call
));
1940 gimple_call_copy_flags (new_call
, call
);
1941 gimple_call_set_chain (new_call
, gimple_call_chain (call
));
1943 new_args
.release ();
1945 /* For direct calls fndecl is replaced with instrumented version. */
1948 tree new_decl
= chkp_maybe_create_clone (fndecl
)->decl
;
1949 gimple_call_set_fndecl (new_call
, new_decl
);
1950 gimple_call_set_fntype (new_call
, TREE_TYPE (new_decl
));
1952 /* For indirect call we should fix function pointer type if
1953 pass some bounds. */
1954 else if (new_call
!= call
)
1956 tree type
= gimple_call_fntype (call
);
1957 type
= chkp_copy_function_type_adding_bounds (type
);
1958 gimple_call_set_fntype (new_call
, type
);
1961 /* replace old call statement with the new one. */
1962 if (call
!= new_call
)
1964 FOR_EACH_SSA_TREE_OPERAND (op
, call
, iter
, SSA_OP_ALL_DEFS
)
1966 SSA_NAME_DEF_STMT (op
) = new_call
;
1968 gsi_replace (gsi
, new_call
, true);
1971 update_stmt (new_call
);
1973 gimple_call_set_with_bounds (new_call
, true);
1976 /* Return constant static bounds var with specified bounds LB and UB.
1977 If such var does not exists then new var is created with specified NAME. */
1979 chkp_make_static_const_bounds (HOST_WIDE_INT lb
,
1983 tree id
= get_identifier (name
);
1988 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, id
,
1989 pointer_bounds_type_node
);
1990 TREE_STATIC (var
) = 1;
1991 TREE_PUBLIC (var
) = 1;
1993 /* With LTO we may have constant bounds already in varpool.
1995 if ((snode
= symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var
))))
1997 /* We don't allow this symbol usage for non bounds. */
1998 if (snode
->type
!= SYMTAB_VARIABLE
1999 || !POINTER_BOUNDS_P (snode
->decl
))
2000 sorry ("-fcheck-pointer-bounds requires '%s' "
2001 "name for internal usage",
2002 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var
)));
2007 TREE_USED (var
) = 1;
2008 TREE_READONLY (var
) = 1;
2009 TREE_ADDRESSABLE (var
) = 0;
2010 DECL_ARTIFICIAL (var
) = 1;
2011 DECL_READ_P (var
) = 1;
2012 DECL_INITIAL (var
) = targetm
.chkp_make_bounds_constant (lb
, ub
);
2013 make_decl_one_only (var
, DECL_ASSEMBLER_NAME (var
));
2014 /* We may use this symbol during ctors generation in chkp_finish_file
2015 when all symbols are emitted. Force output to avoid undefined
2016 symbols in ctors. */
2017 node
= varpool_node::get_create (var
);
2018 node
->force_output
= 1;
2020 varpool_node::finalize_decl (var
);
2025 /* Generate code to make bounds with specified lower bound LB and SIZE.
2026 if AFTER is 1 then code is inserted after position pointed by ITER
2027 otherwise code is inserted before position pointed by ITER.
2028 If ITER is NULL then code is added to entry block. */
2030 chkp_make_bounds (tree lb
, tree size
, gimple_stmt_iterator
*iter
, bool after
)
2033 gimple_stmt_iterator gsi
;
2040 gsi
= gsi_start_bb (chkp_get_entry_block ());
2044 lb
= chkp_force_gimple_call_op (lb
, &seq
);
2045 size
= chkp_force_gimple_call_op (size
, &seq
);
2047 stmt
= gimple_build_call (chkp_bndmk_fndecl
, 2, lb
, size
);
2048 chkp_mark_stmt (stmt
);
2050 bounds
= chkp_get_tmp_reg (stmt
);
2051 gimple_call_set_lhs (stmt
, bounds
);
2053 gimple_seq_add_stmt (&seq
, stmt
);
2056 gsi_insert_seq_after (&gsi
, seq
, GSI_SAME_STMT
);
2058 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
2060 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2062 fprintf (dump_file
, "Made bounds: ");
2063 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2066 fprintf (dump_file
, " inserted before statement: ");
2067 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0, TDF_VOPS
|TDF_MEMSYMS
);
2070 fprintf (dump_file
, " at function entry\n");
2073 /* update_stmt (stmt); */
2078 /* Return var holding zero bounds. */
2080 chkp_get_zero_bounds_var (void)
2082 if (!chkp_zero_bounds_var
)
2083 chkp_zero_bounds_var
2084 = chkp_make_static_const_bounds (0, -1,
2085 CHKP_ZERO_BOUNDS_VAR_NAME
);
2086 return chkp_zero_bounds_var
;
2089 /* Return var holding none bounds. */
2091 chkp_get_none_bounds_var (void)
2093 if (!chkp_none_bounds_var
)
2094 chkp_none_bounds_var
2095 = chkp_make_static_const_bounds (-1, 0,
2096 CHKP_NONE_BOUNDS_VAR_NAME
);
2097 return chkp_none_bounds_var
;
2100 /* Return SSA_NAME used to represent zero bounds. */
2102 chkp_get_zero_bounds (void)
2107 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2108 fprintf (dump_file
, "Creating zero bounds...");
2110 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2111 || flag_chkp_use_static_const_bounds
> 0)
2113 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2116 zero_bounds
= chkp_get_tmp_reg (NULL
);
2117 stmt
= gimple_build_assign (zero_bounds
, chkp_get_zero_bounds_var ());
2118 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2121 zero_bounds
= chkp_make_bounds (integer_zero_node
,
2129 /* Return SSA_NAME used to represent none bounds. */
2131 chkp_get_none_bounds (void)
2136 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2137 fprintf (dump_file
, "Creating none bounds...");
2140 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
2141 || flag_chkp_use_static_const_bounds
> 0)
2143 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
2146 none_bounds
= chkp_get_tmp_reg (NULL
);
2147 stmt
= gimple_build_assign (none_bounds
, chkp_get_none_bounds_var ());
2148 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
2151 none_bounds
= chkp_make_bounds (integer_minus_one_node
,
2152 build_int_cst (size_type_node
, 2),
2159 /* Return bounds to be used as a result of operation which
2160 should not create poiunter (e.g. MULT_EXPR). */
2162 chkp_get_invalid_op_bounds (void)
2164 return chkp_get_zero_bounds ();
2167 /* Return bounds to be used for loads of non-pointer values. */
2169 chkp_get_nonpointer_load_bounds (void)
2171 return chkp_get_zero_bounds ();
2174 /* Return 1 if may use bndret call to get bounds for pointer
2175 returned by CALL. */
2177 chkp_call_returns_bounds_p (gcall
*call
)
2179 if (gimple_call_internal_p (call
))
2182 if (gimple_call_builtin_p (call
, BUILT_IN_CHKP_NARROW_PTR_BOUNDS
)
2183 || chkp_gimple_call_builtin_p (call
, BUILT_IN_CHKP_NARROW
))
2186 if (gimple_call_with_bounds_p (call
))
2189 tree fndecl
= gimple_call_fndecl (call
);
2191 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
2194 if (fndecl
&& !chkp_instrumentable_p (fndecl
))
2197 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2199 if (chkp_instrument_normal_builtin (fndecl
))
2202 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl
)))
2205 struct cgraph_node
*clone
= chkp_maybe_create_clone (fndecl
);
2206 return (clone
&& gimple_has_body_p (clone
->decl
));
2212 /* Build bounds returned by CALL. */
2214 chkp_build_returned_bound (gcall
*call
)
2216 gimple_stmt_iterator gsi
;
2219 tree fndecl
= gimple_call_fndecl (call
);
2220 unsigned int retflags
;
2222 /* To avoid fixing alloca expands in targets we handle
2225 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2226 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
2227 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2229 tree size
= gimple_call_arg (call
, 0);
2230 tree lb
= gimple_call_lhs (call
);
2231 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2232 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2234 /* We know bounds returned by set_bounds builtin call. */
2236 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2237 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_SET_PTR_BOUNDS
)
2239 tree lb
= gimple_call_arg (call
, 0);
2240 tree size
= gimple_call_arg (call
, 1);
2241 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2242 bounds
= chkp_make_bounds (lb
, size
, &iter
, true);
2244 /* Detect bounds initialization calls. */
2246 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2247 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
)
2248 bounds
= chkp_get_zero_bounds ();
2249 /* Detect bounds nullification calls. */
2251 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2252 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
)
2253 bounds
= chkp_get_none_bounds ();
2254 /* Detect bounds copy calls. */
2256 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2257 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
2259 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2260 bounds
= chkp_find_bounds (gimple_call_arg (call
, 1), &iter
);
2262 /* Do not use retbnd when returned bounds are equal to some
2263 of passed bounds. */
2264 else if (((retflags
= gimple_call_return_flags (call
)) & ERF_RETURNS_ARG
)
2265 && (retflags
& ERF_RETURN_ARG_MASK
) < gimple_call_num_args (call
))
2267 gimple_stmt_iterator iter
= gsi_for_stmt (call
);
2268 unsigned int retarg
= retflags
& ERF_RETURN_ARG_MASK
, argno
;
2269 if (gimple_call_with_bounds_p (call
))
2271 for (argno
= 0; argno
< gimple_call_num_args (call
); argno
++)
2272 if (!POINTER_BOUNDS_P (gimple_call_arg (call
, argno
)))
2283 bounds
= chkp_find_bounds (gimple_call_arg (call
, argno
), &iter
);
2285 else if (chkp_call_returns_bounds_p (call
))
2287 gcc_assert (TREE_CODE (gimple_call_lhs (call
)) == SSA_NAME
);
2289 /* In general case build checker builtin call to
2290 obtain returned bounds. */
2291 stmt
= gimple_build_call (chkp_ret_bnd_fndecl
, 1,
2292 gimple_call_lhs (call
));
2293 chkp_mark_stmt (stmt
);
2295 gsi
= gsi_for_stmt (call
);
2296 gsi_insert_after (&gsi
, stmt
, GSI_SAME_STMT
);
2298 bounds
= chkp_get_tmp_reg (stmt
);
2299 gimple_call_set_lhs (stmt
, bounds
);
2304 bounds
= chkp_get_zero_bounds ();
2306 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2308 fprintf (dump_file
, "Built returned bounds (");
2309 print_generic_expr (dump_file
, bounds
, 0);
2310 fprintf (dump_file
, ") for call: ");
2311 print_gimple_stmt (dump_file
, call
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2314 bounds
= chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call
), bounds
);
2319 /* Return bounds used as returned by call
2320 which produced SSA name VAL. */
2322 chkp_retbnd_call_by_val (tree val
)
2324 if (TREE_CODE (val
) != SSA_NAME
)
2327 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val
)) == GIMPLE_CALL
);
2329 imm_use_iterator use_iter
;
2330 use_operand_p use_p
;
2331 FOR_EACH_IMM_USE_FAST (use_p
, use_iter
, val
)
2332 if (gimple_code (USE_STMT (use_p
)) == GIMPLE_CALL
2333 && gimple_call_fndecl (USE_STMT (use_p
)) == chkp_ret_bnd_fndecl
)
2334 return as_a
<gcall
*> (USE_STMT (use_p
));
2339 /* Check the next parameter for the given PARM is bounds
2340 and return it's default SSA_NAME (create if required). */
2342 chkp_get_next_bounds_parm (tree parm
)
2344 tree bounds
= TREE_CHAIN (parm
);
2345 gcc_assert (POINTER_BOUNDS_P (bounds
));
2346 bounds
= ssa_default_def (cfun
, bounds
);
2349 bounds
= make_ssa_name (TREE_CHAIN (parm
), gimple_build_nop ());
2350 set_ssa_default_def (cfun
, TREE_CHAIN (parm
), bounds
);
2355 /* Return bounds to be used for input argument PARM. */
2357 chkp_get_bound_for_parm (tree parm
)
2359 tree decl
= SSA_NAME_VAR (parm
);
2362 gcc_assert (TREE_CODE (decl
) == PARM_DECL
);
2364 bounds
= chkp_get_registered_bounds (parm
);
2367 bounds
= chkp_get_registered_bounds (decl
);
2371 tree orig_decl
= cgraph_node::get (cfun
->decl
)->orig_decl
;
2373 /* For static chain param we return zero bounds
2374 because currently we do not check dereferences
2376 if (cfun
->static_chain_decl
== decl
)
2377 bounds
= chkp_get_zero_bounds ();
2378 /* If non instrumented runtime is used then it may be useful
2379 to use zero bounds for input arguments of main
2381 else if (flag_chkp_zero_input_bounds_for_main
2382 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl
)),
2384 bounds
= chkp_get_zero_bounds ();
2385 else if (BOUNDED_P (parm
))
2387 bounds
= chkp_get_next_bounds_parm (decl
);
2388 bounds
= chkp_maybe_copy_and_register_bounds (decl
, bounds
);
2390 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2392 fprintf (dump_file
, "Built arg bounds (");
2393 print_generic_expr (dump_file
, bounds
, 0);
2394 fprintf (dump_file
, ") for arg: ");
2395 print_node (dump_file
, "", decl
, 0);
2399 bounds
= chkp_get_zero_bounds ();
2402 if (!chkp_get_registered_bounds (parm
))
2403 bounds
= chkp_maybe_copy_and_register_bounds (parm
, bounds
);
2405 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2407 fprintf (dump_file
, "Using bounds ");
2408 print_generic_expr (dump_file
, bounds
, 0);
2409 fprintf (dump_file
, " for parm ");
2410 print_generic_expr (dump_file
, parm
, 0);
2411 fprintf (dump_file
, " of type ");
2412 print_generic_expr (dump_file
, TREE_TYPE (parm
), 0);
2413 fprintf (dump_file
, ".\n");
2419 /* Build and return CALL_EXPR for bndstx builtin with specified
2422 chkp_build_bndldx_call (tree addr
, tree ptr
)
2424 tree fn
= build1 (ADDR_EXPR
,
2425 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl
)),
2426 chkp_bndldx_fndecl
);
2427 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl
)),
2429 CALL_WITH_BOUNDS_P (call
) = true;
2433 /* Insert code to load bounds for PTR located by ADDR.
2434 Code is inserted after position pointed by GSI.
2435 Loaded bounds are returned. */
2437 chkp_build_bndldx (tree addr
, tree ptr
, gimple_stmt_iterator
*gsi
)
2445 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2446 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2448 stmt
= gimple_build_call (chkp_bndldx_fndecl
, 2, addr
, ptr
);
2449 chkp_mark_stmt (stmt
);
2450 bounds
= chkp_get_tmp_reg (stmt
);
2451 gimple_call_set_lhs (stmt
, bounds
);
2453 gimple_seq_add_stmt (&seq
, stmt
);
2455 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2457 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2459 fprintf (dump_file
, "Generated bndldx for pointer ");
2460 print_generic_expr (dump_file
, ptr
, 0);
2461 fprintf (dump_file
, ": ");
2462 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2468 /* Build and return CALL_EXPR for bndstx builtin with specified
2471 chkp_build_bndstx_call (tree addr
, tree ptr
, tree bounds
)
2473 tree fn
= build1 (ADDR_EXPR
,
2474 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl
)),
2475 chkp_bndstx_fndecl
);
2476 tree call
= build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl
)),
2477 fn
, 3, ptr
, bounds
, addr
);
2478 CALL_WITH_BOUNDS_P (call
) = true;
2482 /* Insert code to store BOUNDS for PTR stored by ADDR.
2483 New statements are inserted after position pointed
2486 chkp_build_bndstx (tree addr
, tree ptr
, tree bounds
,
2487 gimple_stmt_iterator
*gsi
)
2494 addr
= chkp_force_gimple_call_op (addr
, &seq
);
2495 ptr
= chkp_force_gimple_call_op (ptr
, &seq
);
2497 stmt
= gimple_build_call (chkp_bndstx_fndecl
, 3, ptr
, bounds
, addr
);
2498 chkp_mark_stmt (stmt
);
2499 gimple_call_set_with_bounds (stmt
, true);
2501 gimple_seq_add_stmt (&seq
, stmt
);
2503 gsi_insert_seq_after (gsi
, seq
, GSI_CONTINUE_LINKING
);
2505 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2507 fprintf (dump_file
, "Generated bndstx for pointer store ");
2508 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_VOPS
|TDF_MEMSYMS
);
2509 print_gimple_stmt (dump_file
, stmt
, 2, TDF_VOPS
|TDF_MEMSYMS
);
2513 /* Compute bounds for pointer NODE which was assigned in
2514 assignment statement ASSIGN. Return computed bounds. */
2516 chkp_compute_bounds_for_assignment (tree node
, gimple assign
)
2518 enum tree_code rhs_code
= gimple_assign_rhs_code (assign
);
2519 tree rhs1
= gimple_assign_rhs1 (assign
);
2520 tree bounds
= NULL_TREE
;
2521 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
2523 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2525 fprintf (dump_file
, "Computing bounds for assignment: ");
2526 print_gimple_stmt (dump_file
, assign
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2532 case TARGET_MEM_REF
:
2535 /* We need to load bounds from the bounds table. */
2536 bounds
= chkp_find_bounds_loaded (node
, rhs1
, &iter
);
2542 case POINTER_PLUS_EXPR
:
2546 /* Bounds are just propagated from RHS. */
2547 bounds
= chkp_find_bounds (rhs1
, &iter
);
2550 case VIEW_CONVERT_EXPR
:
2551 /* Bounds are just propagated from RHS. */
2552 bounds
= chkp_find_bounds (TREE_OPERAND (rhs1
, 0), &iter
);
2556 if (BOUNDED_P (rhs1
))
2558 /* We need to load bounds from the bounds table. */
2559 bounds
= chkp_build_bndldx (chkp_build_addr_expr (rhs1
),
2561 TREE_ADDRESSABLE (rhs1
) = 1;
2564 bounds
= chkp_get_nonpointer_load_bounds ();
2573 tree rhs2
= gimple_assign_rhs2 (assign
);
2574 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2575 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2577 /* First we try to check types of operands. If it
2578 does not help then look at bound values.
2580 If some bounds are incomplete and other are
2581 not proven to be valid (i.e. also incomplete
2582 or invalid because value is not pointer) then
2583 resulting value is incomplete and will be
2584 recomputed later in chkp_finish_incomplete_bounds. */
2585 if (BOUNDED_P (rhs1
)
2586 && !BOUNDED_P (rhs2
))
2588 else if (BOUNDED_P (rhs2
)
2589 && !BOUNDED_P (rhs1
)
2590 && rhs_code
!= MINUS_EXPR
)
2592 else if (chkp_incomplete_bounds (bnd1
))
2593 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
2594 && !chkp_incomplete_bounds (bnd2
))
2597 bounds
= incomplete_bounds
;
2598 else if (chkp_incomplete_bounds (bnd2
))
2599 if (chkp_valid_bounds (bnd1
)
2600 && !chkp_incomplete_bounds (bnd1
))
2603 bounds
= incomplete_bounds
;
2604 else if (!chkp_valid_bounds (bnd1
))
2605 if (chkp_valid_bounds (bnd2
) && rhs_code
!= MINUS_EXPR
)
2607 else if (bnd2
== chkp_get_zero_bounds ())
2611 else if (!chkp_valid_bounds (bnd2
))
2614 /* Seems both operands may have valid bounds
2615 (e.g. pointer minus pointer). In such case
2616 use default invalid op bounds. */
2617 bounds
= chkp_get_invalid_op_bounds ();
2635 case TRUNC_DIV_EXPR
:
2636 case FLOOR_DIV_EXPR
:
2638 case ROUND_DIV_EXPR
:
2639 case TRUNC_MOD_EXPR
:
2640 case FLOOR_MOD_EXPR
:
2642 case ROUND_MOD_EXPR
:
2643 case EXACT_DIV_EXPR
:
2644 case FIX_TRUNC_EXPR
:
2648 /* No valid bounds may be produced by these exprs. */
2649 bounds
= chkp_get_invalid_op_bounds ();
2654 tree val1
= gimple_assign_rhs2 (assign
);
2655 tree val2
= gimple_assign_rhs3 (assign
);
2656 tree bnd1
= chkp_find_bounds (val1
, &iter
);
2657 tree bnd2
= chkp_find_bounds (val2
, &iter
);
2660 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2661 bounds
= incomplete_bounds
;
2662 else if (bnd1
== bnd2
)
2666 rhs1
= unshare_expr (rhs1
);
2668 bounds
= chkp_get_tmp_reg (assign
);
2669 stmt
= gimple_build_assign (bounds
, COND_EXPR
, rhs1
, bnd1
, bnd2
);
2670 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2672 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2673 chkp_mark_invalid_bounds (bounds
);
2681 tree rhs2
= gimple_assign_rhs2 (assign
);
2682 tree bnd1
= chkp_find_bounds (rhs1
, &iter
);
2683 tree bnd2
= chkp_find_bounds (rhs2
, &iter
);
2685 if (chkp_incomplete_bounds (bnd1
) || chkp_incomplete_bounds (bnd2
))
2686 bounds
= incomplete_bounds
;
2687 else if (bnd1
== bnd2
)
2692 tree cond
= build2 (rhs_code
== MAX_EXPR
? GT_EXPR
: LT_EXPR
,
2693 boolean_type_node
, rhs1
, rhs2
);
2694 bounds
= chkp_get_tmp_reg (assign
);
2695 stmt
= gimple_build_assign (bounds
, COND_EXPR
, cond
, bnd1
, bnd2
);
2697 gsi_insert_after (&iter
, stmt
, GSI_SAME_STMT
);
2699 if (!chkp_valid_bounds (bnd1
) && !chkp_valid_bounds (bnd2
))
2700 chkp_mark_invalid_bounds (bounds
);
2706 bounds
= chkp_get_zero_bounds ();
2707 warning (0, "pointer bounds were lost due to unexpected expression %s",
2708 get_tree_code_name (rhs_code
));
2711 gcc_assert (bounds
);
2714 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2719 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2721 There are just few statement codes allowed: NOP (for default ssa names),
2722 ASSIGN, CALL, PHI, ASM.
2724 Return computed bounds. */
2726 chkp_get_bounds_by_definition (tree node
, gimple def_stmt
,
2727 gphi_iterator
*iter
)
2730 enum gimple_code code
= gimple_code (def_stmt
);
2733 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2735 fprintf (dump_file
, "Searching for bounds for node: ");
2736 print_generic_expr (dump_file
, node
, 0);
2738 fprintf (dump_file
, " using its definition: ");
2739 print_gimple_stmt (dump_file
, def_stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
2745 var
= SSA_NAME_VAR (node
);
2746 switch (TREE_CODE (var
))
2749 bounds
= chkp_get_bound_for_parm (node
);
2753 /* For uninitialized pointers use none bounds. */
2754 bounds
= chkp_get_none_bounds ();
2755 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2762 gcc_assert (TREE_CODE (TREE_TYPE (node
)) == REFERENCE_TYPE
);
2764 base_type
= TREE_TYPE (TREE_TYPE (node
));
2766 gcc_assert (TYPE_SIZE (base_type
)
2767 && TREE_CODE (TYPE_SIZE (base_type
)) == INTEGER_CST
2768 && tree_to_uhwi (TYPE_SIZE (base_type
)) != 0);
2770 bounds
= chkp_make_bounds (node
, TYPE_SIZE_UNIT (base_type
),
2772 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2777 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2779 fprintf (dump_file
, "Unexpected var with no definition\n");
2780 print_generic_expr (dump_file
, var
, 0);
2782 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2783 get_tree_code_name (TREE_CODE (var
)));
2788 bounds
= chkp_compute_bounds_for_assignment (node
, def_stmt
);
2792 bounds
= chkp_build_returned_bound (as_a
<gcall
*> (def_stmt
));
2796 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node
))
2797 if (SSA_NAME_VAR (node
))
2798 var
= chkp_get_bounds_var (SSA_NAME_VAR (node
));
2800 var
= make_temp_ssa_name (pointer_bounds_type_node
,
2802 CHKP_BOUND_TMP_NAME
);
2804 var
= chkp_get_tmp_var ();
2805 stmt
= create_phi_node (var
, gimple_bb (def_stmt
));
2806 bounds
= gimple_phi_result (stmt
);
2807 *iter
= gsi_for_phi (stmt
);
2809 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2811 /* Created bounds do not have all phi args computed and
2812 therefore we do not know if there is a valid source
2813 of bounds for that node. Therefore we mark bounds
2814 as incomplete and then recompute them when all phi
2815 args are computed. */
2816 chkp_register_incomplete_bounds (bounds
, node
);
2820 bounds
= chkp_get_zero_bounds ();
2821 bounds
= chkp_maybe_copy_and_register_bounds (node
, bounds
);
2825 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2826 gimple_code_name
[code
]);
2832 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2834 chkp_build_make_bounds_call (tree lower_bound
, tree size
)
2836 tree call
= build1 (ADDR_EXPR
,
2837 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl
)),
2839 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl
)),
2840 call
, 2, lower_bound
, size
);
2843 /* Create static bounds var of specfified OBJ which is
2844 is either VAR_DECL or string constant. */
2846 chkp_make_static_bounds (tree obj
)
2848 static int string_id
= 1;
2849 static int var_id
= 1;
2851 const char *var_name
;
2855 /* First check if we already have required var. */
2856 if (chkp_static_var_bounds
)
2858 /* For vars we use assembler name as a key in
2859 chkp_static_var_bounds map. It allows to
2860 avoid duplicating bound vars for decls
2861 sharing assembler name. */
2862 if (TREE_CODE (obj
) == VAR_DECL
)
2864 tree name
= DECL_ASSEMBLER_NAME (obj
);
2865 slot
= chkp_static_var_bounds
->get (name
);
2871 slot
= chkp_static_var_bounds
->get (obj
);
2877 /* Build decl for bounds var. */
2878 if (TREE_CODE (obj
) == VAR_DECL
)
2880 if (DECL_IGNORED_P (obj
))
2882 bnd_var_name
= (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX
) + 10);
2883 sprintf (bnd_var_name
, "%s%d", CHKP_VAR_BOUNDS_PREFIX
, var_id
++);
2887 var_name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj
));
2889 /* For hidden symbols we want to skip first '*' char. */
2890 if (*var_name
== '*')
2893 bnd_var_name
= (char *) xmalloc (strlen (var_name
)
2894 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX
) + 1);
2895 strcpy (bnd_var_name
, CHKP_BOUNDS_OF_SYMBOL_PREFIX
);
2896 strcat (bnd_var_name
, var_name
);
2899 bnd_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
2900 get_identifier (bnd_var_name
),
2901 pointer_bounds_type_node
);
2903 /* Address of the obj will be used as lower bound. */
2904 TREE_ADDRESSABLE (obj
) = 1;
2908 bnd_var_name
= (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX
) + 10);
2909 sprintf (bnd_var_name
, "%s%d", CHKP_STRING_BOUNDS_PREFIX
, string_id
++);
2911 bnd_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
2912 get_identifier (bnd_var_name
),
2913 pointer_bounds_type_node
);
2916 TREE_PUBLIC (bnd_var
) = 0;
2917 TREE_USED (bnd_var
) = 1;
2918 TREE_READONLY (bnd_var
) = 0;
2919 TREE_STATIC (bnd_var
) = 1;
2920 TREE_ADDRESSABLE (bnd_var
) = 0;
2921 DECL_ARTIFICIAL (bnd_var
) = 1;
2922 DECL_COMMON (bnd_var
) = 1;
2923 DECL_COMDAT (bnd_var
) = 1;
2924 DECL_READ_P (bnd_var
) = 1;
2925 DECL_INITIAL (bnd_var
) = chkp_build_addr_expr (obj
);
2926 /* Force output similar to constant bounds.
2927 See chkp_make_static_const_bounds. */
2928 varpool_node::get_create (bnd_var
)->force_output
= 1;
2929 /* Mark symbol as requiring bounds initialization. */
2930 varpool_node::get_create (bnd_var
)->need_bounds_init
= 1;
2931 varpool_node::finalize_decl (bnd_var
);
2933 /* Add created var to the map to use it for other references
2935 if (!chkp_static_var_bounds
)
2936 chkp_static_var_bounds
= new hash_map
<tree
, tree
>;
2938 if (TREE_CODE (obj
) == VAR_DECL
)
2940 tree name
= DECL_ASSEMBLER_NAME (obj
);
2941 chkp_static_var_bounds
->put (name
, bnd_var
);
2944 chkp_static_var_bounds
->put (obj
, bnd_var
);
2949 /* When var has incomplete type we cannot get size to
2950 compute its bounds. In such cases we use checker
2951 builtin call which determines object size at runtime. */
2953 chkp_generate_extern_var_bounds (tree var
)
2955 tree bounds
, size_reloc
, lb
, size
, max_size
, cond
;
2956 gimple_stmt_iterator gsi
;
2957 gimple_seq seq
= NULL
;
2960 /* If instrumentation is not enabled for vars having
2961 incomplete type then just return zero bounds to avoid
2962 checks for this var. */
2963 if (!flag_chkp_incomplete_type
)
2964 return chkp_get_zero_bounds ();
2966 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2968 fprintf (dump_file
, "Generating bounds for extern symbol '");
2969 print_generic_expr (dump_file
, var
, 0);
2970 fprintf (dump_file
, "'\n");
2973 stmt
= gimple_build_call (chkp_sizeof_fndecl
, 1, var
);
2975 size_reloc
= create_tmp_reg (chkp_uintptr_type
, CHKP_SIZE_TMP_NAME
);
2976 gimple_call_set_lhs (stmt
, size_reloc
);
2978 gimple_seq_add_stmt (&seq
, stmt
);
2980 lb
= chkp_build_addr_expr (var
);
2981 size
= make_ssa_name (chkp_get_size_tmp_var ());
2983 if (flag_chkp_zero_dynamic_size_as_infinite
)
2985 /* We should check that size relocation was resolved.
2986 If it was not then use maximum possible size for the var. */
2987 max_size
= build2 (MINUS_EXPR
, chkp_uintptr_type
, integer_zero_node
,
2988 fold_convert (chkp_uintptr_type
, lb
));
2989 max_size
= chkp_force_gimple_call_op (max_size
, &seq
);
2991 cond
= build2 (NE_EXPR
, boolean_type_node
,
2992 size_reloc
, integer_zero_node
);
2993 stmt
= gimple_build_assign (size
, COND_EXPR
, cond
, size_reloc
, max_size
);
2994 gimple_seq_add_stmt (&seq
, stmt
);
2998 stmt
= gimple_build_assign (size
, size_reloc
);
2999 gimple_seq_add_stmt (&seq
, stmt
);
3002 gsi
= gsi_start_bb (chkp_get_entry_block ());
3003 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
3005 bounds
= chkp_make_bounds (lb
, size
, &gsi
, true);
3010 /* Return 1 if TYPE has fields with zero size or fields
3011 marked with chkp_variable_size attribute. */
3013 chkp_variable_size_type (tree type
)
3018 if (RECORD_OR_UNION_TYPE_P (type
))
3019 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3021 if (TREE_CODE (field
) == FIELD_DECL
)
3023 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
3024 || chkp_variable_size_type (TREE_TYPE (field
));
3027 res
= !TYPE_SIZE (type
)
3028 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
3029 || tree_to_uhwi (TYPE_SIZE (type
)) == 0;
3034 /* Compute and return bounds for address of DECL which is
3035 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3037 chkp_get_bounds_for_decl_addr (tree decl
)
3041 gcc_assert (TREE_CODE (decl
) == VAR_DECL
3042 || TREE_CODE (decl
) == PARM_DECL
3043 || TREE_CODE (decl
) == RESULT_DECL
);
3045 bounds
= chkp_get_registered_addr_bounds (decl
);
3050 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3052 fprintf (dump_file
, "Building bounds for address of decl ");
3053 print_generic_expr (dump_file
, decl
, 0);
3054 fprintf (dump_file
, "\n");
3057 /* Use zero bounds if size is unknown and checks for
3058 unknown sizes are restricted. */
3059 if ((!DECL_SIZE (decl
)
3060 || (chkp_variable_size_type (TREE_TYPE (decl
))
3061 && (TREE_STATIC (decl
)
3062 || DECL_EXTERNAL (decl
)
3063 || TREE_PUBLIC (decl
))))
3064 && !flag_chkp_incomplete_type
)
3065 return chkp_get_zero_bounds ();
3067 if (flag_chkp_use_static_bounds
3068 && TREE_CODE (decl
) == VAR_DECL
3069 && (TREE_STATIC (decl
)
3070 || DECL_EXTERNAL (decl
)
3071 || TREE_PUBLIC (decl
))
3072 && !DECL_THREAD_LOCAL_P (decl
))
3074 tree bnd_var
= chkp_make_static_bounds (decl
);
3075 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3078 bounds
= chkp_get_tmp_reg (NULL
);
3079 stmt
= gimple_build_assign (bounds
, bnd_var
);
3080 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3082 else if (!DECL_SIZE (decl
)
3083 || (chkp_variable_size_type (TREE_TYPE (decl
))
3084 && (TREE_STATIC (decl
)
3085 || DECL_EXTERNAL (decl
)
3086 || TREE_PUBLIC (decl
))))
3088 gcc_assert (TREE_CODE (decl
) == VAR_DECL
);
3089 bounds
= chkp_generate_extern_var_bounds (decl
);
3093 tree lb
= chkp_build_addr_expr (decl
);
3094 bounds
= chkp_make_bounds (lb
, DECL_SIZE_UNIT (decl
), NULL
, false);
3100 /* Compute and return bounds for constant string. */
3102 chkp_get_bounds_for_string_cst (tree cst
)
3108 gcc_assert (TREE_CODE (cst
) == STRING_CST
);
3110 bounds
= chkp_get_registered_bounds (cst
);
3115 if ((flag_chkp_use_static_bounds
&& flag_chkp_use_static_const_bounds
)
3116 || flag_chkp_use_static_const_bounds
> 0)
3118 tree bnd_var
= chkp_make_static_bounds (cst
);
3119 gimple_stmt_iterator gsi
= gsi_start_bb (chkp_get_entry_block ());
3122 bounds
= chkp_get_tmp_reg (NULL
);
3123 stmt
= gimple_build_assign (bounds
, bnd_var
);
3124 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
3128 lb
= chkp_build_addr_expr (cst
);
3129 size
= build_int_cst (chkp_uintptr_type
, TREE_STRING_LENGTH (cst
));
3130 bounds
= chkp_make_bounds (lb
, size
, NULL
, false);
3133 bounds
= chkp_maybe_copy_and_register_bounds (cst
, bounds
);
3138 /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and
3139 return the result. if ITER is not NULL then Code is inserted
3140 before position pointed by ITER. Otherwise code is added to
3143 chkp_intersect_bounds (tree bounds1
, tree bounds2
, gimple_stmt_iterator
*iter
)
3145 if (!bounds1
|| bounds1
== chkp_get_zero_bounds ())
3146 return bounds2
? bounds2
: bounds1
;
3147 else if (!bounds2
|| bounds2
== chkp_get_zero_bounds ())
3157 stmt
= gimple_build_call (chkp_intersect_fndecl
, 2, bounds1
, bounds2
);
3158 chkp_mark_stmt (stmt
);
3160 bounds
= chkp_get_tmp_reg (stmt
);
3161 gimple_call_set_lhs (stmt
, bounds
);
3163 gimple_seq_add_stmt (&seq
, stmt
);
3165 /* We are probably doing narrowing for constant expression.
3166 In such case iter may be undefined. */
3169 gimple_stmt_iterator gsi
= gsi_last_bb (chkp_get_entry_block ());
3171 gsi_insert_seq_after (iter
, seq
, GSI_SAME_STMT
);
3174 gsi_insert_seq_before (iter
, seq
, GSI_SAME_STMT
);
3176 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3178 fprintf (dump_file
, "Bounds intersection: ");
3179 print_gimple_stmt (dump_file
, stmt
, 0, TDF_VOPS
|TDF_MEMSYMS
);
3180 fprintf (dump_file
, " inserted before statement: ");
3181 print_gimple_stmt (dump_file
, gsi_stmt (*iter
), 0,
3182 TDF_VOPS
|TDF_MEMSYMS
);
3189 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3192 chkp_may_narrow_to_field (tree field
)
3194 return DECL_SIZE (field
) && TREE_CODE (DECL_SIZE (field
)) == INTEGER_CST
3195 && tree_to_uhwi (DECL_SIZE (field
)) != 0
3196 && (!DECL_FIELD_OFFSET (field
)
3197 || TREE_CODE (DECL_FIELD_OFFSET (field
)) == INTEGER_CST
)
3198 && (!DECL_FIELD_BIT_OFFSET (field
)
3199 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field
)) == INTEGER_CST
)
3200 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field
))
3201 && !chkp_variable_size_type (TREE_TYPE (field
));
3204 /* Return 1 if bounds for FIELD should be narrowed to
3205 field's own size. */
3207 chkp_narrow_bounds_for_field (tree field
)
3210 HOST_WIDE_INT bit_offs
;
3212 if (!chkp_may_narrow_to_field (field
))
3215 /* Accesse to compiler generated fields should not cause
3216 bounds narrowing. */
3217 if (DECL_ARTIFICIAL (field
))
3220 offs
= tree_to_uhwi (DECL_FIELD_OFFSET (field
));
3221 bit_offs
= tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
3223 return (flag_chkp_narrow_bounds
3224 && (flag_chkp_first_field_has_own_bounds
3229 /* Perform narrowing for BOUNDS using bounds computed for field
3230 access COMPONENT. ITER meaning is the same as for
3231 chkp_intersect_bounds. */
3233 chkp_narrow_bounds_to_field (tree bounds
, tree component
,
3234 gimple_stmt_iterator
*iter
)
3236 tree field
= TREE_OPERAND (component
, 1);
3237 tree size
= DECL_SIZE_UNIT (field
);
3238 tree field_ptr
= chkp_build_addr_expr (component
);
3241 field_bounds
= chkp_make_bounds (field_ptr
, size
, iter
, false);
3243 return chkp_intersect_bounds (field_bounds
, bounds
, iter
);
3246 /* Parse field or array access NODE.
3248 PTR ouput parameter holds a pointer to the outermost
3251 BITFIELD output parameter is set to 1 if bitfield is
3252 accessed and to 0 otherwise. If it is 1 then ELT holds
3253 outer component for accessed bit field.
3255 SAFE outer parameter is set to 1 if access is safe and
3256 checks are not required.
3258 BOUNDS outer parameter holds bounds to be used to check
3259 access (may be NULL).
3261 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3262 innermost accessed component. */
3264 chkp_parse_array_and_component_ref (tree node
, tree
*ptr
,
3265 tree
*elt
, bool *safe
,
3268 gimple_stmt_iterator
*iter
,
3269 bool innermost_bounds
)
3271 tree comp_to_narrow
= NULL_TREE
;
3272 tree last_comp
= NULL_TREE
;
3273 bool array_ref_found
= false;
3279 /* Compute tree height for expression. */
3282 while (TREE_CODE (var
) == COMPONENT_REF
3283 || TREE_CODE (var
) == ARRAY_REF
3284 || TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3286 var
= TREE_OPERAND (var
, 0);
3290 gcc_assert (len
> 1);
3292 /* It is more convenient for us to scan left-to-right,
3293 so walk tree again and put all node to nodes vector
3294 in reversed order. */
3295 nodes
= XALLOCAVEC (tree
, len
);
3296 nodes
[len
- 1] = node
;
3297 for (i
= len
- 2; i
>= 0; i
--)
3298 nodes
[i
] = TREE_OPERAND (nodes
[i
+ 1], 0);
3303 *bitfield
= (TREE_CODE (node
) == COMPONENT_REF
3304 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node
, 1)));
3305 /* To get bitfield address we will need outer elemnt. */
3307 *elt
= nodes
[len
- 2];
3311 /* If we have indirection in expression then compute
3312 outermost structure bounds. Computed bounds may be
3314 if (TREE_CODE (nodes
[0]) == MEM_REF
|| INDIRECT_REF_P (nodes
[0]))
3317 *ptr
= TREE_OPERAND (nodes
[0], 0);
3319 *bounds
= chkp_find_bounds (*ptr
, iter
);
3323 gcc_assert (TREE_CODE (var
) == VAR_DECL
3324 || TREE_CODE (var
) == PARM_DECL
3325 || TREE_CODE (var
) == RESULT_DECL
3326 || TREE_CODE (var
) == STRING_CST
3327 || TREE_CODE (var
) == SSA_NAME
);
3329 *ptr
= chkp_build_addr_expr (var
);
3332 /* In this loop we are trying to find a field access
3333 requiring narrowing. There are two simple rules
3335 1. Leftmost array_ref is chosen if any.
3336 2. Rightmost suitable component_ref is chosen if innermost
3337 bounds are required and no array_ref exists. */
3338 for (i
= 1; i
< len
; i
++)
3342 if (TREE_CODE (var
) == ARRAY_REF
)
3345 array_ref_found
= true;
3346 if (flag_chkp_narrow_bounds
3347 && !flag_chkp_narrow_to_innermost_arrray
3349 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp
, 1))))
3351 comp_to_narrow
= last_comp
;
3355 else if (TREE_CODE (var
) == COMPONENT_REF
)
3357 tree field
= TREE_OPERAND (var
, 1);
3359 if (innermost_bounds
3361 && chkp_narrow_bounds_for_field (field
))
3362 comp_to_narrow
= var
;
3365 if (flag_chkp_narrow_bounds
3366 && flag_chkp_narrow_to_innermost_arrray
3367 && TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
)
3370 *bounds
= chkp_narrow_bounds_to_field (*bounds
, var
, iter
);
3371 comp_to_narrow
= NULL
;
3374 else if (TREE_CODE (var
) == VIEW_CONVERT_EXPR
)
3375 /* Nothing to do for it. */
3381 if (comp_to_narrow
&& DECL_SIZE (TREE_OPERAND (comp_to_narrow
, 1)) && bounds
)
3382 *bounds
= chkp_narrow_bounds_to_field (*bounds
, comp_to_narrow
, iter
);
3384 if (innermost_bounds
&& bounds
&& !*bounds
)
3385 *bounds
= chkp_find_bounds (*ptr
, iter
);
3388 /* Compute and return bounds for address of OBJ. */
3390 chkp_make_addressed_object_bounds (tree obj
, gimple_stmt_iterator
*iter
)
3392 tree bounds
= chkp_get_registered_addr_bounds (obj
);
3397 switch (TREE_CODE (obj
))
3402 bounds
= chkp_get_bounds_for_decl_addr (obj
);
3406 bounds
= chkp_get_bounds_for_string_cst (obj
);
3417 chkp_parse_array_and_component_ref (obj
, &ptr
, &elt
, &safe
,
3418 &bitfield
, &bounds
, iter
, true);
3420 gcc_assert (bounds
);
3426 bounds
= chkp_get_zero_bounds ();
3430 bounds
= chkp_find_bounds (TREE_OPERAND (obj
, 0), iter
);
3435 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (obj
, 0), iter
);
3439 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3441 fprintf (dump_file
, "chkp_make_addressed_object_bounds: "
3442 "unexpected object of type %s\n",
3443 get_tree_code_name (TREE_CODE (obj
)));
3444 print_node (dump_file
, "", obj
, 0);
3446 internal_error ("chkp_make_addressed_object_bounds: "
3447 "Unexpected tree code %s",
3448 get_tree_code_name (TREE_CODE (obj
)));
3451 chkp_register_addr_bounds (obj
, bounds
);
3456 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3457 to compute bounds if required. Computed bounds should be available at
3458 position pointed by ITER.
3460 If PTR_SRC is NULL_TREE then pointer definition is identified.
3462 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3463 PTR. If PTR is a any memory reference then ITER points to a statement
3464 after which bndldx will be inserterd. In both cases ITER will be updated
3465 to point to the inserted bndldx statement. */
3468 chkp_find_bounds_1 (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3470 tree addr
= NULL_TREE
;
3471 tree bounds
= NULL_TREE
;
3476 bounds
= chkp_get_registered_bounds (ptr_src
);
3481 switch (TREE_CODE (ptr_src
))
3485 if (BOUNDED_P (ptr_src
))
3486 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3487 bounds
= chkp_get_zero_bounds ();
3490 addr
= chkp_build_addr_expr (ptr_src
);
3491 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3494 bounds
= chkp_get_nonpointer_load_bounds ();
3499 addr
= get_base_address (ptr_src
);
3501 || TREE_CODE (addr
) == MEM_REF
3502 || TREE_CODE (addr
) == TARGET_MEM_REF
)
3504 if (BOUNDED_P (ptr_src
))
3505 if (TREE_CODE (ptr
) == VAR_DECL
&& DECL_REGISTER (ptr
))
3506 bounds
= chkp_get_zero_bounds ();
3509 addr
= chkp_build_addr_expr (ptr_src
);
3510 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3513 bounds
= chkp_get_nonpointer_load_bounds ();
3517 gcc_assert (TREE_CODE (addr
) == SSA_NAME
);
3518 bounds
= chkp_find_bounds (addr
, iter
);
3524 bounds
= chkp_get_bound_for_parm (ptr_src
);
3527 case TARGET_MEM_REF
:
3528 addr
= chkp_build_addr_expr (ptr_src
);
3529 bounds
= chkp_build_bndldx (addr
, ptr
, iter
);
3533 bounds
= chkp_get_registered_bounds (ptr_src
);
3536 gimple def_stmt
= SSA_NAME_DEF_STMT (ptr_src
);
3537 gphi_iterator phi_iter
;
3539 bounds
= chkp_get_bounds_by_definition (ptr_src
, def_stmt
, &phi_iter
);
3541 gcc_assert (bounds
);
3543 if (gphi
*def_phi
= dyn_cast
<gphi
*> (def_stmt
))
3547 for (i
= 0; i
< gimple_phi_num_args (def_phi
); i
++)
3549 tree arg
= gimple_phi_arg_def (def_phi
, i
);
3553 arg_bnd
= chkp_find_bounds (arg
, NULL
);
3555 /* chkp_get_bounds_by_definition created new phi
3556 statement and phi_iter points to it.
3558 Previous call to chkp_find_bounds could create
3559 new basic block and therefore change phi statement
3560 phi_iter points to. */
3561 phi_bnd
= phi_iter
.phi ();
3563 add_phi_arg (phi_bnd
, arg_bnd
,
3564 gimple_phi_arg_edge (def_phi
, i
),
3568 /* If all bound phi nodes have their arg computed
3569 then we may finish its computation. See
3570 chkp_finish_incomplete_bounds for more details. */
3571 if (chkp_may_finish_incomplete_bounds ())
3572 chkp_finish_incomplete_bounds ();
3575 gcc_assert (bounds
== chkp_get_registered_bounds (ptr_src
)
3576 || chkp_incomplete_bounds (bounds
));
3581 bounds
= chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src
, 0), iter
);
3585 if (integer_zerop (ptr_src
))
3586 bounds
= chkp_get_none_bounds ();
3588 bounds
= chkp_get_invalid_op_bounds ();
3592 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3594 fprintf (dump_file
, "chkp_find_bounds: unexpected ptr of type %s\n",
3595 get_tree_code_name (TREE_CODE (ptr_src
)));
3596 print_node (dump_file
, "", ptr_src
, 0);
3598 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3599 get_tree_code_name (TREE_CODE (ptr_src
)));
3604 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3606 fprintf (stderr
, "chkp_find_bounds: cannot find bounds for pointer\n");
3607 print_node (dump_file
, "", ptr_src
, 0);
3609 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3615 /* Normal case for bounds search without forced narrowing. */
3617 chkp_find_bounds (tree ptr
, gimple_stmt_iterator
*iter
)
3619 return chkp_find_bounds_1 (ptr
, NULL_TREE
, iter
);
3622 /* Search bounds for pointer PTR loaded from PTR_SRC
3623 by statement *ITER points to. */
3625 chkp_find_bounds_loaded (tree ptr
, tree ptr_src
, gimple_stmt_iterator
*iter
)
3627 return chkp_find_bounds_1 (ptr
, ptr_src
, iter
);
3630 /* Helper function which checks type of RHS and finds all pointers in
3631 it. For each found pointer we build it's accesses in LHS and RHS
3632 objects and then call HANDLER for them. Function is used to copy
3633 or initilize bounds for copied object. */
3635 chkp_walk_pointer_assignments (tree lhs
, tree rhs
, void *arg
,
3636 assign_handler handler
)
3638 tree type
= TREE_TYPE (lhs
);
3640 /* We have nothing to do with clobbers. */
3641 if (TREE_CLOBBER_P (rhs
))
3644 if (BOUNDED_TYPE_P (type
))
3645 handler (lhs
, rhs
, arg
);
3646 else if (RECORD_OR_UNION_TYPE_P (type
))
3650 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3652 unsigned HOST_WIDE_INT cnt
;
3655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, field
, val
)
3657 if (chkp_type_has_pointer (TREE_TYPE (field
)))
3659 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3660 chkp_walk_pointer_assignments (lhs_field
, val
, arg
, handler
);
3665 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3666 if (TREE_CODE (field
) == FIELD_DECL
3667 && chkp_type_has_pointer (TREE_TYPE (field
)))
3669 tree rhs_field
= chkp_build_component_ref (rhs
, field
);
3670 tree lhs_field
= chkp_build_component_ref (lhs
, field
);
3671 chkp_walk_pointer_assignments (lhs_field
, rhs_field
, arg
, handler
);
3674 else if (TREE_CODE (type
) == ARRAY_TYPE
)
3676 unsigned HOST_WIDE_INT cur
= 0;
3677 tree maxval
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
3678 tree etype
= TREE_TYPE (type
);
3679 tree esize
= TYPE_SIZE (etype
);
3681 if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3683 unsigned HOST_WIDE_INT cnt
;
3684 tree purp
, val
, lhs_elem
;
3686 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs
), cnt
, purp
, val
)
3688 if (purp
&& TREE_CODE (purp
) == RANGE_EXPR
)
3690 tree lo_index
= TREE_OPERAND (purp
, 0);
3691 tree hi_index
= TREE_OPERAND (purp
, 1);
3693 for (cur
= (unsigned)tree_to_uhwi (lo_index
);
3694 cur
<= (unsigned)tree_to_uhwi (hi_index
);
3697 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3698 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3705 gcc_assert (TREE_CODE (purp
) == INTEGER_CST
);
3706 cur
= tree_to_uhwi (purp
);
3709 lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
++);
3711 chkp_walk_pointer_assignments (lhs_elem
, val
, arg
, handler
);
3715 /* Copy array only when size is known. */
3716 else if (maxval
&& !integer_minus_onep (maxval
))
3717 for (cur
= 0; cur
<= TREE_INT_CST_LOW (maxval
); cur
++)
3719 tree lhs_elem
= chkp_build_array_ref (lhs
, etype
, esize
, cur
);
3720 tree rhs_elem
= chkp_build_array_ref (rhs
, etype
, esize
, cur
);
3721 chkp_walk_pointer_assignments (lhs_elem
, rhs_elem
, arg
, handler
);
3725 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3726 get_tree_code_name (TREE_CODE (type
)));
3729 /* Add code to copy bounds for assignment of RHS to LHS.
3730 ARG is an iterator pointing ne code position. */
3732 chkp_copy_bounds_for_elem (tree lhs
, tree rhs
, void *arg
)
3734 gimple_stmt_iterator
*iter
= (gimple_stmt_iterator
*)arg
;
3735 tree bounds
= chkp_find_bounds (rhs
, iter
);
3736 tree addr
= chkp_build_addr_expr(lhs
);
3738 chkp_build_bndstx (addr
, rhs
, bounds
, iter
);
3741 /* Emit static bound initilizers and size vars. */
3743 chkp_finish_file (void)
3745 struct varpool_node
*node
;
3746 struct chkp_ctor_stmt_list stmts
;
3751 /* Iterate through varpool and generate bounds initialization
3752 constructors for all statically initialized pointers. */
3753 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3755 FOR_EACH_VARIABLE (node
)
3756 /* Check that var is actually emitted and we need and may initialize
3758 if (node
->need_bounds_init
3759 && !POINTER_BOUNDS_P (node
->decl
)
3760 && DECL_RTL (node
->decl
)
3761 && MEM_P (DECL_RTL (node
->decl
))
3762 && TREE_ASM_WRITTEN (node
->decl
))
3764 chkp_walk_pointer_assignments (node
->decl
,
3765 DECL_INITIAL (node
->decl
),
3767 chkp_add_modification_to_stmt_list
);
3769 if (stmts
.avail
<= 0)
3771 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3772 MAX_RESERVED_INIT_PRIORITY
+ 3);
3773 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3779 cgraph_build_static_cdtor ('P', stmts
.stmts
,
3780 MAX_RESERVED_INIT_PRIORITY
+ 3);
3782 /* Iterate through varpool and generate bounds initialization
3783 constructors for all static bounds vars. */
3784 stmts
.avail
= MAX_STMTS_IN_STATIC_CHKP_CTOR
;
3786 FOR_EACH_VARIABLE (node
)
3787 if (node
->need_bounds_init
3788 && POINTER_BOUNDS_P (node
->decl
)
3789 && TREE_ASM_WRITTEN (node
->decl
))
3791 tree bnd
= node
->decl
;
3794 gcc_assert (DECL_INITIAL (bnd
)
3795 && TREE_CODE (DECL_INITIAL (bnd
)) == ADDR_EXPR
);
3797 var
= TREE_OPERAND (DECL_INITIAL (bnd
), 0);
3798 chkp_output_static_bounds (bnd
, var
, &stmts
);
3802 cgraph_build_static_cdtor ('B', stmts
.stmts
,
3803 MAX_RESERVED_INIT_PRIORITY
+ 2);
3805 delete chkp_static_var_bounds
;
3806 delete chkp_bounds_map
;
3809 /* An instrumentation function which is called for each statement
3810 having memory access we want to instrument. It inserts check
3811 code and bounds copy code.
3813 ITER points to statement to instrument.
3815 NODE holds memory access in statement to check.
3817 LOC holds the location information for statement.
3819 DIRFLAGS determines whether access is read or write.
3821 ACCESS_OFFS should be added to address used in NODE
3824 ACCESS_SIZE holds size of checked access.
3826 SAFE indicates if NODE access is safe and should not be
3829 chkp_process_stmt (gimple_stmt_iterator
*iter
, tree node
,
3830 location_t loc
, tree dirflag
,
3831 tree access_offs
, tree access_size
,
3834 tree node_type
= TREE_TYPE (node
);
3835 tree size
= access_size
? access_size
: TYPE_SIZE_UNIT (node_type
);
3836 tree addr_first
= NULL_TREE
; /* address of the first accessed byte */
3837 tree addr_last
= NULL_TREE
; /* address of the last accessed byte */
3838 tree ptr
= NULL_TREE
; /* a pointer used for dereference */
3839 tree bounds
= NULL_TREE
;
3841 /* We do not need instrumentation for clobbers. */
3842 if (dirflag
== integer_one_node
3843 && gimple_code (gsi_stmt (*iter
)) == GIMPLE_ASSIGN
3844 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter
))))
3847 switch (TREE_CODE (node
))
3857 /* We are not going to generate any checks, so do not
3858 generate bounds as well. */
3859 addr_first
= chkp_build_addr_expr (node
);
3863 chkp_parse_array_and_component_ref (node
, &ptr
, &elt
, &safe
,
3864 &bitfield
, &bounds
, iter
, false);
3866 /* Break if there is no dereference and operation is safe. */
3870 tree field
= TREE_OPERAND (node
, 1);
3872 if (TREE_CODE (DECL_SIZE_UNIT (field
)) == INTEGER_CST
)
3873 size
= DECL_SIZE_UNIT (field
);
3876 elt
= chkp_build_addr_expr (elt
);
3877 addr_first
= fold_convert_loc (loc
, ptr_type_node
, elt
? elt
: ptr
);
3878 addr_first
= fold_build_pointer_plus_loc (loc
,
3880 byte_position (field
));
3883 addr_first
= chkp_build_addr_expr (node
);
3888 ptr
= TREE_OPERAND (node
, 0);
3893 ptr
= TREE_OPERAND (node
, 0);
3894 addr_first
= chkp_build_addr_expr (node
);
3897 case TARGET_MEM_REF
:
3898 ptr
= TMR_BASE (node
);
3899 addr_first
= chkp_build_addr_expr (node
);
3902 case ARRAY_RANGE_REF
:
3903 printf("ARRAY_RANGE_REF\n");
3904 debug_gimple_stmt(gsi_stmt(*iter
));
3911 tree offs
, rem
, bpu
;
3913 gcc_assert (!access_offs
);
3914 gcc_assert (!access_size
);
3916 bpu
= fold_convert (size_type_node
, bitsize_int (BITS_PER_UNIT
));
3917 offs
= fold_convert (size_type_node
, TREE_OPERAND (node
, 2));
3918 rem
= size_binop_loc (loc
, TRUNC_MOD_EXPR
, offs
, bpu
);
3919 offs
= size_binop_loc (loc
, TRUNC_DIV_EXPR
, offs
, bpu
);
3921 size
= fold_convert (size_type_node
, TREE_OPERAND (node
, 1));
3922 size
= size_binop_loc (loc
, PLUS_EXPR
, size
, rem
);
3923 size
= size_binop_loc (loc
, CEIL_DIV_EXPR
, size
, bpu
);
3924 size
= fold_convert (size_type_node
, size
);
3926 chkp_process_stmt (iter
, TREE_OPERAND (node
, 0), loc
,
3927 dirflag
, offs
, size
, safe
);
3935 if (dirflag
!= integer_one_node
3936 || DECL_REGISTER (node
))
3940 addr_first
= chkp_build_addr_expr (node
);
3947 /* If addr_last was not computed then use (addr_first + size - 1)
3948 expression to compute it. */
3951 addr_last
= fold_build_pointer_plus_loc (loc
, addr_first
, size
);
3952 addr_last
= fold_build_pointer_plus_hwi_loc (loc
, addr_last
, -1);
3955 /* Shift both first_addr and last_addr by access_offs if specified. */
3958 addr_first
= fold_build_pointer_plus_loc (loc
, addr_first
, access_offs
);
3959 addr_last
= fold_build_pointer_plus_loc (loc
, addr_last
, access_offs
);
3962 /* Generate bndcl/bndcu checks if memory access is not safe. */
3965 gimple_stmt_iterator stmt_iter
= *iter
;
3968 bounds
= chkp_find_bounds (ptr
, iter
);
3970 chkp_check_mem_access (addr_first
, addr_last
, bounds
,
3971 stmt_iter
, loc
, dirflag
);
3974 /* We need to store bounds in case pointer is stored. */
3975 if (dirflag
== integer_one_node
3976 && chkp_type_has_pointer (node_type
)
3977 && flag_chkp_store_bounds
)
3979 gimple stmt
= gsi_stmt (*iter
);
3980 tree rhs1
= gimple_assign_rhs1 (stmt
);
3981 enum tree_code rhs_code
= gimple_assign_rhs_code (stmt
);
3983 if (get_gimple_rhs_class (rhs_code
) == GIMPLE_SINGLE_RHS
)
3984 chkp_walk_pointer_assignments (node
, rhs1
, iter
,
3985 chkp_copy_bounds_for_elem
);
3988 bounds
= chkp_compute_bounds_for_assignment (NULL_TREE
, stmt
);
3989 chkp_build_bndstx (addr_first
, rhs1
, bounds
, iter
);
3994 /* Add code to copy bounds for all pointers copied
3995 in ASSIGN created during inline of EDGE. */
3997 chkp_copy_bounds_for_assign (gimple assign
, struct cgraph_edge
*edge
)
3999 tree lhs
= gimple_assign_lhs (assign
);
4000 tree rhs
= gimple_assign_rhs1 (assign
);
4001 gimple_stmt_iterator iter
= gsi_for_stmt (assign
);
4003 if (!flag_chkp_store_bounds
)
4006 chkp_walk_pointer_assignments (lhs
, rhs
, &iter
, chkp_copy_bounds_for_elem
);
4008 /* We should create edges for all created calls to bndldx and bndstx. */
4009 while (gsi_stmt (iter
) != assign
)
4011 gimple stmt
= gsi_stmt (iter
);
4012 if (gimple_code (stmt
) == GIMPLE_CALL
)
4014 tree fndecl
= gimple_call_fndecl (stmt
);
4015 struct cgraph_node
*callee
= cgraph_node::get_create (fndecl
);
4016 struct cgraph_edge
*new_edge
;
4018 gcc_assert (fndecl
== chkp_bndstx_fndecl
4019 || fndecl
== chkp_bndldx_fndecl
4020 || fndecl
== chkp_ret_bnd_fndecl
);
4022 new_edge
= edge
->caller
->create_edge (callee
,
4023 as_a
<gcall
*> (stmt
),
4026 new_edge
->frequency
= compute_call_stmt_bb_frequency
4027 (edge
->caller
->decl
, gimple_bb (stmt
));
4033 /* Some code transformation made during instrumentation pass
4034 may put code into inconsistent state. Here we find and fix
4040 gimple_stmt_iterator i
;
4042 /* We could insert some code right after stmt which ends bb.
4043 We wanted to put this code on fallthru edge but did not
4044 add new edges from the beginning because it may cause new
4045 phi node creation which may be incorrect due to incomplete
4047 FOR_ALL_BB_FN (bb
, cfun
)
4048 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
4050 gimple stmt
= gsi_stmt (i
);
4051 gimple_stmt_iterator next
= i
;
4055 if (stmt_ends_bb_p (stmt
)
4056 && !gsi_end_p (next
))
4058 edge fall
= find_fallthru_edge (bb
->succs
);
4059 basic_block dest
= NULL
;
4064 /* We cannot split abnormal edge. Therefore we
4065 store its params, make it regular and then
4066 rebuild abnormal edge after split. */
4067 if (fall
->flags
& EDGE_ABNORMAL
)
4069 flags
= fall
->flags
& ~EDGE_FALLTHRU
;
4072 fall
->flags
&= ~EDGE_COMPLEX
;
4075 while (!gsi_end_p (next
))
4077 gimple next_stmt
= gsi_stmt (next
);
4078 gsi_remove (&next
, false);
4079 gsi_insert_on_edge (fall
, next_stmt
);
4082 gsi_commit_edge_inserts ();
4084 /* Re-create abnormal edge. */
4086 make_edge (bb
, dest
, flags
);
4091 /* Walker callback for chkp_replace_function_pointers. Replaces
4092 function pointer in the specified operand with pointer to the
4093 instrumented function version. */
4095 chkp_replace_function_pointer (tree
*op
, int *walk_subtrees
,
4096 void *data ATTRIBUTE_UNUSED
)
4098 if (TREE_CODE (*op
) == FUNCTION_DECL
4099 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op
))
4100 && (DECL_BUILT_IN_CLASS (*op
) == NOT_BUILT_IN
4101 /* For builtins we replace pointers only for selected
4102 function and functions having definitions. */
4103 || (DECL_BUILT_IN_CLASS (*op
) == BUILT_IN_NORMAL
4104 && (chkp_instrument_normal_builtin (*op
)
4105 || gimple_has_body_p (*op
)))))
4107 struct cgraph_node
*node
= cgraph_node::get_create (*op
);
4108 struct cgraph_node
*clone
= NULL
;
4110 if (!node
->instrumentation_clone
)
4111 clone
= chkp_maybe_create_clone (*op
);
4121 /* This function searches for function pointers in statement
4122 pointed by GSI and replaces them with pointers to instrumented
4123 function versions. */
4125 chkp_replace_function_pointers (gimple_stmt_iterator
*gsi
)
4127 gimple stmt
= gsi_stmt (*gsi
);
4128 /* For calls we want to walk call args only. */
4129 if (gimple_code (stmt
) == GIMPLE_CALL
)
4132 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
4133 walk_tree (gimple_call_arg_ptr (stmt
, i
),
4134 chkp_replace_function_pointer
, NULL
, NULL
);
4137 walk_gimple_stmt (gsi
, NULL
, chkp_replace_function_pointer
, NULL
);
4140 /* This function instruments all statements working with memory,
4143 It also removes excess statements from static initializers. */
4145 chkp_instrument_function (void)
4147 basic_block bb
, next
;
4148 gimple_stmt_iterator i
;
4149 enum gimple_rhs_class grhs_class
;
4150 bool safe
= lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
));
4152 bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
;
4156 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); )
4158 gimple s
= gsi_stmt (i
);
4160 /* Skip statement marked to not be instrumented. */
4161 if (chkp_marked_stmt_p (s
))
4167 chkp_replace_function_pointers (&i
);
4169 switch (gimple_code (s
))
4172 chkp_process_stmt (&i
, gimple_assign_lhs (s
),
4173 gimple_location (s
), integer_one_node
,
4174 NULL_TREE
, NULL_TREE
, safe
);
4175 chkp_process_stmt (&i
, gimple_assign_rhs1 (s
),
4176 gimple_location (s
), integer_zero_node
,
4177 NULL_TREE
, NULL_TREE
, safe
);
4178 grhs_class
= get_gimple_rhs_class (gimple_assign_rhs_code (s
));
4179 if (grhs_class
== GIMPLE_BINARY_RHS
)
4180 chkp_process_stmt (&i
, gimple_assign_rhs2 (s
),
4181 gimple_location (s
), integer_zero_node
,
4182 NULL_TREE
, NULL_TREE
, safe
);
4187 greturn
*r
= as_a
<greturn
*> (s
);
4188 if (gimple_return_retval (r
) != NULL_TREE
)
4190 chkp_process_stmt (&i
, gimple_return_retval (r
),
4191 gimple_location (r
),
4193 NULL_TREE
, NULL_TREE
, safe
);
4195 /* Additionally we need to add bounds
4196 to return statement. */
4197 chkp_add_bounds_to_ret_stmt (&i
);
4203 chkp_add_bounds_to_call_stmt (&i
);
4212 /* We do not need any actual pointer stores in checker
4213 static initializer. */
4214 if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
))
4215 && gimple_code (s
) == GIMPLE_ASSIGN
4216 && gimple_store_p (s
))
4218 gimple_stmt_iterator del_iter
= gsi_for_stmt (s
);
4219 gsi_remove (&del_iter
, true);
4220 unlink_stmt_vdef (s
);
4228 /* Some input params may have bounds and be address taken. In this case
4229 we should store incoming bounds into bounds table. */
4231 if (flag_chkp_store_bounds
)
4232 for (arg
= DECL_ARGUMENTS (cfun
->decl
); arg
; arg
= DECL_CHAIN (arg
))
4233 if (TREE_ADDRESSABLE (arg
))
4235 if (BOUNDED_P (arg
))
4237 tree bounds
= chkp_get_next_bounds_parm (arg
);
4238 tree def_ptr
= ssa_default_def (cfun
, arg
);
4239 gimple_stmt_iterator iter
4240 = gsi_start_bb (chkp_get_entry_block ());
4241 chkp_build_bndstx (chkp_build_addr_expr (arg
),
4242 def_ptr
? def_ptr
: arg
,
4245 /* Skip bounds arg. */
4246 arg
= TREE_CHAIN (arg
);
4248 else if (chkp_type_has_pointer (TREE_TYPE (arg
)))
4250 tree orig_arg
= arg
;
4251 bitmap slots
= BITMAP_ALLOC (NULL
);
4252 gimple_stmt_iterator iter
4253 = gsi_start_bb (chkp_get_entry_block ());
4257 chkp_find_bound_slots (TREE_TYPE (arg
), slots
);
4259 EXECUTE_IF_SET_IN_BITMAP (slots
, 0, bnd_no
, bi
)
4261 tree bounds
= chkp_get_next_bounds_parm (arg
);
4262 HOST_WIDE_INT offs
= bnd_no
* POINTER_SIZE
/ BITS_PER_UNIT
;
4263 tree addr
= chkp_build_addr_expr (orig_arg
);
4264 tree ptr
= build2 (MEM_REF
, ptr_type_node
, addr
,
4265 build_int_cst (ptr_type_node
, offs
));
4266 chkp_build_bndstx (chkp_build_addr_expr (ptr
), ptr
,
4269 arg
= DECL_CHAIN (arg
);
4271 BITMAP_FREE (slots
);
4276 /* Find init/null/copy_ptr_bounds calls and replace them
4277 with assignments. It should allow better code
4281 chkp_remove_useless_builtins ()
4284 gimple_stmt_iterator gsi
;
4286 FOR_EACH_BB_FN (bb
, cfun
)
4288 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4290 gimple stmt
= gsi_stmt (gsi
);
4292 enum built_in_function fcode
;
4294 /* Find builtins returning first arg and replace
4295 them with assignments. */
4296 if (gimple_code (stmt
) == GIMPLE_CALL
4297 && (fndecl
= gimple_call_fndecl (stmt
))
4298 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
4299 && (fcode
= DECL_FUNCTION_CODE (fndecl
))
4300 && (fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
4301 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
4302 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
4303 || fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
))
4305 tree res
= gimple_call_arg (stmt
, 0);
4306 update_call_from_tree (&gsi
, res
);
4307 stmt
= gsi_stmt (gsi
);
4314 /* Initialize pass. */
4319 gimple_stmt_iterator i
;
4321 in_chkp_pass
= true;
4323 for (bb
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
; bb
; bb
= bb
->next_bb
)
4324 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
); gsi_next (&i
))
4325 chkp_unmark_stmt (gsi_stmt (i
));
4327 chkp_invalid_bounds
= new hash_set
<tree
>;
4328 chkp_completed_bounds_set
= new hash_set
<tree
>;
4329 delete chkp_reg_bounds
;
4330 chkp_reg_bounds
= new hash_map
<tree
, tree
>;
4331 delete chkp_bound_vars
;
4332 chkp_bound_vars
= new hash_map
<tree
, tree
>;
4333 chkp_reg_addr_bounds
= new hash_map
<tree
, tree
>;
4334 chkp_incomplete_bounds_map
= new hash_map
<tree
, tree
>;
4335 delete chkp_bounds_map
;
4336 chkp_bounds_map
= new hash_map
<tree
, tree
>;
4337 chkp_abnormal_copies
= BITMAP_GGC_ALLOC ();
4340 zero_bounds
= NULL_TREE
;
4341 none_bounds
= NULL_TREE
;
4342 incomplete_bounds
= integer_zero_node
;
4343 tmp_var
= NULL_TREE
;
4344 size_tmp_var
= NULL_TREE
;
4346 chkp_uintptr_type
= lang_hooks
.types
.type_for_mode (ptr_mode
, true);
4348 /* We create these constant bounds once for each object file.
4349 These symbols go to comdat section and result in single copy
4350 of each one in the final binary. */
4351 chkp_get_zero_bounds_var ();
4352 chkp_get_none_bounds_var ();
4354 calculate_dominance_info (CDI_DOMINATORS
);
4355 calculate_dominance_info (CDI_POST_DOMINATORS
);
4357 bitmap_obstack_initialize (NULL
);
4360 /* Finalize instrumentation pass. */
4364 in_chkp_pass
= false;
4366 delete chkp_invalid_bounds
;
4367 delete chkp_completed_bounds_set
;
4368 delete chkp_reg_addr_bounds
;
4369 delete chkp_incomplete_bounds_map
;
4371 free_dominance_info (CDI_DOMINATORS
);
4372 free_dominance_info (CDI_POST_DOMINATORS
);
4374 bitmap_obstack_release (NULL
);
4377 zero_bounds
= NULL_TREE
;
4378 none_bounds
= NULL_TREE
;
4381 /* Main instrumentation pass function. */
4387 chkp_instrument_function ();
4389 chkp_remove_useless_builtins ();
4391 chkp_function_mark_instrumented (cfun
->decl
);
4400 /* Instrumentation pass gate. */
4404 cgraph_node
*node
= cgraph_node::get (cfun
->decl
);
4405 return ((node
!= NULL
4406 && node
->instrumentation_clone
)
4407 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun
->decl
)));
4412 const pass_data pass_data_chkp
=
4414 GIMPLE_PASS
, /* type */
4416 OPTGROUP_NONE
, /* optinfo_flags */
4417 TV_NONE
, /* tv_id */
4418 PROP_ssa
| PROP_cfg
, /* properties_required */
4419 0, /* properties_provided */
4420 0, /* properties_destroyed */
4421 0, /* todo_flags_start */
4423 | TODO_update_ssa
/* todo_flags_finish */
4426 class pass_chkp
: public gimple_opt_pass
4429 pass_chkp (gcc::context
*ctxt
)
4430 : gimple_opt_pass (pass_data_chkp
, ctxt
)
4433 /* opt_pass methods: */
4434 virtual opt_pass
* clone ()
4436 return new pass_chkp (m_ctxt
);
4439 virtual bool gate (function
*)
4441 return chkp_gate ();
4444 virtual unsigned int execute (function
*)
4446 return chkp_execute ();
4449 }; // class pass_chkp
4454 make_pass_chkp (gcc::context
*ctxt
)
4456 return new pass_chkp (ctxt
);
4459 #include "gt-tree-chkp.h"