/* gcc/tree-chkp.c -- Pointer Bounds Checker instrumentation pass.  */
/* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-set.h"
25 #include "vec.h"
26 #include "input.h"
27 #include "alias.h"
28 #include "symtab.h"
29 #include "options.h"
30 #include "inchash.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stor-layout.h"
34 #include "varasm.h"
35 #include "target.h"
36 #include "tree-iterator.h"
37 #include "tree-cfg.h"
38 #include "langhooks.h"
39 #include "tree-pass.h"
40 #include "diagnostic.h"
41 #include "ggc.h"
42 #include "is-a.h"
43 #include "cfgloop.h"
44 #include "stringpool.h"
45 #include "tree-ssa-alias.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-operands.h"
48 #include "tree-ssa-address.h"
49 #include "tree-ssa.h"
50 #include "predict.h"
51 #include "dominance.h"
52 #include "cfg.h"
53 #include "basic-block.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "gimple-expr.h"
56 #include "gimple.h"
57 #include "tree-phinodes.h"
58 #include "gimple-ssa.h"
59 #include "ssa-iterators.h"
60 #include "gimple-pretty-print.h"
61 #include "gimple-iterator.h"
62 #include "gimplify.h"
63 #include "gimplify-me.h"
64 #include "print-tree.h"
65 #include "hashtab.h"
66 #include "tm.h"
67 #include "hard-reg-set.h"
68 #include "function.h"
69 #include "rtl.h"
70 #include "flags.h"
71 #include "statistics.h"
72 #include "insn-config.h"
73 #include "expmed.h"
74 #include "dojump.h"
75 #include "explow.h"
76 #include "calls.h"
77 #include "emit-rtl.h"
78 #include "stmt.h"
79 #include "expr.h"
80 #include "tree-ssa-propagate.h"
81 #include "gimple-fold.h"
82 #include "tree-chkp.h"
83 #include "gimple-walk.h"
84 #include "rtl.h" /* For MEM_P, assign_temp. */
85 #include "tree-dfa.h"
86 #include "ipa-ref.h"
87 #include "lto-streamer.h"
88 #include "cgraph.h"
89 #include "ipa-chkp.h"
90 #include "params.h"
91
92 /* Pointer Bounds Checker instruments code with memory checks to find
93 out-of-bounds memory accesses. Checks are performed by computing
94 bounds for each pointer and then comparing address of accessed
95 memory before pointer dereferencing.
96
97 1. Function clones.
98
99 See ipa-chkp.c.
100
101 2. Instrumentation.
102
103 There are few things to instrument:
104
105 a) Memory accesses - add checker calls to check address of accessed memory
106 against bounds of dereferenced pointer. Obviously safe memory
107 accesses like static variable access does not have to be instrumented
108 with checks.
109
110 Example:
111
112 val_2 = *p_1;
113
114 with 4 bytes access is transformed into:
115
116 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
117 D.1_4 = p_1 + 3;
118 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
119 val_2 = *p_1;
120
121 where __bound_tmp.1_3 are bounds computed for pointer p_1,
122 __builtin___chkp_bndcl is a lower bound check and
123 __builtin___chkp_bndcu is an upper bound check.
124
125 b) Pointer stores.
126
127 When pointer is stored in memory we need to store its bounds. To
128 achieve compatibility of instrumented code with regular codes
129 we have to keep data layout and store bounds in special bound tables
130 via special checker call. Implementation of bounds table may vary for
131 different platforms. It has to associate pointer value and its
132 location (it is required because we may have two equal pointers
133 with different bounds stored in different places) with bounds.
134 Another checker builtin allows to get bounds for specified pointer
135 loaded from specified location.
136
137 Example:
138
139 buf1[i_1] = &buf2;
140
141 is transformed into:
142
143 buf1[i_1] = &buf2;
144 D.1_2 = &buf1[i_1];
145 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
146
147 where __bound_tmp.1_2 are bounds of &buf2.
148
149 c) Static initialization.
150
151 The special case of pointer store is static pointer initialization.
152 Bounds initialization is performed in a few steps:
153 - register all static initializations in front-end using
154 chkp_register_var_initializer
155 - when file compilation finishes we create functions with special
156 attribute 'chkp ctor' and put explicit initialization code
157 (assignments) for all statically initialized pointers.
158 - when checker constructor is compiled checker pass adds required
159 bounds initialization for all statically initialized pointers
160 - since we do not actually need excess pointers initialization
161 in checker constructor we remove such assignments from them
162
163 d) Calls.
164
165 For each call in the code we add additional arguments to pass
166 bounds for pointer arguments. We determine type of call arguments
167 using arguments list from function declaration; if function
168 declaration is not available we use function type; otherwise
169 (e.g. for unnamed arguments) we use type of passed value. Function
170 declaration/type is replaced with the instrumented one.
171
172 Example:
173
174 val_1 = foo (&buf1, &buf2, &buf1, 0);
175
176 is translated into:
177
178 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
179 &buf1, __bound_tmp.1_2, 0);
180
181 e) Returns.
182
183 If function returns a pointer value we have to return bounds also.
184 A new operand was added for return statement to hold returned bounds.
185
186 Example:
187
188 return &_buf1;
189
190 is transformed into
191
192 return &_buf1, __bound_tmp.1_1;
193
194 3. Bounds computation.
195
196 Compiler is fully responsible for computing bounds to be used for each
197 memory access. The first step for bounds computation is to find the
198 origin of pointer dereferenced for memory access. Basing on pointer
199 origin we define a way to compute its bounds. There are just few
200 possible cases:
201
202 a) Pointer is returned by call.
203
204 In this case we use corresponding checker builtin method to obtain returned
205 bounds.
206
207 Example:
208
209 buf_1 = malloc (size_2);
210 foo (buf_1);
211
212 is translated into:
213
214 buf_1 = malloc (size_2);
215 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
216 foo (buf_1, __bound_tmp.1_3);
217
218 b) Pointer is an address of an object.
219
220 In this case compiler tries to compute objects size and create corresponding
221 bounds. If object has incomplete type then special checker builtin is used to
222 obtain its size at runtime.
223
224 Example:
225
226 foo ()
227 {
228 <unnamed type> __bound_tmp.3;
229 static int buf[100];
230
231 <bb 3>:
232 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
233
234 <bb 2>:
235 return &buf, __bound_tmp.3_2;
236 }
237
238 Example:
239
240 Address of an object 'extern int buf[]' with incomplete type is
241 returned.
242
243 foo ()
244 {
245 <unnamed type> __bound_tmp.4;
246 long unsigned int __size_tmp.3;
247
248 <bb 3>:
249 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
250 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
251
252 <bb 2>:
253 return &buf, __bound_tmp.4_3;
254 }
255
256 c) Pointer is the result of object narrowing.
257
258 It happens when we use pointer to an object to compute pointer to a part
259 of an object. E.g. we take pointer to a field of a structure. In this
260 case we perform bounds intersection using bounds of original object and
261 bounds of object's part (which are computed basing on its type).
262
263 There may be some debatable questions about when narrowing should occur
264 and when it should not. To avoid false bound violations in correct
265 programs we do not perform narrowing when address of an array element is
266 obtained (it has address of the whole array) and when address of the first
267 structure field is obtained (because it is guaranteed to be equal to
268 address of the whole structure and it is legal to cast it back to structure).
269
270 Default narrowing behavior may be changed using compiler flags.
271
272 Example:
273
274 In this example address of the second structure field is returned.
275
276 foo (struct A * p, __bounds_type __bounds_of_p)
277 {
278 <unnamed type> __bound_tmp.3;
279 int * _2;
280 int * _5;
281
282 <bb 2>:
283 _5 = &p_1(D)->second_field;
284 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
285 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
286 __bounds_of_p_3(D));
287 _2 = &p_1(D)->second_field;
288 return _2, __bound_tmp.3_8;
289 }
290
291 Example:
292
293 In this example address of the first field of array element is returned.
294
295 foo (struct A * p, __bounds_type __bounds_of_p, int i)
296 {
297 long unsigned int _3;
298 long unsigned int _4;
299 struct A * _6;
300 int * _7;
301
302 <bb 2>:
303 _3 = (long unsigned int) i_1(D);
304 _4 = _3 * 8;
305 _6 = p_5(D) + _4;
306 _7 = &_6->first_field;
307 return _7, __bounds_of_p_2(D);
308 }
309
310
311 d) Pointer is the result of pointer arithmetic or type cast.
312
313 In this case bounds of the base pointer are used. In case of binary
314 operation producing a pointer we are analyzing data flow further
315 looking for operand's bounds. One operand is considered as a base
316 if it has some valid bounds. If we fall into a case when none of
317 operands (or both of them) has valid bounds, a default bounds value
318 is used.
319
320 Trying to find out bounds for binary operations we may fall into
321 cyclic dependencies for pointers. To avoid infinite recursion all
322 walked phi nodes instantly obtain corresponding bounds but created
323 bounds are marked as incomplete. It helps us to stop DF walk during
324 bounds search.
325
326 When we reach pointer source, some args of incomplete bounds phi obtain
327 valid bounds and those values are propagated further through phi nodes.
328 If no valid bounds were found for phi node then we mark its result as
329 invalid bounds. Process stops when all incomplete bounds become either
330 valid or invalid and we are able to choose a pointer base.
331
332 e) Pointer is loaded from the memory.
333
334 In this case we just need to load bounds from the bounds table.
335
336 Example:
337
338 foo ()
339 {
340 <unnamed type> __bound_tmp.3;
341 static int * buf;
342 int * _2;
343
344 <bb 2>:
345 _2 = buf;
346 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
347 return _2, __bound_tmp.3_4;
348 }
349
350 */
351
352 typedef void (*assign_handler)(tree, tree, void *);
353
354 static tree chkp_get_zero_bounds ();
355 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
356 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
357 gimple_stmt_iterator *iter);
358 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
359 tree *elt, bool *safe,
360 bool *bitfield,
361 tree *bounds,
362 gimple_stmt_iterator *iter,
363 bool innermost_bounds);
364
365 #define chkp_bndldx_fndecl \
366 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
367 #define chkp_bndstx_fndecl \
368 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
369 #define chkp_checkl_fndecl \
370 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
371 #define chkp_checku_fndecl \
372 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
373 #define chkp_bndmk_fndecl \
374 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
375 #define chkp_ret_bnd_fndecl \
376 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
377 #define chkp_intersect_fndecl \
378 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
379 #define chkp_narrow_bounds_fndecl \
380 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
381 #define chkp_sizeof_fndecl \
382 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
383 #define chkp_extract_lower_fndecl \
384 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
385 #define chkp_extract_upper_fndecl \
386 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
387
388 static GTY (()) tree chkp_uintptr_type;
389
390 static GTY (()) tree chkp_zero_bounds_var;
391 static GTY (()) tree chkp_none_bounds_var;
392
393 static GTY (()) basic_block entry_block;
394 static GTY (()) tree zero_bounds;
395 static GTY (()) tree none_bounds;
396 static GTY (()) tree incomplete_bounds;
397 static GTY (()) tree tmp_var;
398 static GTY (()) tree size_tmp_var;
399 static GTY (()) bitmap chkp_abnormal_copies;
400
401 struct hash_set<tree> *chkp_invalid_bounds;
402 struct hash_set<tree> *chkp_completed_bounds_set;
403 struct hash_map<tree, tree> *chkp_reg_bounds;
404 struct hash_map<tree, tree> *chkp_bound_vars;
405 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
406 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
407 struct hash_map<tree, tree> *chkp_bounds_map;
408 struct hash_map<tree, tree> *chkp_static_var_bounds;
409
410 static bool in_chkp_pass;
411
412 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
413 #define CHKP_SIZE_TMP_NAME "__size_tmp"
414 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
415 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
416 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
417 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
418 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
419
420 /* Static checker constructors may become very large and their
421 compilation with optimization may take too much time.
422 Therefore we put a limit to number of statements in one
423 constructor. Tests with 100 000 statically initialized
424 pointers showed following compilation times on Sandy Bridge
425 server (used -O2):
426 limit 100 => ~18 sec.
427 limit 300 => ~22 sec.
428 limit 1000 => ~30 sec.
429 limit 3000 => ~49 sec.
430 limit 5000 => ~55 sec.
431 limit 10000 => ~76 sec.
432 limit 100000 => ~532 sec. */
433 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
434
435 struct chkp_ctor_stmt_list
436 {
437 tree stmts;
438 int avail;
439 };
440
441 /* Return 1 if function FNDECL is instrumented by Pointer
442 Bounds Checker. */
443 bool
444 chkp_function_instrumented_p (tree fndecl)
445 {
446 return fndecl
447 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
448 }
449
450 /* Mark function FNDECL as instrumented. */
451 void
452 chkp_function_mark_instrumented (tree fndecl)
453 {
454 if (chkp_function_instrumented_p (fndecl))
455 return;
456
457 DECL_ATTRIBUTES (fndecl)
458 = tree_cons (get_identifier ("chkp instrumented"), NULL,
459 DECL_ATTRIBUTES (fndecl));
460 }
461
462 /* Return true when STMT is builtin call to instrumentation function
463 corresponding to CODE. */
464
465 bool
466 chkp_gimple_call_builtin_p (gimple call,
467 enum built_in_function code)
468 {
469 tree fndecl;
470 if (is_gimple_call (call)
471 && (fndecl = targetm.builtin_chkp_function (code))
472 && gimple_call_fndecl (call) == fndecl)
473 return true;
474 return false;
475 }
476
477 /* Emit code to store zero bounds for PTR located at MEM. */
478 void
479 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
480 {
481 tree zero_bnd, bnd, addr, bndstx;
482
483 if (flag_chkp_use_static_const_bounds)
484 zero_bnd = chkp_get_zero_bounds_var ();
485 else
486 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
487 integer_zero_node);
488 bnd = make_tree (pointer_bounds_type_node,
489 assign_temp (pointer_bounds_type_node, 0, 1));
490 addr = build1 (ADDR_EXPR,
491 build_pointer_type (TREE_TYPE (mem)), mem);
492 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
493
494 expand_assignment (bnd, zero_bnd, false);
495 expand_normal (bndstx);
496 }
497
498 /* Build retbnd call for returned value RETVAL.
499
500 If BNDVAL is not NULL then result is stored
501 in it. Otherwise a temporary is created to
502 hold returned value.
503
504 GSI points to a position for a retbnd call
505 and is set to created stmt.
506
507 Cgraph edge is created for a new call if
508 UPDATE_EDGE is 1.
509
510 Obtained bounds are returned. */
511 tree
512 chkp_insert_retbnd_call (tree bndval, tree retval,
513 gimple_stmt_iterator *gsi)
514 {
515 gimple call;
516
517 if (!bndval)
518 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
519
520 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
521 gimple_call_set_lhs (call, bndval);
522 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
523
524 return bndval;
525 }
526
527 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
528 arguments. */
529
530 gcall *
531 chkp_copy_call_skip_bounds (gcall *call)
532 {
533 bitmap bounds;
534 unsigned i;
535
536 bitmap_obstack_initialize (NULL);
537 bounds = BITMAP_ALLOC (NULL);
538
539 for (i = 0; i < gimple_call_num_args (call); i++)
540 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
541 bitmap_set_bit (bounds, i);
542
543 if (!bitmap_empty_p (bounds))
544 call = gimple_call_copy_skip_args (call, bounds);
545 gimple_call_set_with_bounds (call, false);
546
547 BITMAP_FREE (bounds);
548 bitmap_obstack_release (NULL);
549
550 return call;
551 }
552
553 /* Redirect edge E to the correct node according to call_stmt.
554 Return 1 if bounds removal from call_stmt should be done
555 instead of redirection. */
556
557 bool
558 chkp_redirect_edge (cgraph_edge *e)
559 {
560 bool instrumented = false;
561 tree decl = e->callee->decl;
562
563 if (e->callee->instrumentation_clone
564 || chkp_function_instrumented_p (decl))
565 instrumented = true;
566
567 if (instrumented
568 && !gimple_call_with_bounds_p (e->call_stmt))
569 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
570 else if (!instrumented
571 && gimple_call_with_bounds_p (e->call_stmt)
572 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
573 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
574 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
575 {
576 if (e->callee->instrumented_version)
577 e->redirect_callee (e->callee->instrumented_version);
578 else
579 {
580 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
581 /* Avoid bounds removal if all args will be removed. */
582 if (!args || TREE_VALUE (args) != void_type_node)
583 return true;
584 else
585 gimple_call_set_with_bounds (e->call_stmt, false);
586 }
587 }
588
589 return false;
590 }
591
592 /* Mark statement S to not be instrumented. */
593 static void
594 chkp_mark_stmt (gimple s)
595 {
596 gimple_set_plf (s, GF_PLF_1, true);
597 }
598
599 /* Mark statement S to be instrumented. */
600 static void
601 chkp_unmark_stmt (gimple s)
602 {
603 gimple_set_plf (s, GF_PLF_1, false);
604 }
605
606 /* Return 1 if statement S should not be instrumented. */
607 static bool
608 chkp_marked_stmt_p (gimple s)
609 {
610 return gimple_plf (s, GF_PLF_1);
611 }
612
613 /* Get var to be used for bound temps. */
614 static tree
615 chkp_get_tmp_var (void)
616 {
617 if (!tmp_var)
618 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
619
620 return tmp_var;
621 }
622
623 /* Get SSA_NAME to be used as temp. */
624 static tree
625 chkp_get_tmp_reg (gimple stmt)
626 {
627 if (in_chkp_pass)
628 return make_ssa_name (chkp_get_tmp_var (), stmt);
629
630 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
631 CHKP_BOUND_TMP_NAME);
632 }
633
634 /* Get var to be used for size temps. */
635 static tree
636 chkp_get_size_tmp_var (void)
637 {
638 if (!size_tmp_var)
639 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
640
641 return size_tmp_var;
642 }
643
644 /* Register bounds BND for address of OBJ. */
645 static void
646 chkp_register_addr_bounds (tree obj, tree bnd)
647 {
648 if (bnd == incomplete_bounds)
649 return;
650
651 chkp_reg_addr_bounds->put (obj, bnd);
652
653 if (dump_file && (dump_flags & TDF_DETAILS))
654 {
655 fprintf (dump_file, "Regsitered bound ");
656 print_generic_expr (dump_file, bnd, 0);
657 fprintf (dump_file, " for address of ");
658 print_generic_expr (dump_file, obj, 0);
659 fprintf (dump_file, "\n");
660 }
661 }
662
663 /* Return bounds registered for address of OBJ. */
664 static tree
665 chkp_get_registered_addr_bounds (tree obj)
666 {
667 tree *slot = chkp_reg_addr_bounds->get (obj);
668 return slot ? *slot : NULL_TREE;
669 }
670
671 /* Mark BOUNDS as completed. */
672 static void
673 chkp_mark_completed_bounds (tree bounds)
674 {
675 chkp_completed_bounds_set->add (bounds);
676
677 if (dump_file && (dump_flags & TDF_DETAILS))
678 {
679 fprintf (dump_file, "Marked bounds ");
680 print_generic_expr (dump_file, bounds, 0);
681 fprintf (dump_file, " as completed\n");
682 }
683 }
684
685 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
686 static bool
687 chkp_completed_bounds (tree bounds)
688 {
689 return chkp_completed_bounds_set->contains (bounds);
690 }
691
692 /* Clear comleted bound marks. */
693 static void
694 chkp_erase_completed_bounds (void)
695 {
696 delete chkp_completed_bounds_set;
697 chkp_completed_bounds_set = new hash_set<tree>;
698 }
699
700 /* Mark BOUNDS associated with PTR as incomplete. */
701 static void
702 chkp_register_incomplete_bounds (tree bounds, tree ptr)
703 {
704 chkp_incomplete_bounds_map->put (bounds, ptr);
705
706 if (dump_file && (dump_flags & TDF_DETAILS))
707 {
708 fprintf (dump_file, "Regsitered incomplete bounds ");
709 print_generic_expr (dump_file, bounds, 0);
710 fprintf (dump_file, " for ");
711 print_generic_expr (dump_file, ptr, 0);
712 fprintf (dump_file, "\n");
713 }
714 }
715
716 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
717 static bool
718 chkp_incomplete_bounds (tree bounds)
719 {
720 if (bounds == incomplete_bounds)
721 return true;
722
723 if (chkp_completed_bounds (bounds))
724 return false;
725
726 return chkp_incomplete_bounds_map->get (bounds) != NULL;
727 }
728
729 /* Clear incomleted bound marks. */
730 static void
731 chkp_erase_incomplete_bounds (void)
732 {
733 delete chkp_incomplete_bounds_map;
734 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
735 }
736
737 /* Build and return bndmk call which creates bounds for structure
738 pointed by PTR. Structure should have complete type. */
739 tree
740 chkp_make_bounds_for_struct_addr (tree ptr)
741 {
742 tree type = TREE_TYPE (ptr);
743 tree size;
744
745 gcc_assert (POINTER_TYPE_P (type));
746
747 size = TYPE_SIZE (TREE_TYPE (type));
748
749 gcc_assert (size);
750
751 return build_call_nary (pointer_bounds_type_node,
752 build_fold_addr_expr (chkp_bndmk_fndecl),
753 2, ptr, size);
754 }
755
756 /* Traversal function for chkp_may_finish_incomplete_bounds.
757 Set RES to 0 if at least one argument of phi statement
758 defining bounds (passed in KEY arg) is unknown.
759 Traversal stops when first unknown phi argument is found. */
760 bool
761 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
762 bool *res)
763 {
764 gimple phi;
765 unsigned i;
766
767 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
768
769 phi = SSA_NAME_DEF_STMT (bounds);
770
771 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
772
773 for (i = 0; i < gimple_phi_num_args (phi); i++)
774 {
775 tree phi_arg = gimple_phi_arg_def (phi, i);
776 if (!phi_arg)
777 {
778 *res = false;
779 /* Do not need to traverse further. */
780 return false;
781 }
782 }
783
784 return true;
785 }
786
787 /* Return 1 if all phi nodes created for bounds have their
788 arguments computed. */
789 static bool
790 chkp_may_finish_incomplete_bounds (void)
791 {
792 bool res = true;
793
794 chkp_incomplete_bounds_map
795 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
796
797 return res;
798 }
799
800 /* Helper function for chkp_finish_incomplete_bounds.
801 Recompute args for bounds phi node. */
802 bool
803 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
804 void *res ATTRIBUTE_UNUSED)
805 {
806 tree ptr = *slot;
807 gphi *bounds_phi;
808 gphi *ptr_phi;
809 unsigned i;
810
811 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
812 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
813
814 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
815 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
816
817 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
818 {
819 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
820 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
821
822 add_phi_arg (bounds_phi, bound_arg,
823 gimple_phi_arg_edge (ptr_phi, i),
824 UNKNOWN_LOCATION);
825 }
826
827 return true;
828 }
829
830 /* Mark BOUNDS as invalid. */
831 static void
832 chkp_mark_invalid_bounds (tree bounds)
833 {
834 chkp_invalid_bounds->add (bounds);
835
836 if (dump_file && (dump_flags & TDF_DETAILS))
837 {
838 fprintf (dump_file, "Marked bounds ");
839 print_generic_expr (dump_file, bounds, 0);
840 fprintf (dump_file, " as invalid\n");
841 }
842 }
843
844 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
845 static bool
846 chkp_valid_bounds (tree bounds)
847 {
848 if (bounds == zero_bounds || bounds == none_bounds)
849 return false;
850
851 return !chkp_invalid_bounds->contains (bounds);
852 }
853
854 /* Helper function for chkp_finish_incomplete_bounds.
855 Check all arguments of phi nodes trying to find
856 valid completed bounds. If there is at least one
857 such arg then bounds produced by phi node are marked
858 as valid completed bounds and all phi args are
859 recomputed. */
860 bool
861 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
862 {
863 gimple phi;
864 unsigned i;
865
866 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
867
868 if (chkp_completed_bounds (bounds))
869 return true;
870
871 phi = SSA_NAME_DEF_STMT (bounds);
872
873 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
874
875 for (i = 0; i < gimple_phi_num_args (phi); i++)
876 {
877 tree phi_arg = gimple_phi_arg_def (phi, i);
878
879 gcc_assert (phi_arg);
880
881 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
882 {
883 *res = true;
884 chkp_mark_completed_bounds (bounds);
885 chkp_recompute_phi_bounds (bounds, slot, NULL);
886 return true;
887 }
888 }
889
890 return true;
891 }
892
893 /* Helper function for chkp_finish_incomplete_bounds.
894 Marks all incompleted bounds as invalid. */
895 bool
896 chkp_mark_invalid_bounds_walker (tree const &bounds,
897 tree *slot ATTRIBUTE_UNUSED,
898 void *res ATTRIBUTE_UNUSED)
899 {
900 if (!chkp_completed_bounds (bounds))
901 {
902 chkp_mark_invalid_bounds (bounds);
903 chkp_mark_completed_bounds (bounds);
904 }
905 return true;
906 }
907
908 /* When all bound phi nodes have all their args computed
909 we have enough info to find valid bounds. We iterate
910 through all incompleted bounds searching for valid
911 bounds. Found valid bounds are marked as completed
912 and all remaining incompleted bounds are recomputed.
913 Process continues until no new valid bounds may be
914 found. All remained incompleted bounds are marked as
915 invalid (i.e. have no valid source of bounds). */
916 static void
917 chkp_finish_incomplete_bounds (void)
918 {
919 bool found_valid;
920
921 while (found_valid)
922 {
923 found_valid = false;
924
925 chkp_incomplete_bounds_map->
926 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
927
928 if (found_valid)
929 chkp_incomplete_bounds_map->
930 traverse<void *, chkp_recompute_phi_bounds> (NULL);
931 }
932
933 chkp_incomplete_bounds_map->
934 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
935 chkp_incomplete_bounds_map->
936 traverse<void *, chkp_recompute_phi_bounds> (NULL);
937
938 chkp_erase_completed_bounds ();
939 chkp_erase_incomplete_bounds ();
940 }
941
942 /* Return 1 if type TYPE is a pointer type or a
943 structure having a pointer type as one of its fields.
944 Otherwise return 0. */
945 bool
946 chkp_type_has_pointer (const_tree type)
947 {
948 bool res = false;
949
950 if (BOUNDED_TYPE_P (type))
951 res = true;
952 else if (RECORD_OR_UNION_TYPE_P (type))
953 {
954 tree field;
955
956 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
957 if (TREE_CODE (field) == FIELD_DECL)
958 res = res || chkp_type_has_pointer (TREE_TYPE (field));
959 }
960 else if (TREE_CODE (type) == ARRAY_TYPE)
961 res = chkp_type_has_pointer (TREE_TYPE (type));
962
963 return res;
964 }
965
966 unsigned
967 chkp_type_bounds_count (const_tree type)
968 {
969 unsigned res = 0;
970
971 if (!type)
972 res = 0;
973 else if (BOUNDED_TYPE_P (type))
974 res = 1;
975 else if (RECORD_OR_UNION_TYPE_P (type))
976 {
977 bitmap have_bound;
978
979 bitmap_obstack_initialize (NULL);
980 have_bound = BITMAP_ALLOC (NULL);
981 chkp_find_bound_slots (type, have_bound);
982 res = bitmap_count_bits (have_bound);
983 BITMAP_FREE (have_bound);
984 bitmap_obstack_release (NULL);
985 }
986
987 return res;
988 }
989
990 /* Get bounds associated with NODE via
991 chkp_set_bounds call. */
992 tree
993 chkp_get_bounds (tree node)
994 {
995 tree *slot;
996
997 if (!chkp_bounds_map)
998 return NULL_TREE;
999
1000 slot = chkp_bounds_map->get (node);
1001 return slot ? *slot : NULL_TREE;
1002 }
1003
1004 /* Associate bounds VAL with NODE. */
1005 void
1006 chkp_set_bounds (tree node, tree val)
1007 {
1008 if (!chkp_bounds_map)
1009 chkp_bounds_map = new hash_map<tree, tree>;
1010
1011 chkp_bounds_map->put (node, val);
1012 }
1013
1014 /* Check if statically initialized variable VAR require
1015 static bounds initialization. If VAR is added into
1016 bounds initlization list then 1 is returned. Otherwise
1017 return 0. */
1018 extern bool
1019 chkp_register_var_initializer (tree var)
1020 {
1021 if (!flag_check_pointer_bounds
1022 || DECL_INITIAL (var) == error_mark_node)
1023 return false;
1024
1025 gcc_assert (TREE_CODE (var) == VAR_DECL);
1026 gcc_assert (DECL_INITIAL (var));
1027
1028 if (TREE_STATIC (var)
1029 && chkp_type_has_pointer (TREE_TYPE (var)))
1030 {
1031 varpool_node::get_create (var)->need_bounds_init = 1;
1032 return true;
1033 }
1034
1035 return false;
1036 }
1037
1038 /* Helper function for chkp_finish_file.
1039
1040 Add new modification statement (RHS is assigned to LHS)
1041 into list of static initializer statementes (passed in ARG).
1042 If statements list becomes too big, emit checker constructor
1043 and start the new one. */
1044 static void
1045 chkp_add_modification_to_stmt_list (tree lhs,
1046 tree rhs,
1047 void *arg)
1048 {
1049 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1050 tree modify;
1051
1052 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1053 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1054
1055 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1056 append_to_statement_list (modify, &stmts->stmts);
1057
1058 stmts->avail--;
1059 }
1060
1061 /* Build and return ADDR_EXPR for specified object OBJ. */
1062 static tree
1063 chkp_build_addr_expr (tree obj)
1064 {
1065 return TREE_CODE (obj) == TARGET_MEM_REF
1066 ? tree_mem_ref_addr (ptr_type_node, obj)
1067 : build_fold_addr_expr (obj);
1068 }
1069
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: lower bound is its address; SIZE is the
	 offset of the object's last byte.  NOTE(review):
	 TREE_STRING_LENGTH appears to include the trailing NUL
	 here, making SIZE = length - 1 — confirm against tree.h.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build a call to the checker's sizeof function to obtain
	 the object size at run time.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the dynamic size is zero, substitute -LB so that
	     LB + SIZE covers the address space up to its top.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last byte of the object.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      /* The current constructor is full: emit it with bounds-init
	 priority and start collecting a fresh statement list.  */
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1127
1128 /* Return entry block to be used for checker initilization code.
1129 Create new block if required. */
1130 static basic_block
1131 chkp_get_entry_block (void)
1132 {
1133 if (!entry_block)
1134 entry_block
1135 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1136
1137 return entry_block;
1138 }
1139
1140 /* Return a bounds var to be used for pointer var PTR_VAR. */
1141 static tree
1142 chkp_get_bounds_var (tree ptr_var)
1143 {
1144 tree bnd_var;
1145 tree *slot;
1146
1147 slot = chkp_bound_vars->get (ptr_var);
1148 if (slot)
1149 bnd_var = *slot;
1150 else
1151 {
1152 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1153 CHKP_BOUND_TMP_NAME);
1154 chkp_bound_vars->put (ptr_var, bnd_var);
1155 }
1156
1157 return bnd_var;
1158 }
1159
1160 /* If BND is an abnormal bounds copy, return a copied value.
1161 Otherwise return BND. */
1162 static tree
1163 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1164 {
1165 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1166 {
1167 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1168 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1169 bnd = gimple_assign_rhs1 (bnd_def);
1170 }
1171
1172 return bnd;
1173 }
1174
1175 /* Register bounds BND for object PTR in global bounds table.
1176 A copy of bounds may be created for abnormal ssa names.
1177 Returns bounds to use for PTR. */
1178 static tree
1179 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1180 {
1181 bool abnormal_ptr;
1182
1183 if (!chkp_reg_bounds)
1184 return bnd;
1185
1186 /* Do nothing if bounds are incomplete_bounds
1187 because it means bounds will be recomputed. */
1188 if (bnd == incomplete_bounds)
1189 return bnd;
1190
1191 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1192 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1193 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1194
1195 /* A single bounds value may be reused multiple times for
1196 different pointer values. It may cause coalescing issues
1197 for abnormal SSA names. To avoid it we create a bounds
1198 copy in case it is computed for abnormal SSA name.
1199
1200 We also cannot reuse such created copies for other pointers */
1201 if (abnormal_ptr
1202 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1203 {
1204 tree bnd_var = NULL_TREE;
1205
1206 if (abnormal_ptr)
1207 {
1208 if (SSA_NAME_VAR (ptr))
1209 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1210 }
1211 else
1212 bnd_var = chkp_get_tmp_var ();
1213
1214 /* For abnormal copies we may just find original
1215 bounds and use them. */
1216 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1217 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1218 /* For undefined values we usually use none bounds
1219 value but in case of abnormal edge it may cause
1220 coalescing failures. Use default definition of
1221 bounds variable instead to avoid it. */
1222 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1223 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1224 {
1225 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1226
1227 if (dump_file && (dump_flags & TDF_DETAILS))
1228 {
1229 fprintf (dump_file, "Using default def bounds ");
1230 print_generic_expr (dump_file, bnd, 0);
1231 fprintf (dump_file, " for abnormal default def SSA name ");
1232 print_generic_expr (dump_file, ptr, 0);
1233 fprintf (dump_file, "\n");
1234 }
1235 }
1236 else
1237 {
1238 tree copy;
1239 gimple def = SSA_NAME_DEF_STMT (ptr);
1240 gimple assign;
1241 gimple_stmt_iterator gsi;
1242
1243 if (bnd_var)
1244 copy = make_ssa_name (bnd_var);
1245 else
1246 copy = make_temp_ssa_name (pointer_bounds_type_node,
1247 NULL,
1248 CHKP_BOUND_TMP_NAME);
1249 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1250 assign = gimple_build_assign (copy, bnd);
1251
1252 if (dump_file && (dump_flags & TDF_DETAILS))
1253 {
1254 fprintf (dump_file, "Creating a copy of bounds ");
1255 print_generic_expr (dump_file, bnd, 0);
1256 fprintf (dump_file, " for abnormal SSA name ");
1257 print_generic_expr (dump_file, ptr, 0);
1258 fprintf (dump_file, "\n");
1259 }
1260
1261 if (gimple_code (def) == GIMPLE_NOP)
1262 {
1263 gsi = gsi_last_bb (chkp_get_entry_block ());
1264 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1265 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1266 else
1267 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1268 }
1269 else
1270 {
1271 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1272 /* Sometimes (e.g. when we load a pointer from a
1273 memory) bounds are produced later than a pointer.
1274 We need to insert bounds copy appropriately. */
1275 if (gimple_code (bnd_def) != GIMPLE_NOP
1276 && stmt_dominates_stmt_p (def, bnd_def))
1277 gsi = gsi_for_stmt (bnd_def);
1278 else
1279 gsi = gsi_for_stmt (def);
1280 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1281 }
1282
1283 bnd = copy;
1284 }
1285
1286 if (abnormal_ptr)
1287 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1288 }
1289
1290 chkp_reg_bounds->put (ptr, bnd);
1291
1292 if (dump_file && (dump_flags & TDF_DETAILS))
1293 {
1294 fprintf (dump_file, "Regsitered bound ");
1295 print_generic_expr (dump_file, bnd, 0);
1296 fprintf (dump_file, " for pointer ");
1297 print_generic_expr (dump_file, ptr, 0);
1298 fprintf (dump_file, "\n");
1299 }
1300
1301 return bnd;
1302 }
1303
1304 /* Get bounds registered for object PTR in global bounds table. */
1305 static tree
1306 chkp_get_registered_bounds (tree ptr)
1307 {
1308 tree *slot;
1309
1310 if (!chkp_reg_bounds)
1311 return NULL_TREE;
1312
1313 slot = chkp_reg_bounds->get (ptr);
1314 return slot ? *slot : NULL_TREE;
1315 }
1316
1317 /* Add bound retvals to return statement pointed by GSI. */
1318
1319 static void
1320 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1321 {
1322 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1323 tree retval = gimple_return_retval (ret);
1324 tree ret_decl = DECL_RESULT (cfun->decl);
1325 tree bounds;
1326
1327 if (!retval)
1328 return;
1329
1330 if (BOUNDED_P (ret_decl))
1331 {
1332 bounds = chkp_find_bounds (retval, gsi);
1333 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1334 gimple_return_set_retbnd (ret, bounds);
1335 }
1336
1337 update_stmt (ret);
1338 }
1339
1340 /* Force OP to be suitable for using as an argument for call.
1341 New statements (if any) go to SEQ. */
1342 static tree
1343 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1344 {
1345 gimple_seq stmts;
1346 gimple_stmt_iterator si;
1347
1348 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1349
1350 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1351 chkp_mark_stmt (gsi_stmt (si));
1352
1353 gimple_seq_add_seq (seq, stmts);
1354
1355 return op;
1356 }
1357
1358 /* Generate lower bound check for memory access by ADDR.
1359 Check is inserted before the position pointed by ITER.
1360 DIRFLAG indicates whether memory access is load or store. */
1361 static void
1362 chkp_check_lower (tree addr, tree bounds,
1363 gimple_stmt_iterator iter,
1364 location_t location,
1365 tree dirflag)
1366 {
1367 gimple_seq seq;
1368 gimple check;
1369 tree node;
1370
1371 if (!chkp_function_instrumented_p (current_function_decl)
1372 && bounds == chkp_get_zero_bounds ())
1373 return;
1374
1375 if (dirflag == integer_zero_node
1376 && !flag_chkp_check_read)
1377 return;
1378
1379 if (dirflag == integer_one_node
1380 && !flag_chkp_check_write)
1381 return;
1382
1383 seq = NULL;
1384
1385 node = chkp_force_gimple_call_op (addr, &seq);
1386
1387 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1388 chkp_mark_stmt (check);
1389 gimple_call_set_with_bounds (check, true);
1390 gimple_set_location (check, location);
1391 gimple_seq_add_stmt (&seq, check);
1392
1393 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1394
1395 if (dump_file && (dump_flags & TDF_DETAILS))
1396 {
1397 gimple before = gsi_stmt (iter);
1398 fprintf (dump_file, "Generated lower bound check for statement ");
1399 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1400 fprintf (dump_file, " ");
1401 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1402 }
1403 }
1404
1405 /* Generate upper bound check for memory access by ADDR.
1406 Check is inserted before the position pointed by ITER.
1407 DIRFLAG indicates whether memory access is load or store. */
1408 static void
1409 chkp_check_upper (tree addr, tree bounds,
1410 gimple_stmt_iterator iter,
1411 location_t location,
1412 tree dirflag)
1413 {
1414 gimple_seq seq;
1415 gimple check;
1416 tree node;
1417
1418 if (!chkp_function_instrumented_p (current_function_decl)
1419 && bounds == chkp_get_zero_bounds ())
1420 return;
1421
1422 if (dirflag == integer_zero_node
1423 && !flag_chkp_check_read)
1424 return;
1425
1426 if (dirflag == integer_one_node
1427 && !flag_chkp_check_write)
1428 return;
1429
1430 seq = NULL;
1431
1432 node = chkp_force_gimple_call_op (addr, &seq);
1433
1434 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1435 chkp_mark_stmt (check);
1436 gimple_call_set_with_bounds (check, true);
1437 gimple_set_location (check, location);
1438 gimple_seq_add_stmt (&seq, check);
1439
1440 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1441
1442 if (dump_file && (dump_flags & TDF_DETAILS))
1443 {
1444 gimple before = gsi_stmt (iter);
1445 fprintf (dump_file, "Generated upper bound check for statement ");
1446 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1447 fprintf (dump_file, " ");
1448 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1449 }
1450 }
1451
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* The lower bound is checked against the first accessed byte,
     the upper bound against the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1465
1466 /* Replace call to _bnd_chk_* pointed by GSI with
1467 bndcu and bndcl calls. DIRFLAG determines whether
1468 check is for read or write. */
1469
1470 void
1471 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1472 tree dirflag)
1473 {
1474 gimple_stmt_iterator call_iter = *gsi;
1475 gimple call = gsi_stmt (*gsi);
1476 tree fndecl = gimple_call_fndecl (call);
1477 tree addr = gimple_call_arg (call, 0);
1478 tree bounds = chkp_find_bounds (addr, gsi);
1479
1480 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1481 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1482 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1483
1484 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1485 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1486
1487 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1488 {
1489 tree size = gimple_call_arg (call, 1);
1490 addr = fold_build_pointer_plus (addr, size);
1491 addr = fold_build_pointer_plus_hwi (addr, -1);
1492 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1493 }
1494
1495 gsi_remove (&call_iter, true);
1496 }
1497
1498 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1499 corresponding bounds extract call. */
1500
1501 void
1502 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1503 {
1504 gimple call = gsi_stmt (*gsi);
1505 tree fndecl = gimple_call_fndecl (call);
1506 tree addr = gimple_call_arg (call, 0);
1507 tree bounds = chkp_find_bounds (addr, gsi);
1508 gimple extract;
1509
1510 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1511 fndecl = chkp_extract_lower_fndecl;
1512 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1513 fndecl = chkp_extract_upper_fndecl;
1514 else
1515 gcc_unreachable ();
1516
1517 extract = gimple_build_call (fndecl, 1, bounds);
1518 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1519 chkp_mark_stmt (extract);
1520
1521 gsi_replace (gsi, extract, false);
1522 }
1523
1524 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1525 static tree
1526 chkp_build_component_ref (tree obj, tree field)
1527 {
1528 tree res;
1529
1530 /* If object is TMR then we do not use component_ref but
1531 add offset instead. We need it to be able to get addr
1532 of the reasult later. */
1533 if (TREE_CODE (obj) == TARGET_MEM_REF)
1534 {
1535 tree offs = TMR_OFFSET (obj);
1536 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1537 offs, DECL_FIELD_OFFSET (field));
1538
1539 gcc_assert (offs);
1540
1541 res = copy_node (obj);
1542 TREE_TYPE (res) = TREE_TYPE (field);
1543 TMR_OFFSET (res) = offs;
1544 }
1545 else
1546 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1547
1548 return res;
1549 }
1550
1551 /* Return ARRAY_REF for array ARR and index IDX with
1552 specified element type ETYPE and element size ESIZE. */
1553 static tree
1554 chkp_build_array_ref (tree arr, tree etype, tree esize,
1555 unsigned HOST_WIDE_INT idx)
1556 {
1557 tree index = build_int_cst (size_type_node, idx);
1558 tree res;
1559
1560 /* If object is TMR then we do not use array_ref but
1561 add offset instead. We need it to be able to get addr
1562 of the reasult later. */
1563 if (TREE_CODE (arr) == TARGET_MEM_REF)
1564 {
1565 tree offs = TMR_OFFSET (arr);
1566
1567 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1568 esize, index);
1569 gcc_assert(esize);
1570
1571 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1572 offs, esize);
1573 gcc_assert (offs);
1574
1575 res = copy_node (arr);
1576 TREE_TYPE (res) = etype;
1577 TMR_OFFSET (res) = offs;
1578 }
1579 else
1580 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1581
1582 return res;
1583 }
1584
1585 /* Helper function for chkp_add_bounds_to_call_stmt.
1586 Fill ALL_BOUNDS output array with created bounds.
1587
1588 OFFS is used for recursive calls and holds basic
1589 offset of TYPE in outer structure in bits.
1590
1591 ITER points a position where bounds are searched.
1592
1593 ALL_BOUNDS[i] is filled with elem bounds if there
1594 is a field in TYPE which has pointer type and offset
1595 equal to i * POINTER_SIZE in bits. */
1596 static void
1597 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1598 HOST_WIDE_INT offs,
1599 gimple_stmt_iterator *iter)
1600 {
1601 tree type = TREE_TYPE (elem);
1602
1603 if (BOUNDED_TYPE_P (type))
1604 {
1605 if (!all_bounds[offs / POINTER_SIZE])
1606 {
1607 tree temp = make_temp_ssa_name (type, NULL, "");
1608 gimple assign = gimple_build_assign (temp, elem);
1609 gimple_stmt_iterator gsi;
1610
1611 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1612 gsi = gsi_for_stmt (assign);
1613
1614 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1615 }
1616 }
1617 else if (RECORD_OR_UNION_TYPE_P (type))
1618 {
1619 tree field;
1620
1621 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1622 if (TREE_CODE (field) == FIELD_DECL)
1623 {
1624 tree base = unshare_expr (elem);
1625 tree field_ref = chkp_build_component_ref (base, field);
1626 HOST_WIDE_INT field_offs
1627 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1628 if (DECL_FIELD_OFFSET (field))
1629 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1630
1631 chkp_find_bounds_for_elem (field_ref, all_bounds,
1632 offs + field_offs, iter);
1633 }
1634 }
1635 else if (TREE_CODE (type) == ARRAY_TYPE)
1636 {
1637 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1638 tree etype = TREE_TYPE (type);
1639 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1640 unsigned HOST_WIDE_INT cur;
1641
1642 if (!maxval || integer_minus_onep (maxval))
1643 return;
1644
1645 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1646 {
1647 tree base = unshare_expr (elem);
1648 tree arr_elem = chkp_build_array_ref (base, etype,
1649 TYPE_SIZE (etype),
1650 cur);
1651 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1652 iter);
1653 }
1654 }
1655 }
1656
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* Pointer type: mark the slot covering this offset.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into every field, accumulating its bit offset.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with no constant upper bound or of zero
	 length; nothing to mark for those.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1703
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from an empty bitmap; the recursive walker sets a bit
     for every pointer-sized slot that needs bounds.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1713
1714 /* Return 1 if call to FNDECL should be instrumented
1715 and 0 otherwise. */
1716
1717 static bool
1718 chkp_instrument_normal_builtin (tree fndecl)
1719 {
1720 switch (DECL_FUNCTION_CODE (fndecl))
1721 {
1722 case BUILT_IN_STRLEN:
1723 case BUILT_IN_STRCPY:
1724 case BUILT_IN_STRNCPY:
1725 case BUILT_IN_STPCPY:
1726 case BUILT_IN_STPNCPY:
1727 case BUILT_IN_STRCAT:
1728 case BUILT_IN_STRNCAT:
1729 case BUILT_IN_MEMCPY:
1730 case BUILT_IN_MEMPCPY:
1731 case BUILT_IN_MEMSET:
1732 case BUILT_IN_MEMMOVE:
1733 case BUILT_IN_BZERO:
1734 case BUILT_IN_STRCMP:
1735 case BUILT_IN_STRNCMP:
1736 case BUILT_IN_BCMP:
1737 case BUILT_IN_MEMCMP:
1738 case BUILT_IN_MEMCPY_CHK:
1739 case BUILT_IN_MEMPCPY_CHK:
1740 case BUILT_IN_MEMMOVE_CHK:
1741 case BUILT_IN_MEMSET_CHK:
1742 case BUILT_IN_STRCPY_CHK:
1743 case BUILT_IN_STRNCPY_CHK:
1744 case BUILT_IN_STPCPY_CHK:
1745 case BUILT_IN_STPNCPY_CHK:
1746 case BUILT_IN_STRCAT_CHK:
1747 case BUILT_IN_STRNCAT_CHK:
1748 case BUILT_IN_MALLOC:
1749 case BUILT_IN_CALLOC:
1750 case BUILT_IN_REALLOC:
1751 return 1;
1752
1753 default:
1754 return 0;
1755 }
1756 }
1757
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      /* always_inline builtins get a clone only when the clone
	 has a body we can actually inline.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  /* ARG walks TYPE_ARG_TYPES: a TREE_LIST whose TREE_VALUE
	     is the parameter type; void_type_node terminates it.  */
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    /* ARG walks DECL_ARGUMENTS: a chain of PARM_DECLs.  */
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A pointer (or pass-by-reference) argument gets a single
	   bounds argument appended right after it.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate containing pointers gets one bounds
	     argument per pointer-sized slot that holds a pointer.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original call.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      /* gimple_op (call, 1) is the called function operand.  */
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs at the new statement before replacing.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1975
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing decl; the one built above is dropped.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* Let the target encode [LB, UB] as the constant initializer.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2024
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Gimplify LB and SIZE so they may be used as call arguments.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build a bndmk call producing the bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an insertion point was supplied;
     with ITER == NULL the code always goes into the entry block.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  return bounds;
}
2077
2078 /* Return var holding zero bounds. */
2079 tree
2080 chkp_get_zero_bounds_var (void)
2081 {
2082 if (!chkp_zero_bounds_var)
2083 chkp_zero_bounds_var
2084 = chkp_make_static_const_bounds (0, -1,
2085 CHKP_ZERO_BOUNDS_VAR_NAME);
2086 return chkp_zero_bounds_var;
2087 }
2088
2089 /* Return var holding none bounds. */
2090 tree
2091 chkp_get_none_bounds_var (void)
2092 {
2093 if (!chkp_none_bounds_var)
2094 chkp_none_bounds_var
2095 = chkp_make_static_const_bounds (-1, 0,
2096 CHKP_NONE_BOUNDS_VAR_NAME);
2097 return chkp_none_bounds_var;
2098 }
2099
2100 /* Return SSA_NAME used to represent zero bounds. */
2101 static tree
2102 chkp_get_zero_bounds (void)
2103 {
2104 if (zero_bounds)
2105 return zero_bounds;
2106
2107 if (dump_file && (dump_flags & TDF_DETAILS))
2108 fprintf (dump_file, "Creating zero bounds...");
2109
2110 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2111 || flag_chkp_use_static_const_bounds > 0)
2112 {
2113 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2114 gimple stmt;
2115
2116 zero_bounds = chkp_get_tmp_reg (NULL);
2117 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2118 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2119 }
2120 else
2121 zero_bounds = chkp_make_bounds (integer_zero_node,
2122 integer_zero_node,
2123 NULL,
2124 false);
2125
2126 return zero_bounds;
2127 }
2128
2129 /* Return SSA_NAME used to represent none bounds. */
2130 static tree
2131 chkp_get_none_bounds (void)
2132 {
2133 if (none_bounds)
2134 return none_bounds;
2135
2136 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fprintf (dump_file, "Creating none bounds...");
2138
2139
2140 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2141 || flag_chkp_use_static_const_bounds > 0)
2142 {
2143 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2144 gimple stmt;
2145
2146 none_bounds = chkp_get_tmp_reg (NULL);
2147 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2148 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2149 }
2150 else
2151 none_bounds = chkp_make_bounds (integer_minus_one_node,
2152 build_int_cst (size_type_node, 2),
2153 NULL,
2154 false);
2155
2156 return none_bounds;
2157 }
2158
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Zero bounds
   are used (see chkp_get_zero_bounds_var: [0, -1]), so such
   values are effectively unchecked rather than trapped.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2166
/* Return bounds to be used for loads of non-pointer values.
   Currently zero bounds, i.e. such loads are not checked.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2173
/* Return true if we may use bndret call to get bounds for pointer
   returned by CALL.  The order of checks below is significant:
   e.g. the with-bounds check must precede the per-fndecl checks.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal calls carry no bounds.  */
  if (gimple_call_internal_p (call))
    return false;

  /* Bounds-narrowing builtins return a pointer with bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already marked as instrumented return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* Callees excluded from instrumentation return no bounds.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Builtins for which an instrumented version exists
	 do return bounds.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other builtins return bounds only when they are
	 always_inline and their instrumented clone has a body.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* Default: assume an instrumented callee that returns bounds.  */
  return true;
}
2211
/* Build bounds returned by CALL and register them for the
   call's LHS.  Special-cases alloca and the chkp builtins;
   in the general case emits a retbnd (chkp_ret_bnd_fndecl)
   call right after CALL.  Return the bounds.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Result of alloca is bounded by its requested size.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts only non-bounds arguments; skip
	     interleaved bounds args to translate it into an
	     actual argument index ARGNO.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* Callee cannot provide bounds; fall back to zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2318
2319 /* Return bounds used as returned by call
2320 which produced SSA name VAL. */
2321 gcall *
2322 chkp_retbnd_call_by_val (tree val)
2323 {
2324 if (TREE_CODE (val) != SSA_NAME)
2325 return NULL;
2326
2327 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2328
2329 imm_use_iterator use_iter;
2330 use_operand_p use_p;
2331 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2332 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2333 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2334 return as_a <gcall *> (USE_STMT (use_p));
2335
2336 return NULL;
2337 }
2338
2339 /* Check the next parameter for the given PARM is bounds
2340 and return it's default SSA_NAME (create if required). */
2341 static tree
2342 chkp_get_next_bounds_parm (tree parm)
2343 {
2344 tree bounds = TREE_CHAIN (parm);
2345 gcc_assert (POINTER_BOUNDS_P (bounds));
2346 bounds = ssa_default_def (cfun, bounds);
2347 if (!bounds)
2348 {
2349 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2350 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2351 }
2352 return bounds;
2353 }
2354
/* Return bounds to be used for input argument PARM, an SSA name
   whose underlying decl is a PARM_DECL.  Bounds come either from
   the registry, from the following (hidden) bounds parameter, or
   are zero bounds for special cases.  The result is registered
   for PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Prefer bounds already registered for the SSA name, then
     for the underlying decl.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer parms get bounds from the next (bounds)
	     parameter added by instrumentation.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	/* Non-pointer parms get zero bounds.  */
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2418
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments ADDR and PTR.  (The original comment said "bndstx";
   this function builds the bounds *load*.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as a bounds-aware chkp call.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2432
2433 /* Insert code to load bounds for PTR located by ADDR.
2434 Code is inserted after position pointed by GSI.
2435 Loaded bounds are returned. */
2436 static tree
2437 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2438 {
2439 gimple_seq seq;
2440 gimple stmt;
2441 tree bounds;
2442
2443 seq = NULL;
2444
2445 addr = chkp_force_gimple_call_op (addr, &seq);
2446 ptr = chkp_force_gimple_call_op (ptr, &seq);
2447
2448 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2449 chkp_mark_stmt (stmt);
2450 bounds = chkp_get_tmp_reg (stmt);
2451 gimple_call_set_lhs (stmt, bounds);
2452
2453 gimple_seq_add_stmt (&seq, stmt);
2454
2455 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2456
2457 if (dump_file && (dump_flags & TDF_DETAILS))
2458 {
2459 fprintf (dump_file, "Generated bndldx for pointer ");
2460 print_generic_expr (dump_file, ptr, 0);
2461 fprintf (dump_file, ": ");
2462 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2463 }
2464
2465 return bounds;
2466 }
2467
2468 /* Build and return CALL_EXPR for bndstx builtin with specified
2469 arguments. */
2470 tree
2471 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2472 {
2473 tree fn = build1 (ADDR_EXPR,
2474 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2475 chkp_bndstx_fndecl);
2476 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2477 fn, 3, ptr, bounds, addr);
2478 CALL_WITH_BOUNDS_P (call) = true;
2479 return call;
2480 }
2481
2482 /* Insert code to store BOUNDS for PTR stored by ADDR.
2483 New statements are inserted after position pointed
2484 by GSI. */
2485 void
2486 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2487 gimple_stmt_iterator *gsi)
2488 {
2489 gimple_seq seq;
2490 gimple stmt;
2491
2492 seq = NULL;
2493
2494 addr = chkp_force_gimple_call_op (addr, &seq);
2495 ptr = chkp_force_gimple_call_op (ptr, &seq);
2496
2497 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2498 chkp_mark_stmt (stmt);
2499 gimple_call_set_with_bounds (stmt, true);
2500
2501 gimple_seq_add_stmt (&seq, stmt);
2502
2503 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2504
2505 if (dump_file && (dump_flags & TDF_DETAILS))
2506 {
2507 fprintf (dump_file, "Generated bndstx for pointer store ");
2508 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2509 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2510 }
2511 }
2512
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   The result is additionally registered for NODE (when NODE
   is non-NULL).  May return INCOMPLETE_BOUNDS for values
   involved in not-yet-resolved PHI cycles; those are fixed
   up later in chkp_finish_incomplete_bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the parm's address for bndldx makes it
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   Note MINUS_EXPR never takes its bounds from the
	   second operand (ptr - x keeps ptr's bounds, but
	   x - ptr is not a pointer into ptr's object).  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds with the same condition as the value:
	       bounds = cond ? bnd1 : bnd2.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds of the chosen operand:
	       bounds = (rhs1 <cmp> rhs2) ? bnd1 : bnd2.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2718
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI, a bounds PHI node is created, *ITER is set to point
   to it, and the bounds are registered as incomplete (args are
   filled in later).

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on what NODE's var is.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known nonzero size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* NODE set in an abnormal PHI needs a bounds var (coalescing
	 constraints); otherwise an anonymous bounds temp is enough.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Bounds cannot be tracked through asm; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2831
2832 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2833 tree
2834 chkp_build_make_bounds_call (tree lower_bound, tree size)
2835 {
2836 tree call = build1 (ADDR_EXPR,
2837 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2838 chkp_bndmk_fndecl);
2839 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2840 call, 2, lower_bound, size);
2841 }
2842
2843 /* Create static bounds var of specfified OBJ which is
2844 is either VAR_DECL or string constant. */
2845 static tree
2846 chkp_make_static_bounds (tree obj)
2847 {
2848 static int string_id = 1;
2849 static int var_id = 1;
2850 tree *slot;
2851 const char *var_name;
2852 char *bnd_var_name;
2853 tree bnd_var;
2854
2855 /* First check if we already have required var. */
2856 if (chkp_static_var_bounds)
2857 {
2858 /* For vars we use assembler name as a key in
2859 chkp_static_var_bounds map. It allows to
2860 avoid duplicating bound vars for decls
2861 sharing assembler name. */
2862 if (TREE_CODE (obj) == VAR_DECL)
2863 {
2864 tree name = DECL_ASSEMBLER_NAME (obj);
2865 slot = chkp_static_var_bounds->get (name);
2866 if (slot)
2867 return *slot;
2868 }
2869 else
2870 {
2871 slot = chkp_static_var_bounds->get (obj);
2872 if (slot)
2873 return *slot;
2874 }
2875 }
2876
2877 /* Build decl for bounds var. */
2878 if (TREE_CODE (obj) == VAR_DECL)
2879 {
2880 if (DECL_IGNORED_P (obj))
2881 {
2882 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2883 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2884 }
2885 else
2886 {
2887 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2888
2889 /* For hidden symbols we want to skip first '*' char. */
2890 if (*var_name == '*')
2891 var_name++;
2892
2893 bnd_var_name = (char *) xmalloc (strlen (var_name)
2894 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2895 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2896 strcat (bnd_var_name, var_name);
2897 }
2898
2899 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2900 get_identifier (bnd_var_name),
2901 pointer_bounds_type_node);
2902
2903 /* Address of the obj will be used as lower bound. */
2904 TREE_ADDRESSABLE (obj) = 1;
2905 }
2906 else
2907 {
2908 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2909 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2910
2911 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2912 get_identifier (bnd_var_name),
2913 pointer_bounds_type_node);
2914 }
2915
2916 TREE_PUBLIC (bnd_var) = 0;
2917 TREE_USED (bnd_var) = 1;
2918 TREE_READONLY (bnd_var) = 0;
2919 TREE_STATIC (bnd_var) = 1;
2920 TREE_ADDRESSABLE (bnd_var) = 0;
2921 DECL_ARTIFICIAL (bnd_var) = 1;
2922 DECL_COMMON (bnd_var) = 1;
2923 DECL_COMDAT (bnd_var) = 1;
2924 DECL_READ_P (bnd_var) = 1;
2925 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2926 /* Force output similar to constant bounds.
2927 See chkp_make_static_const_bounds. */
2928 varpool_node::get_create (bnd_var)->force_output = 1;
2929 /* Mark symbol as requiring bounds initialization. */
2930 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2931 varpool_node::finalize_decl (bnd_var);
2932
2933 /* Add created var to the map to use it for other references
2934 to obj. */
2935 if (!chkp_static_var_bounds)
2936 chkp_static_var_bounds = new hash_map<tree, tree>;
2937
2938 if (TREE_CODE (obj) == VAR_DECL)
2939 {
2940 tree name = DECL_ASSEMBLER_NAME (obj);
2941 chkp_static_var_bounds->put (name, bnd_var);
2942 }
2943 else
2944 chkp_static_var_bounds->put (obj, bnd_var);
2945
2946 return bnd_var;
2947 }
2948
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   Generated statements are placed in the entry block;
   returns the created bounds.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* size_reloc = __chkp_sizeof (var) -- size resolved at runtime
     via a relocation.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = (size_reloc != 0) ? size_reloc : max_size.  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the whole sequence at the function entry and make
     bounds [lb, lb + size - 1] there.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3009
3010 /* Return 1 if TYPE has fields with zero size or fields
3011 marked with chkp_variable_size attribute. */
3012 bool
3013 chkp_variable_size_type (tree type)
3014 {
3015 bool res = false;
3016 tree field;
3017
3018 if (RECORD_OR_UNION_TYPE_P (type))
3019 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3020 {
3021 if (TREE_CODE (field) == FIELD_DECL)
3022 res = res
3023 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3024 || chkp_variable_size_type (TREE_TYPE (field));
3025 }
3026 else
3027 res = !TYPE_SIZE (type)
3028 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3029 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3030
3031 return res;
3032 }
3033
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  Depending on
   flags and decl properties the bounds are taken from the
   registry, loaded from a static bounds var, computed at
   runtime (incomplete type), or built from DECL_SIZE_UNIT.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* For non-TLS static/global vars load bounds from a
	 statically initialized bounds var at function entry.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time: determine it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Common case: bounds are [&decl, &decl + DECL_SIZE_UNIT).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3099
3100 /* Compute and return bounds for constant string. */
3101 static tree
3102 chkp_get_bounds_for_string_cst (tree cst)
3103 {
3104 tree bounds;
3105 tree lb;
3106 tree size;
3107
3108 gcc_assert (TREE_CODE (cst) == STRING_CST);
3109
3110 bounds = chkp_get_registered_bounds (cst);
3111
3112 if (bounds)
3113 return bounds;
3114
3115 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3116 || flag_chkp_use_static_const_bounds > 0)
3117 {
3118 tree bnd_var = chkp_make_static_bounds (cst);
3119 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3120 gimple stmt;
3121
3122 bounds = chkp_get_tmp_reg (NULL);
3123 stmt = gimple_build_assign (bounds, bnd_var);
3124 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3125 }
3126 else
3127 {
3128 lb = chkp_build_addr_expr (cst);
3129 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3130 bounds = chkp_make_bounds (lb, size, NULL, false);
3131 }
3132
3133 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3134
3135 return bounds;
3136 }
3137
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   the end of the entry block.  Zero or missing bounds act as a
   neutral element, so no intersection call is emitted for them.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  /* ITER is redirected at the local GSI so the dump code
	     below can use *ITER in both branches.  */
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3188
/* Return 1 if we are allowed to narrow bounds for addressed FIELD
   and 0 otherwise.  */
3191 static bool
3192 chkp_may_narrow_to_field (tree field)
3193 {
3194 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3195 && tree_to_uhwi (DECL_SIZE (field)) != 0
3196 && (!DECL_FIELD_OFFSET (field)
3197 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3198 && (!DECL_FIELD_BIT_OFFSET (field)
3199 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3200 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3201 && !chkp_variable_size_type (TREE_TYPE (field));
3202 }
3203
3204 /* Return 1 if bounds for FIELD should be narrowed to
3205 field's own size. */
3206 static bool
3207 chkp_narrow_bounds_for_field (tree field)
3208 {
3209 HOST_WIDE_INT offs;
3210 HOST_WIDE_INT bit_offs;
3211
3212 if (!chkp_may_narrow_to_field (field))
3213 return false;
3214
3215 /* Accesse to compiler generated fields should not cause
3216 bounds narrowing. */
3217 if (DECL_ARTIFICIAL (field))
3218 return false;
3219
3220 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3221 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3222
3223 return (flag_chkp_narrow_bounds
3224 && (flag_chkp_first_field_has_own_bounds
3225 || offs
3226 || bit_offs));
3227 }
3228
3229 /* Perform narrowing for BOUNDS using bounds computed for field
3230 access COMPONENT. ITER meaning is the same as for
3231 chkp_intersect_bounds. */
3232 static tree
3233 chkp_narrow_bounds_to_field (tree bounds, tree component,
3234 gimple_stmt_iterator *iter)
3235 {
3236 tree field = TREE_OPERAND (component, 1);
3237 tree size = DECL_SIZE_UNIT (field);
3238 tree field_ptr = chkp_build_addr_expr (component);
3239 tree field_bounds;
3240
3241 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3242
3243 return chkp_intersect_bounds (field_bounds, bounds, iter);
3244 }
3245
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if a bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds the
   outer component for the accessed bit field.

   SAFE output parameter is set to 1 if the access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check the
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  /* NODE itself is one of the handled codes, so the chain has at
     least the base object plus one access node.  */
  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      /* A dereference is never provably in bounds here.  */
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* The base is a plain object; its address is the outermost
	 pointer.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing cannot be proven safe here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component containing the
		 leftmost array reference and stop searching.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow eagerly to each array-typed field; any pending
		 candidate is superseded.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  /* Apply the narrowing chosen by the loop above, if any.  */
  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* Fall back to the outermost pointer's bounds when no narrowing
     produced anything but innermost bounds were requested.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3387
3388 /* Compute and return bounds for address of OBJ. */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Reuse bounds previously registered for this object's address.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Parse the access chain; with innermost_bounds = true this
	   always produces (possibly narrowed) bounds.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Addresses of functions and labels get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* A complex part shares the bounds of the whole complex
	 object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result so later queries for the same address return
     the same bounds.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3455
/* Compute bounds for pointer PTR loaded from PTR_SRC.  Generate statements
   to compute bounds if required.  Computed bounds should be available at
   the position pointed to by ITER.

   If PTR_SRC is NULL_TREE then the pointer definition is identified.

   If PTR_SRC is not NULL_TREE then ITER points to the statement which loads
   PTR.  If PTR is any memory reference then ITER points to a statement
   after which bndldx will be inserted.  In both cases ITER will be updated
   to point to the inserted bndldx statement.  */
3466
3467 static tree
3468 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3469 {
3470 tree addr = NULL_TREE;
3471 tree bounds = NULL_TREE;
3472
3473 if (!ptr_src)
3474 ptr_src = ptr;
3475
3476 bounds = chkp_get_registered_bounds (ptr_src);
3477
3478 if (bounds)
3479 return bounds;
3480
3481 switch (TREE_CODE (ptr_src))
3482 {
3483 case MEM_REF:
3484 case VAR_DECL:
3485 if (BOUNDED_P (ptr_src))
3486 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3487 bounds = chkp_get_zero_bounds ();
3488 else
3489 {
3490 addr = chkp_build_addr_expr (ptr_src);
3491 bounds = chkp_build_bndldx (addr, ptr, iter);
3492 }
3493 else
3494 bounds = chkp_get_nonpointer_load_bounds ();
3495 break;
3496
3497 case ARRAY_REF:
3498 case COMPONENT_REF:
3499 addr = get_base_address (ptr_src);
3500 if (DECL_P (addr)
3501 || TREE_CODE (addr) == MEM_REF
3502 || TREE_CODE (addr) == TARGET_MEM_REF)
3503 {
3504 if (BOUNDED_P (ptr_src))
3505 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3506 bounds = chkp_get_zero_bounds ();
3507 else
3508 {
3509 addr = chkp_build_addr_expr (ptr_src);
3510 bounds = chkp_build_bndldx (addr, ptr, iter);
3511 }
3512 else
3513 bounds = chkp_get_nonpointer_load_bounds ();
3514 }
3515 else
3516 {
3517 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3518 bounds = chkp_find_bounds (addr, iter);
3519 }
3520 break;
3521
3522 case PARM_DECL:
3523 gcc_unreachable ();
3524 bounds = chkp_get_bound_for_parm (ptr_src);
3525 break;
3526
3527 case TARGET_MEM_REF:
3528 addr = chkp_build_addr_expr (ptr_src);
3529 bounds = chkp_build_bndldx (addr, ptr, iter);
3530 break;
3531
3532 case SSA_NAME:
3533 bounds = chkp_get_registered_bounds (ptr_src);
3534 if (!bounds)
3535 {
3536 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3537 gphi_iterator phi_iter;
3538
3539 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3540
3541 gcc_assert (bounds);
3542
3543 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3544 {
3545 unsigned i;
3546
3547 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3548 {
3549 tree arg = gimple_phi_arg_def (def_phi, i);
3550 tree arg_bnd;
3551 gphi *phi_bnd;
3552
3553 arg_bnd = chkp_find_bounds (arg, NULL);
3554
3555 /* chkp_get_bounds_by_definition created new phi
3556 statement and phi_iter points to it.
3557
3558 Previous call to chkp_find_bounds could create
3559 new basic block and therefore change phi statement
3560 phi_iter points to. */
3561 phi_bnd = phi_iter.phi ();
3562
3563 add_phi_arg (phi_bnd, arg_bnd,
3564 gimple_phi_arg_edge (def_phi, i),
3565 UNKNOWN_LOCATION);
3566 }
3567
3568 /* If all bound phi nodes have their arg computed
3569 then we may finish its computation. See
3570 chkp_finish_incomplete_bounds for more details. */
3571 if (chkp_may_finish_incomplete_bounds ())
3572 chkp_finish_incomplete_bounds ();
3573 }
3574
3575 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3576 || chkp_incomplete_bounds (bounds));
3577 }
3578 break;
3579
3580 case ADDR_EXPR:
3581 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3582 break;
3583
3584 case INTEGER_CST:
3585 if (integer_zerop (ptr_src))
3586 bounds = chkp_get_none_bounds ();
3587 else
3588 bounds = chkp_get_invalid_op_bounds ();
3589 break;
3590
3591 default:
3592 if (dump_file && (dump_flags & TDF_DETAILS))
3593 {
3594 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3595 get_tree_code_name (TREE_CODE (ptr_src)));
3596 print_node (dump_file, "", ptr_src, 0);
3597 }
3598 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3599 get_tree_code_name (TREE_CODE (ptr_src)));
3600 }
3601
3602 if (!bounds)
3603 {
3604 if (dump_file && (dump_flags & TDF_DETAILS))
3605 {
3606 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3607 print_node (dump_file, "", ptr_src, 0);
3608 }
3609 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3610 }
3611
3612 return bounds;
3613 }
3614
3615 /* Normal case for bounds search without forced narrowing. */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  /* No separate load source: let chkp_find_bounds_1 identify the
     pointer's definition itself.  */
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3621
3622 /* Search bounds for pointer PTR loaded from PTR_SRC
3623 by statement *ITER points to. */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  /* Thin wrapper: PTR was loaded from PTR_SRC by the statement *ITER
     points to; delegate to the common implementation.  */
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3629
/* Helper function which checks the type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for a copied object.  */
3634 static void
3635 chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
3636 assign_handler handler)
3637 {
3638 tree type = TREE_TYPE (lhs);
3639
3640 /* We have nothing to do with clobbers. */
3641 if (TREE_CLOBBER_P (rhs))
3642 return;
3643
3644 if (BOUNDED_TYPE_P (type))
3645 handler (lhs, rhs, arg);
3646 else if (RECORD_OR_UNION_TYPE_P (type))
3647 {
3648 tree field;
3649
3650 if (TREE_CODE (rhs) == CONSTRUCTOR)
3651 {
3652 unsigned HOST_WIDE_INT cnt;
3653 tree val;
3654
3655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
3656 {
3657 if (chkp_type_has_pointer (TREE_TYPE (field)))
3658 {
3659 tree lhs_field = chkp_build_component_ref (lhs, field);
3660 chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
3661 }
3662 }
3663 }
3664 else
3665 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3666 if (TREE_CODE (field) == FIELD_DECL
3667 && chkp_type_has_pointer (TREE_TYPE (field)))
3668 {
3669 tree rhs_field = chkp_build_component_ref (rhs, field);
3670 tree lhs_field = chkp_build_component_ref (lhs, field);
3671 chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
3672 }
3673 }
3674 else if (TREE_CODE (type) == ARRAY_TYPE)
3675 {
3676 unsigned HOST_WIDE_INT cur = 0;
3677 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3678 tree etype = TREE_TYPE (type);
3679 tree esize = TYPE_SIZE (etype);
3680
3681 if (TREE_CODE (rhs) == CONSTRUCTOR)
3682 {
3683 unsigned HOST_WIDE_INT cnt;
3684 tree purp, val, lhs_elem;
3685
3686 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
3687 {
3688 if (purp && TREE_CODE (purp) == RANGE_EXPR)
3689 {
3690 tree lo_index = TREE_OPERAND (purp, 0);
3691 tree hi_index = TREE_OPERAND (purp, 1);
3692
3693 for (cur = (unsigned)tree_to_uhwi (lo_index);
3694 cur <= (unsigned)tree_to_uhwi (hi_index);
3695 cur++)
3696 {
3697 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3698 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3699 }
3700 }
3701 else
3702 {
3703 if (purp)
3704 {
3705 gcc_assert (TREE_CODE (purp) == INTEGER_CST);
3706 cur = tree_to_uhwi (purp);
3707 }
3708
3709 lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);
3710
3711 chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
3712 }
3713 }
3714 }
3715 /* Copy array only when size is known. */
3716 else if (maxval && !integer_minus_onep (maxval))
3717 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
3718 {
3719 tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
3720 tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
3721 chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
3722 }
3723 }
3724 else
3725 internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
3726 get_tree_code_name (TREE_CODE (type)));
3727 }
3728
/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the code position.  */
3731 static void
3732 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3733 {
3734 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3735 tree bounds = chkp_find_bounds (rhs, iter);
3736 tree addr = chkp_build_addr_expr(lhs);
3737
3738 chkp_build_bndstx (addr, rhs, bounds, iter);
3739 }
3740
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Nothing to emit if compilation already failed.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush a full batch of statements into a 'P' constructor
	   and start a new batch.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit the remaining (partial) batch, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* Static bounds vars are initialized with the address of the
	   object they describe.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release pass-global maps; the pass is done for this unit.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3808
3809 /* An instrumentation function which is called for each statement
3810 having memory access we want to instrument. It inserts check
3811 code and bounds copy code.
3812
3813 ITER points to statement to instrument.
3814
3815 NODE holds memory access in statement to check.
3816
3817 LOC holds the location information for statement.
3818
   DIRFLAG determines whether access is read or write.
3820
3821 ACCESS_OFFS should be added to address used in NODE
3822 before check.
3823
3824 ACCESS_SIZE holds size of checked access.
3825
3826 SAFE indicates if NODE access is safe and should not be
3827 checked. */
3828 static void
3829 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3830 location_t loc, tree dirflag,
3831 tree access_offs, tree access_size,
3832 bool safe)
3833 {
3834 tree node_type = TREE_TYPE (node);
3835 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3836 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3837 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3838 tree ptr = NULL_TREE; /* a pointer used for dereference */
3839 tree bounds = NULL_TREE;
3840
3841 /* We do not need instrumentation for clobbers. */
3842 if (dirflag == integer_one_node
3843 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3844 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3845 return;
3846
3847 switch (TREE_CODE (node))
3848 {
3849 case ARRAY_REF:
3850 case COMPONENT_REF:
3851 {
3852 bool bitfield;
3853 tree elt;
3854
3855 if (safe)
3856 {
3857 /* We are not going to generate any checks, so do not
3858 generate bounds as well. */
3859 addr_first = chkp_build_addr_expr (node);
3860 break;
3861 }
3862
3863 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3864 &bitfield, &bounds, iter, false);
3865
3866 /* Break if there is no dereference and operation is safe. */
3867
3868 if (bitfield)
3869 {
3870 tree field = TREE_OPERAND (node, 1);
3871
3872 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3873 size = DECL_SIZE_UNIT (field);
3874
3875 if (elt)
3876 elt = chkp_build_addr_expr (elt);
3877 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3878 addr_first = fold_build_pointer_plus_loc (loc,
3879 addr_first,
3880 byte_position (field));
3881 }
3882 else
3883 addr_first = chkp_build_addr_expr (node);
3884 }
3885 break;
3886
3887 case INDIRECT_REF:
3888 ptr = TREE_OPERAND (node, 0);
3889 addr_first = ptr;
3890 break;
3891
3892 case MEM_REF:
3893 ptr = TREE_OPERAND (node, 0);
3894 addr_first = chkp_build_addr_expr (node);
3895 break;
3896
3897 case TARGET_MEM_REF:
3898 ptr = TMR_BASE (node);
3899 addr_first = chkp_build_addr_expr (node);
3900 break;
3901
3902 case ARRAY_RANGE_REF:
3903 printf("ARRAY_RANGE_REF\n");
3904 debug_gimple_stmt(gsi_stmt(*iter));
3905 debug_tree(node);
3906 gcc_unreachable ();
3907 break;
3908
3909 case BIT_FIELD_REF:
3910 {
3911 tree offs, rem, bpu;
3912
3913 gcc_assert (!access_offs);
3914 gcc_assert (!access_size);
3915
3916 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3917 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3918 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3919 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3920
3921 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3922 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3923 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3924 size = fold_convert (size_type_node, size);
3925
3926 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3927 dirflag, offs, size, safe);
3928 return;
3929 }
3930 break;
3931
3932 case VAR_DECL:
3933 case RESULT_DECL:
3934 case PARM_DECL:
3935 if (dirflag != integer_one_node
3936 || DECL_REGISTER (node))
3937 return;
3938
3939 safe = true;
3940 addr_first = chkp_build_addr_expr (node);
3941 break;
3942
3943 default:
3944 return;
3945 }
3946
3947 /* If addr_last was not computed then use (addr_first + size - 1)
3948 expression to compute it. */
3949 if (!addr_last)
3950 {
3951 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3952 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3953 }
3954
3955 /* Shift both first_addr and last_addr by access_offs if specified. */
3956 if (access_offs)
3957 {
3958 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3959 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3960 }
3961
3962 /* Generate bndcl/bndcu checks if memory access is not safe. */
3963 if (!safe)
3964 {
3965 gimple_stmt_iterator stmt_iter = *iter;
3966
3967 if (!bounds)
3968 bounds = chkp_find_bounds (ptr, iter);
3969
3970 chkp_check_mem_access (addr_first, addr_last, bounds,
3971 stmt_iter, loc, dirflag);
3972 }
3973
3974 /* We need to store bounds in case pointer is stored. */
3975 if (dirflag == integer_one_node
3976 && chkp_type_has_pointer (node_type)
3977 && flag_chkp_store_bounds)
3978 {
3979 gimple stmt = gsi_stmt (*iter);
3980 tree rhs1 = gimple_assign_rhs1 (stmt);
3981 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3982
3983 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3984 chkp_walk_pointer_assignments (node, rhs1, iter,
3985 chkp_copy_bounds_for_elem);
3986 else
3987 {
3988 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3989 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3990 }
3991 }
3992 }
3993
3994 /* Add code to copy bounds for all pointers copied
3995 in ASSIGN created during inline of EDGE. */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  /* Emit bounds load/store code for every pointer copied by ASSIGN.  */
  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp runtime helpers are expected among the newly
	     created calls.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* Mirror the inlined call's profile data on the new edge.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4032
4033 /* Some code transformation made during instrumentation pass
4034 may put code into inconsistent state. Here we find and fix
4035 such flaws. */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a block-ending statement must be moved
	   onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    /* Materialize the pending edge insertions (this splits
	       the fallthru edge).  */
	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4090
4091 /* Walker callback for chkp_replace_function_pointers. Replaces
4092 function pointer in the specified operand with pointer to the
4093 instrumented function version. */
4094 static tree
4095 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4096 void *data ATTRIBUTE_UNUSED)
4097 {
4098 if (TREE_CODE (*op) == FUNCTION_DECL
4099 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
4100 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4101 /* For builtins we replace pointers only for selected
4102 function and functions having definitions. */
4103 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4104 && (chkp_instrument_normal_builtin (*op)
4105 || gimple_has_body_p (*op)))))
4106 {
4107 struct cgraph_node *node = cgraph_node::get_create (*op);
4108 struct cgraph_node *clone = NULL;
4109
4110 if (!node->instrumentation_clone)
4111 clone = chkp_maybe_create_clone (*op);
4112
4113 if (clone)
4114 *op = clone->decl;
4115 *walk_subtrees = 0;
4116 }
4117
4118 return NULL;
4119 }
4120
4121 /* This function searches for function pointers in statement
4122 pointed by GSI and replaces them with pointers to instrumented
4123 function versions. */
4124 static void
4125 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4126 {
4127 gimple stmt = gsi_stmt (*gsi);
4128 /* For calls we want to walk call args only. */
4129 if (gimple_code (stmt) == GIMPLE_CALL)
4130 {
4131 unsigned i;
4132 for (i = 0; i < gimple_call_num_args (stmt); i++)
4133 walk_tree (gimple_call_arg_ptr (stmt, i),
4134 chkp_replace_function_pointer, NULL, NULL);
4135 }
4136 else
4137 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4138 }
4139
4140 /* This function instruments all statements working with memory,
4141 calls and rets.
4142
4143 It also removes excess statements from static initializers. */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In chkp-generated constructors all accesses are known safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor: instrumentation may insert blocks.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  /* Redirect function pointers to instrumented clones first.  */
	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is a store (dirflag = 1), RHS operands are
		 loads (dirflag = 0).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      /* I was advanced past S above, so removing S here is
		 safe.  */
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* A pointer param: store its incoming bounds at the
		 param's own address.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* An aggregate param containing pointers: store bounds
		 for each pointer slot found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4275
4276 /* Find init/null/copy_ptr_bounds calls and replace them
4277 with assignments. It should allow better code
4278 optimization. */
4279
4280 static void
4281 chkp_remove_useless_builtins ()
4282 {
4283 basic_block bb;
4284 gimple_stmt_iterator gsi;
4285
4286 FOR_EACH_BB_FN (bb, cfun)
4287 {
4288 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4289 {
4290 gimple stmt = gsi_stmt (gsi);
4291 tree fndecl;
4292 enum built_in_function fcode;
4293
4294 /* Find builtins returning first arg and replace
4295 them with assignments. */
4296 if (gimple_code (stmt) == GIMPLE_CALL
4297 && (fndecl = gimple_call_fndecl (stmt))
4298 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4299 && (fcode = DECL_FUNCTION_CODE (fndecl))
4300 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4301 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4302 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4303 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4304 {
4305 tree res = gimple_call_arg (stmt, 0);
4306 update_call_from_tree (&gsi, res);
4307 stmt = gsi_stmt (gsi);
4308 update_stmt (stmt);
4309 }
4310 }
4311 }
4312 }
4313
4314 /* Initialize pass. */
4315 static void
4316 chkp_init (void)
4317 {
4318 basic_block bb;
4319 gimple_stmt_iterator i;
4320
4321 in_chkp_pass = true;
4322
4323 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
4324 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
4325 chkp_unmark_stmt (gsi_stmt (i));
4326
4327 chkp_invalid_bounds = new hash_set<tree>;
4328 chkp_completed_bounds_set = new hash_set<tree>;
4329 delete chkp_reg_bounds;
4330 chkp_reg_bounds = new hash_map<tree, tree>;
4331 delete chkp_bound_vars;
4332 chkp_bound_vars = new hash_map<tree, tree>;
4333 chkp_reg_addr_bounds = new hash_map<tree, tree>;
4334 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
4335 delete chkp_bounds_map;
4336 chkp_bounds_map = new hash_map<tree, tree>;
4337 chkp_abnormal_copies = BITMAP_GGC_ALLOC ();
4338
4339 entry_block = NULL;
4340 zero_bounds = NULL_TREE;
4341 none_bounds = NULL_TREE;
4342 incomplete_bounds = integer_zero_node;
4343 tmp_var = NULL_TREE;
4344 size_tmp_var = NULL_TREE;
4345
4346 chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);
4347
4348 /* We create these constant bounds once for each object file.
4349 These symbols go to comdat section and result in single copy
4350 of each one in the final binary. */
4351 chkp_get_zero_bounds_var ();
4352 chkp_get_none_bounds_var ();
4353
4354 calculate_dominance_info (CDI_DOMINATORS);
4355 calculate_dominance_info (CDI_POST_DOMINATORS);
4356
4357 bitmap_obstack_initialize (NULL);
4358 }
4359
/* Finalize instrumentation pass.  Releases the per-pass containers
   and analysis info acquired by chkp_init.  The persistent maps
   (chkp_reg_bounds, chkp_bound_vars, chkp_bounds_map) are not freed
   here; the next chkp_init deletes and recreates them.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  /* Per-pass containers allocated by chkp_init.  */
  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  /* Drop the dominance info computed in chkp_init.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Reset cached per-function values so stale trees are not
     reused for the next function.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4380
/* Main instrumentation pass function.  Runs the fixed sequence of
   pass phases; the order is significant (init must precede
   instrumentation, fini must come last).  Always returns 0 (no
   additional TODO flags).  */
static unsigned int
chkp_execute (void)
{
  /* Set up bounds containers, constant bounds vars and dominance
     info (see chkp_init).  */
  chkp_init ();

  /* Insert bounds computation and checking code.  */
  chkp_instrument_function ();

  /* Turn init/null/copy/set_ptr_bounds builtin calls into plain
     assignments of their first argument.  */
  chkp_remove_useless_builtins ();

  /* NOTE(review): presumably marks the function so it is not
     instrumented a second time — confirm against the definition of
     chkp_function_mark_instrumented.  */
  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  /* Release the state acquired by chkp_init.  */
  chkp_fini ();

  return 0;
}
4399
4400 /* Instrumentation pass gate. */
4401 static bool
4402 chkp_gate (void)
4403 {
4404 cgraph_node *node = cgraph_node::get (cfun->decl);
4405 return ((node != NULL
4406 && node->instrumentation_clone)
4407 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4408 }
4409
namespace {

/* Pass descriptor for the Pointer Bounds Checker instrumentation
   pass.  Requires SSA form and a CFG; SSA is updated and the IL
   verified when the pass finishes.  Field order is fixed by the
   pass_data aggregate.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass object wrapping the chkp gate and execute functions defined
   above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4452
/* Allocate a new instance of the chkp instrumentation pass for
   context CTXT.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4458
4459 #include "gt-tree-chkp.h"