]>
Commit | Line | Data |
---|---|---|
d5e254e1 | 1 | /* Pointer Bounds Checker instrumentation pass. |
5624e564 | 2 | Copyright (C) 2014-2015 Free Software Foundation, Inc. |
d5e254e1 IE |
3 | Contributed by Ilya Enkovich (ilya.enkovich@intel.com) |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
40e23961 MC |
24 | #include "hash-set.h" |
25 | #include "machmode.h" | |
26 | #include "vec.h" | |
27 | #include "double-int.h" | |
28 | #include "input.h" | |
29 | #include "alias.h" | |
30 | #include "symtab.h" | |
31 | #include "options.h" | |
32 | #include "wide-int.h" | |
33 | #include "inchash.h" | |
34 | #include "tree.h" | |
35 | #include "fold-const.h" | |
d5e254e1 IE |
36 | #include "stor-layout.h" |
37 | #include "varasm.h" | |
d5e254e1 IE |
38 | #include "target.h" |
39 | #include "tree-iterator.h" | |
40 | #include "tree-cfg.h" | |
41 | #include "langhooks.h" | |
42 | #include "tree-pass.h" | |
43 | #include "diagnostic.h" | |
44 | #include "ggc.h" | |
45 | #include "is-a.h" | |
46 | #include "cfgloop.h" | |
47 | #include "stringpool.h" | |
48 | #include "tree-ssa-alias.h" | |
49 | #include "tree-ssanames.h" | |
50 | #include "tree-ssa-operands.h" | |
51 | #include "tree-ssa-address.h" | |
52 | #include "tree-ssa.h" | |
53 | #include "predict.h" | |
54 | #include "dominance.h" | |
55 | #include "cfg.h" | |
56 | #include "basic-block.h" | |
57 | #include "tree-ssa-loop-niter.h" | |
58 | #include "gimple-expr.h" | |
59 | #include "gimple.h" | |
60 | #include "tree-phinodes.h" | |
61 | #include "gimple-ssa.h" | |
62 | #include "ssa-iterators.h" | |
63 | #include "gimple-pretty-print.h" | |
64 | #include "gimple-iterator.h" | |
65 | #include "gimplify.h" | |
66 | #include "gimplify-me.h" | |
67 | #include "print-tree.h" | |
36566b39 PK |
68 | #include "hashtab.h" |
69 | #include "tm.h" | |
70 | #include "hard-reg-set.h" | |
71 | #include "function.h" | |
72 | #include "rtl.h" | |
73 | #include "flags.h" | |
74 | #include "statistics.h" | |
75 | #include "real.h" | |
76 | #include "fixed-value.h" | |
77 | #include "insn-config.h" | |
78 | #include "expmed.h" | |
79 | #include "dojump.h" | |
80 | #include "explow.h" | |
81 | #include "calls.h" | |
82 | #include "emit-rtl.h" | |
83 | #include "stmt.h" | |
d5e254e1 IE |
84 | #include "expr.h" |
85 | #include "tree-ssa-propagate.h" | |
86 | #include "gimple-fold.h" | |
87 | #include "tree-chkp.h" | |
88 | #include "gimple-walk.h" | |
89 | #include "rtl.h" /* For MEM_P, assign_temp. */ | |
90 | #include "tree-dfa.h" | |
91 | #include "ipa-ref.h" | |
92 | #include "lto-streamer.h" | |
93 | #include "cgraph.h" | |
94 | #include "ipa-chkp.h" | |
95 | #include "params.h" | |
d5e254e1 IE |
96 | |
97 | /* Pointer Bounds Checker instruments code with memory checks to find | |
98 | out-of-bounds memory accesses. Checks are performed by computing | |
99 | bounds for each pointer and then comparing address of accessed | |
100 | memory before pointer dereferencing. | |
101 | ||
102 | 1. Function clones. | |
103 | ||
104 | See ipa-chkp.c. | |
105 | ||
106 | 2. Instrumentation. | |
107 | ||
108 | There are few things to instrument: | |
109 | ||
110 | a) Memory accesses - add checker calls to check address of accessed memory | |
111 | against bounds of dereferenced pointer. Obviously safe memory | |
112 | accesses like static variable access does not have to be instrumented | |
113 | with checks. | |
114 | ||
115 | Example: | |
116 | ||
117 | val_2 = *p_1; | |
118 | ||
119 | with 4 bytes access is transformed into: | |
120 | ||
121 | __builtin___chkp_bndcl (__bound_tmp.1_3, p_1); | |
122 | D.1_4 = p_1 + 3; | |
123 | __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4); | |
124 | val_2 = *p_1; | |
125 | ||
126 | where __bound_tmp.1_3 are bounds computed for pointer p_1, | |
127 | __builtin___chkp_bndcl is a lower bound check and | |
128 | __builtin___chkp_bndcu is an upper bound check. | |
129 | ||
130 | b) Pointer stores. | |
131 | ||
132 | When pointer is stored in memory we need to store its bounds. To | |
133 | achieve compatibility of instrumented code with regular codes | |
134 | we have to keep data layout and store bounds in special bound tables | |
135 | via special checker call. Implementation of bounds table may vary for | |
136 | different platforms. It has to associate pointer value and its | |
137 | location (it is required because we may have two equal pointers | |
138 | with different bounds stored in different places) with bounds. | |
139 | Another checker builtin allows to get bounds for specified pointer | |
140 | loaded from specified location. | |
141 | ||
142 | Example: | |
143 | ||
144 | buf1[i_1] = &buf2; | |
145 | ||
146 | is transformed into: | |
147 | ||
148 | buf1[i_1] = &buf2; | |
149 | D.1_2 = &buf1[i_1]; | |
150 | __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2); | |
151 | ||
152 | where __bound_tmp.1_2 are bounds of &buf2. | |
153 | ||
154 | c) Static initialization. | |
155 | ||
156 | The special case of pointer store is static pointer initialization. | |
157 | Bounds initialization is performed in a few steps: | |
158 | - register all static initializations in front-end using | |
159 | chkp_register_var_initializer | |
160 | - when file compilation finishes we create functions with special | |
161 | attribute 'chkp ctor' and put explicit initialization code | |
162 | (assignments) for all statically initialized pointers. | |
163 | - when checker constructor is compiled checker pass adds required | |
164 | bounds initialization for all statically initialized pointers | |
165 | - since we do not actually need excess pointers initialization | |
166 | in checker constructor we remove such assignments from them | |
167 | ||
168 | d) Calls. | |
169 | ||
170 | For each call in the code we add additional arguments to pass | |
171 | bounds for pointer arguments. We determine type of call arguments | |
172 | using arguments list from function declaration; if function | |
173 | declaration is not available we use function type; otherwise | |
174 | (e.g. for unnamed arguments) we use type of passed value. Function | |
175 | declaration/type is replaced with the instrumented one. | |
176 | ||
177 | Example: | |
178 | ||
179 | val_1 = foo (&buf1, &buf2, &buf1, 0); | |
180 | ||
181 | is translated into: | |
182 | ||
183 | val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3, | |
184 | &buf1, __bound_tmp.1_2, 0); | |
185 | ||
186 | e) Returns. | |
187 | ||
188 | If function returns a pointer value we have to return bounds also. | |
189 | A new operand was added for return statement to hold returned bounds. | |
190 | ||
191 | Example: | |
192 | ||
193 | return &_buf1; | |
194 | ||
195 | is transformed into | |
196 | ||
197 | return &_buf1, __bound_tmp.1_1; | |
198 | ||
199 | 3. Bounds computation. | |
200 | ||
201 | Compiler is fully responsible for computing bounds to be used for each | |
202 | memory access. The first step for bounds computation is to find the | |
203 | origin of pointer dereferenced for memory access. Basing on pointer | |
204 | origin we define a way to compute its bounds. There are just few | |
205 | possible cases: | |
206 | ||
207 | a) Pointer is returned by call. | |
208 | ||
209 | In this case we use corresponding checker builtin method to obtain returned | |
210 | bounds. | |
211 | ||
212 | Example: | |
213 | ||
214 | buf_1 = malloc (size_2); | |
215 | foo (buf_1); | |
216 | ||
217 | is translated into: | |
218 | ||
219 | buf_1 = malloc (size_2); | |
220 | __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1); | |
221 | foo (buf_1, __bound_tmp.1_3); | |
222 | ||
223 | b) Pointer is an address of an object. | |
224 | ||
225 | In this case compiler tries to compute objects size and create corresponding | |
226 | bounds. If object has incomplete type then special checker builtin is used to | |
227 | obtain its size at runtime. | |
228 | ||
229 | Example: | |
230 | ||
231 | foo () | |
232 | { | |
233 | <unnamed type> __bound_tmp.3; | |
234 | static int buf[100]; | |
235 | ||
236 | <bb 3>: | |
237 | __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400); | |
238 | ||
239 | <bb 2>: | |
240 | return &buf, __bound_tmp.3_2; | |
241 | } | |
242 | ||
243 | Example: | |
244 | ||
245 | Address of an object 'extern int buf[]' with incomplete type is | |
246 | returned. | |
247 | ||
248 | foo () | |
249 | { | |
250 | <unnamed type> __bound_tmp.4; | |
251 | long unsigned int __size_tmp.3; | |
252 | ||
253 | <bb 3>: | |
254 | __size_tmp.3_4 = __builtin_ia32_sizeof (buf); | |
255 | __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4); | |
256 | ||
257 | <bb 2>: | |
258 | return &buf, __bound_tmp.4_3; | |
259 | } | |
260 | ||
261 | c) Pointer is the result of object narrowing. | |
262 | ||
263 | It happens when we use pointer to an object to compute pointer to a part | |
264 | of an object. E.g. we take pointer to a field of a structure. In this | |
265 | case we perform bounds intersection using bounds of original object and | |
266 | bounds of object's part (which are computed basing on its type). | |
267 | ||
268 | There may be some debatable questions about when narrowing should occur | |
269 | and when it should not. To avoid false bound violations in correct | |
270 | programs we do not perform narrowing when address of an array element is | |
271 | obtained (it has address of the whole array) and when address of the first | |
272 | structure field is obtained (because it is guaranteed to be equal to | |
273 | address of the whole structure and it is legal to cast it back to structure). | |
274 | ||
275 | Default narrowing behavior may be changed using compiler flags. | |
276 | ||
277 | Example: | |
278 | ||
279 | In this example address of the second structure field is returned. | |
280 | ||
281 | foo (struct A * p, __bounds_type __bounds_of_p) | |
282 | { | |
283 | <unnamed type> __bound_tmp.3; | |
284 | int * _2; | |
285 | int * _5; | |
286 | ||
287 | <bb 2>: | |
288 | _5 = &p_1(D)->second_field; | |
289 | __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4); | |
290 | __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6, | |
291 | __bounds_of_p_3(D)); | |
292 | _2 = &p_1(D)->second_field; | |
293 | return _2, __bound_tmp.3_8; | |
294 | } | |
295 | ||
296 | Example: | |
297 | ||
298 | In this example address of the first field of array element is returned. | |
299 | ||
300 | foo (struct A * p, __bounds_type __bounds_of_p, int i) | |
301 | { | |
302 | long unsigned int _3; | |
303 | long unsigned int _4; | |
304 | struct A * _6; | |
305 | int * _7; | |
306 | ||
307 | <bb 2>: | |
308 | _3 = (long unsigned int) i_1(D); | |
309 | _4 = _3 * 8; | |
310 | _6 = p_5(D) + _4; | |
311 | _7 = &_6->first_field; | |
312 | return _7, __bounds_of_p_2(D); | |
313 | } | |
314 | ||
315 | ||
316 | d) Pointer is the result of pointer arithmetic or type cast. | |
317 | ||
318 | In this case bounds of the base pointer are used. In case of binary | |
319 | operation producing a pointer we are analyzing data flow further | |
320 | looking for operand's bounds. One operand is considered as a base | |
321 | if it has some valid bounds. If we fall into a case when none of | |
322 | operands (or both of them) has valid bounds, a default bounds value | |
323 | is used. | |
324 | ||
325 | Trying to find out bounds for binary operations we may fall into | |
326 | cyclic dependencies for pointers. To avoid infinite recursion all | |
327 | walked phi nodes instantly obtain corresponding bounds but created | |
328 | bounds are marked as incomplete. It helps us to stop DF walk during | |
329 | bounds search. | |
330 | ||
331 | When we reach pointer source, some args of incomplete bounds phi obtain | |
332 | valid bounds and those values are propagated further through phi nodes. | |
333 | If no valid bounds were found for phi node then we mark its result as | |
334 | invalid bounds. Process stops when all incomplete bounds become either | |
335 | valid or invalid and we are able to choose a pointer base. | |
336 | ||
337 | e) Pointer is loaded from the memory. | |
338 | ||
339 | In this case we just need to load bounds from the bounds table. | |
340 | ||
341 | Example: | |
342 | ||
343 | foo () | |
344 | { | |
345 | <unnamed type> __bound_tmp.3; | |
346 | static int * buf; | |
347 | int * _2; | |
348 | ||
349 | <bb 2>: | |
350 | _2 = buf; | |
351 | __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2); | |
352 | return _2, __bound_tmp.3_4; | |
353 | } | |
354 | ||
355 | */ | |
356 | ||
357 | typedef void (*assign_handler)(tree, tree, void *); | |
358 | ||
359 | static tree chkp_get_zero_bounds (); | |
360 | static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter); | |
361 | static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src, | |
362 | gimple_stmt_iterator *iter); | |
363 | static void chkp_parse_array_and_component_ref (tree node, tree *ptr, | |
364 | tree *elt, bool *safe, | |
365 | bool *bitfield, | |
366 | tree *bounds, | |
367 | gimple_stmt_iterator *iter, | |
368 | bool innermost_bounds); | |
369 | ||
370 | #define chkp_bndldx_fndecl \ | |
371 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX)) | |
372 | #define chkp_bndstx_fndecl \ | |
373 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX)) | |
374 | #define chkp_checkl_fndecl \ | |
375 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL)) | |
376 | #define chkp_checku_fndecl \ | |
377 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU)) | |
378 | #define chkp_bndmk_fndecl \ | |
379 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK)) | |
380 | #define chkp_ret_bnd_fndecl \ | |
381 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET)) | |
382 | #define chkp_intersect_fndecl \ | |
383 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT)) | |
384 | #define chkp_narrow_bounds_fndecl \ | |
385 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW)) | |
386 | #define chkp_sizeof_fndecl \ | |
387 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF)) | |
388 | #define chkp_extract_lower_fndecl \ | |
389 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER)) | |
390 | #define chkp_extract_upper_fndecl \ | |
391 | (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER)) | |
392 | ||
393 | static GTY (()) tree chkp_uintptr_type; | |
394 | ||
395 | static GTY (()) tree chkp_zero_bounds_var; | |
396 | static GTY (()) tree chkp_none_bounds_var; | |
397 | ||
398 | static GTY (()) basic_block entry_block; | |
399 | static GTY (()) tree zero_bounds; | |
400 | static GTY (()) tree none_bounds; | |
401 | static GTY (()) tree incomplete_bounds; | |
402 | static GTY (()) tree tmp_var; | |
403 | static GTY (()) tree size_tmp_var; | |
404 | static GTY (()) bitmap chkp_abnormal_copies; | |
405 | ||
406 | struct hash_set<tree> *chkp_invalid_bounds; | |
407 | struct hash_set<tree> *chkp_completed_bounds_set; | |
408 | struct hash_map<tree, tree> *chkp_reg_bounds; | |
409 | struct hash_map<tree, tree> *chkp_bound_vars; | |
410 | struct hash_map<tree, tree> *chkp_reg_addr_bounds; | |
411 | struct hash_map<tree, tree> *chkp_incomplete_bounds_map; | |
412 | struct hash_map<tree, tree> *chkp_bounds_map; | |
413 | struct hash_map<tree, tree> *chkp_static_var_bounds; | |
414 | ||
415 | static bool in_chkp_pass; | |
416 | ||
417 | #define CHKP_BOUND_TMP_NAME "__bound_tmp" | |
418 | #define CHKP_SIZE_TMP_NAME "__size_tmp" | |
419 | #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_" | |
420 | #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_" | |
421 | #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_" | |
422 | #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds" | |
423 | #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds" | |
424 | ||
425 | /* Static checker constructors may become very large and their | |
426 | compilation with optimization may take too much time. | |
427 | Therefore we put a limit to number of statements in one | |
428 | constructor. Tests with 100 000 statically initialized | |
429 | pointers showed following compilation times on Sandy Bridge | |
430 | server (used -O2): | |
431 | limit 100 => ~18 sec. | |
432 | limit 300 => ~22 sec. | |
433 | limit 1000 => ~30 sec. | |
434 | limit 3000 => ~49 sec. | |
435 | limit 5000 => ~55 sec. | |
436 | limit 10000 => ~76 sec. | |
437 | limit 100000 => ~532 sec. */ | |
438 | #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE)) | |
439 | ||
/* Accumulator for statements emitted into a static checker constructor.
   Used while generating bounds initialization code for statically
   initialized pointers (see MAX_STMTS_IN_STATIC_CHKP_CTOR above).  */
struct chkp_ctor_stmt_list
{
  /* Statement list collected so far for the current constructor.  */
  tree stmts;
  /* Number of statements that may still be added before the current
     constructor is emitted and a new one is started.  */
  int avail;
};
445 | ||
446 | /* Return 1 if function FNDECL is instrumented by Pointer | |
447 | Bounds Checker. */ | |
448 | bool | |
449 | chkp_function_instrumented_p (tree fndecl) | |
450 | { | |
451 | return fndecl | |
452 | && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl)); | |
453 | } | |
454 | ||
455 | /* Mark function FNDECL as instrumented. */ | |
456 | void | |
457 | chkp_function_mark_instrumented (tree fndecl) | |
458 | { | |
459 | if (chkp_function_instrumented_p (fndecl)) | |
460 | return; | |
461 | ||
462 | DECL_ATTRIBUTES (fndecl) | |
463 | = tree_cons (get_identifier ("chkp instrumented"), NULL, | |
464 | DECL_ATTRIBUTES (fndecl)); | |
465 | } | |
466 | ||
467 | /* Return true when STMT is builtin call to instrumentation function | |
468 | corresponding to CODE. */ | |
469 | ||
470 | bool | |
471 | chkp_gimple_call_builtin_p (gimple call, | |
472 | enum built_in_function code) | |
473 | { | |
474 | tree fndecl; | |
475 | if (is_gimple_call (call) | |
476 | && (fndecl = targetm.builtin_chkp_function (code)) | |
477 | && gimple_call_fndecl (call) == fndecl) | |
478 | return true; | |
479 | return false; | |
480 | } | |
481 | ||
/* Emit RTL to store zero bounds for pointer PTR located at MEM.
   Used at expand time; emits an assignment of zero bounds into a
   temporary followed by a bndstx call associating PTR at MEM with
   those bounds.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* With static const bounds we can reference a preallocated
     zero-bounds variable instead of making bounds at runtime.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary to hold the bounds value; assign_temp gives us a
     stack slot of the bounds type wrapped back into a tree.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* bndstx wants the address of the memory holding the pointer.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Order matters: the temporary must hold zero bounds before the
     bounds-store call reads it.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
502 | ||
503 | /* Mark statement S to not be instrumented. */ | |
504 | static void | |
505 | chkp_mark_stmt (gimple s) | |
506 | { | |
507 | gimple_set_plf (s, GF_PLF_1, true); | |
508 | } | |
509 | ||
510 | /* Mark statement S to be instrumented. */ | |
511 | static void | |
512 | chkp_unmark_stmt (gimple s) | |
513 | { | |
514 | gimple_set_plf (s, GF_PLF_1, false); | |
515 | } | |
516 | ||
517 | /* Return 1 if statement S should not be instrumented. */ | |
518 | static bool | |
519 | chkp_marked_stmt_p (gimple s) | |
520 | { | |
521 | return gimple_plf (s, GF_PLF_1); | |
522 | } | |
523 | ||
524 | /* Get var to be used for bound temps. */ | |
525 | static tree | |
526 | chkp_get_tmp_var (void) | |
527 | { | |
528 | if (!tmp_var) | |
529 | tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME); | |
530 | ||
531 | return tmp_var; | |
532 | } | |
533 | ||
534 | /* Get SSA_NAME to be used as temp. */ | |
535 | static tree | |
536 | chkp_get_tmp_reg (gimple stmt) | |
537 | { | |
538 | if (in_chkp_pass) | |
539 | return make_ssa_name (chkp_get_tmp_var (), stmt); | |
540 | ||
541 | return make_temp_ssa_name (pointer_bounds_type_node, stmt, | |
542 | CHKP_BOUND_TMP_NAME); | |
543 | } | |
544 | ||
545 | /* Get var to be used for size temps. */ | |
546 | static tree | |
547 | chkp_get_size_tmp_var (void) | |
548 | { | |
549 | if (!size_tmp_var) | |
550 | size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME); | |
551 | ||
552 | return size_tmp_var; | |
553 | } | |
554 | ||
555 | /* Register bounds BND for address of OBJ. */ | |
556 | static void | |
557 | chkp_register_addr_bounds (tree obj, tree bnd) | |
558 | { | |
559 | if (bnd == incomplete_bounds) | |
560 | return; | |
561 | ||
562 | chkp_reg_addr_bounds->put (obj, bnd); | |
563 | ||
564 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
565 | { | |
566 | fprintf (dump_file, "Regsitered bound "); | |
567 | print_generic_expr (dump_file, bnd, 0); | |
568 | fprintf (dump_file, " for address of "); | |
569 | print_generic_expr (dump_file, obj, 0); | |
570 | fprintf (dump_file, "\n"); | |
571 | } | |
572 | } | |
573 | ||
574 | /* Return bounds registered for address of OBJ. */ | |
575 | static tree | |
576 | chkp_get_registered_addr_bounds (tree obj) | |
577 | { | |
578 | tree *slot = chkp_reg_addr_bounds->get (obj); | |
579 | return slot ? *slot : NULL_TREE; | |
580 | } | |
581 | ||
582 | /* Mark BOUNDS as completed. */ | |
583 | static void | |
584 | chkp_mark_completed_bounds (tree bounds) | |
585 | { | |
586 | chkp_completed_bounds_set->add (bounds); | |
587 | ||
588 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
589 | { | |
590 | fprintf (dump_file, "Marked bounds "); | |
591 | print_generic_expr (dump_file, bounds, 0); | |
592 | fprintf (dump_file, " as completed\n"); | |
593 | } | |
594 | } | |
595 | ||
596 | /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */ | |
597 | static bool | |
598 | chkp_completed_bounds (tree bounds) | |
599 | { | |
600 | return chkp_completed_bounds_set->contains (bounds); | |
601 | } | |
602 | ||
603 | /* Clear comleted bound marks. */ | |
604 | static void | |
605 | chkp_erase_completed_bounds (void) | |
606 | { | |
607 | delete chkp_completed_bounds_set; | |
608 | chkp_completed_bounds_set = new hash_set<tree>; | |
609 | } | |
610 | ||
611 | /* Mark BOUNDS associated with PTR as incomplete. */ | |
612 | static void | |
613 | chkp_register_incomplete_bounds (tree bounds, tree ptr) | |
614 | { | |
615 | chkp_incomplete_bounds_map->put (bounds, ptr); | |
616 | ||
617 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
618 | { | |
619 | fprintf (dump_file, "Regsitered incomplete bounds "); | |
620 | print_generic_expr (dump_file, bounds, 0); | |
621 | fprintf (dump_file, " for "); | |
622 | print_generic_expr (dump_file, ptr, 0); | |
623 | fprintf (dump_file, "\n"); | |
624 | } | |
625 | } | |
626 | ||
627 | /* Return 1 if BOUNDS are incomplete and 0 otherwise. */ | |
628 | static bool | |
629 | chkp_incomplete_bounds (tree bounds) | |
630 | { | |
631 | if (bounds == incomplete_bounds) | |
632 | return true; | |
633 | ||
634 | if (chkp_completed_bounds (bounds)) | |
635 | return false; | |
636 | ||
637 | return chkp_incomplete_bounds_map->get (bounds) != NULL; | |
638 | } | |
639 | ||
640 | /* Clear incomleted bound marks. */ | |
641 | static void | |
642 | chkp_erase_incomplete_bounds (void) | |
643 | { | |
644 | delete chkp_incomplete_bounds_map; | |
645 | chkp_incomplete_bounds_map = new hash_map<tree, tree>; | |
646 | } | |
647 | ||
648 | /* Build and return bndmk call which creates bounds for structure | |
649 | pointed by PTR. Structure should have complete type. */ | |
650 | tree | |
651 | chkp_make_bounds_for_struct_addr (tree ptr) | |
652 | { | |
653 | tree type = TREE_TYPE (ptr); | |
654 | tree size; | |
655 | ||
656 | gcc_assert (POINTER_TYPE_P (type)); | |
657 | ||
658 | size = TYPE_SIZE (TREE_TYPE (type)); | |
659 | ||
660 | gcc_assert (size); | |
661 | ||
662 | return build_call_nary (pointer_bounds_type_node, | |
663 | build_fold_addr_expr (chkp_bndmk_fndecl), | |
664 | 2, ptr, size); | |
665 | } | |
666 | ||
667 | /* Traversal function for chkp_may_finish_incomplete_bounds. | |
668 | Set RES to 0 if at least one argument of phi statement | |
669 | defining bounds (passed in KEY arg) is unknown. | |
670 | Traversal stops when first unknown phi argument is found. */ | |
671 | bool | |
672 | chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED, | |
673 | bool *res) | |
674 | { | |
675 | gimple phi; | |
676 | unsigned i; | |
677 | ||
678 | gcc_assert (TREE_CODE (bounds) == SSA_NAME); | |
679 | ||
680 | phi = SSA_NAME_DEF_STMT (bounds); | |
681 | ||
682 | gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI); | |
683 | ||
684 | for (i = 0; i < gimple_phi_num_args (phi); i++) | |
685 | { | |
686 | tree phi_arg = gimple_phi_arg_def (phi, i); | |
687 | if (!phi_arg) | |
688 | { | |
689 | *res = false; | |
690 | /* Do not need to traverse further. */ | |
691 | return false; | |
692 | } | |
693 | } | |
694 | ||
695 | return true; | |
696 | } | |
697 | ||
698 | /* Return 1 if all phi nodes created for bounds have their | |
699 | arguments computed. */ | |
700 | static bool | |
701 | chkp_may_finish_incomplete_bounds (void) | |
702 | { | |
703 | bool res = true; | |
704 | ||
705 | chkp_incomplete_bounds_map | |
706 | ->traverse<bool *, chkp_may_complete_phi_bounds> (&res); | |
707 | ||
708 | return res; | |
709 | } | |
710 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for the bounds phi node keyed by BOUNDS; SLOT holds
   the pointer SSA name whose defining phi mirrors the bounds phi.
   For each arg of the pointer phi, the corresponding bounds are found
   and appended to the bounds phi over the same incoming edge.
   Always returns true so the map traversal visits every entry.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  /* Both names must be defined by phi nodes; as_a checks this.  */
  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* Iterate over the bounds phi's arg count, pairing arg I of the
     pointer phi with edge I — the two phis live in the same block,
     so edge indices correspond.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
740 | ||
741 | /* Mark BOUNDS as invalid. */ | |
742 | static void | |
743 | chkp_mark_invalid_bounds (tree bounds) | |
744 | { | |
745 | chkp_invalid_bounds->add (bounds); | |
746 | ||
747 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
748 | { | |
749 | fprintf (dump_file, "Marked bounds "); | |
750 | print_generic_expr (dump_file, bounds, 0); | |
751 | fprintf (dump_file, " as invalid\n"); | |
752 | } | |
753 | } | |
754 | ||
755 | /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */ | |
756 | static bool | |
757 | chkp_valid_bounds (tree bounds) | |
758 | { | |
759 | if (bounds == zero_bounds || bounds == none_bounds) | |
760 | return false; | |
761 | ||
762 | return !chkp_invalid_bounds->contains (bounds); | |
763 | } | |
764 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of the phi node defining BOUNDS, trying to
   find at least one valid, completed arg.  If found, BOUNDS are
   marked as valid completed bounds, all phi args are recomputed
   (SLOT holds the associated pointer SSA name), and *RES is set to
   true to signal the fixpoint loop that progress was made.
   Always returns true so the traversal visits every entry.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already settled on an earlier fixpoint iteration.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid completed arg is enough to treat the phi result
	 as valid; mark completed BEFORE recomputing args so the
	 recursive bounds search terminates.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
803 | ||
804 | /* Helper function for chkp_finish_incomplete_bounds. | |
805 | Marks all incompleted bounds as invalid. */ | |
806 | bool | |
807 | chkp_mark_invalid_bounds_walker (tree const &bounds, | |
808 | tree *slot ATTRIBUTE_UNUSED, | |
809 | void *res ATTRIBUTE_UNUSED) | |
810 | { | |
811 | if (!chkp_completed_bounds (bounds)) | |
812 | { | |
813 | chkp_mark_invalid_bounds (bounds); | |
814 | chkp_mark_completed_bounds (bounds); | |
815 | } | |
816 | return true; | |
817 | } | |
818 | ||
819 | /* When all bound phi nodes have all their args computed | |
820 | we have enough info to find valid bounds. We iterate | |
821 | through all incompleted bounds searching for valid | |
822 | bounds. Found valid bounds are marked as completed | |
823 | and all remaining incompleted bounds are recomputed. | |
824 | Process continues until no new valid bounds may be | |
825 | found. All remained incompleted bounds are marked as | |
826 | invalid (i.e. have no valid source of bounds). */ | |
827 | static void | |
828 | chkp_finish_incomplete_bounds (void) | |
829 | { | |
830 | bool found_valid; | |
831 | ||
832 | while (found_valid) | |
833 | { | |
834 | found_valid = false; | |
835 | ||
836 | chkp_incomplete_bounds_map-> | |
837 | traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid); | |
838 | ||
839 | if (found_valid) | |
840 | chkp_incomplete_bounds_map-> | |
841 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
842 | } | |
843 | ||
844 | chkp_incomplete_bounds_map-> | |
845 | traverse<void *, chkp_mark_invalid_bounds_walker> (NULL); | |
846 | chkp_incomplete_bounds_map-> | |
847 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
848 | ||
849 | chkp_erase_completed_bounds (); | |
850 | chkp_erase_incomplete_bounds (); | |
851 | } | |
852 | ||
853 | /* Return 1 if type TYPE is a pointer type or a | |
854 | structure having a pointer type as one of its fields. | |
855 | Otherwise return 0. */ | |
856 | bool | |
857 | chkp_type_has_pointer (const_tree type) | |
858 | { | |
859 | bool res = false; | |
860 | ||
861 | if (BOUNDED_TYPE_P (type)) | |
862 | res = true; | |
863 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
864 | { | |
865 | tree field; | |
866 | ||
867 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
868 | if (TREE_CODE (field) == FIELD_DECL) | |
869 | res = res || chkp_type_has_pointer (TREE_TYPE (field)); | |
870 | } | |
871 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
872 | res = chkp_type_has_pointer (TREE_TYPE (type)); | |
873 | ||
874 | return res; | |
875 | } | |
876 | ||
877 | unsigned | |
878 | chkp_type_bounds_count (const_tree type) | |
879 | { | |
880 | unsigned res = 0; | |
881 | ||
882 | if (!type) | |
883 | res = 0; | |
884 | else if (BOUNDED_TYPE_P (type)) | |
885 | res = 1; | |
886 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
887 | { | |
888 | bitmap have_bound; | |
889 | ||
890 | bitmap_obstack_initialize (NULL); | |
891 | have_bound = BITMAP_ALLOC (NULL); | |
892 | chkp_find_bound_slots (type, have_bound); | |
893 | res = bitmap_count_bits (have_bound); | |
894 | BITMAP_FREE (have_bound); | |
895 | bitmap_obstack_release (NULL); | |
896 | } | |
897 | ||
898 | return res; | |
899 | } | |
900 | ||
901 | /* Get bounds associated with NODE via | |
902 | chkp_set_bounds call. */ | |
903 | tree | |
904 | chkp_get_bounds (tree node) | |
905 | { | |
906 | tree *slot; | |
907 | ||
908 | if (!chkp_bounds_map) | |
909 | return NULL_TREE; | |
910 | ||
911 | slot = chkp_bounds_map->get (node); | |
912 | return slot ? *slot : NULL_TREE; | |
913 | } | |
914 | ||
915 | /* Associate bounds VAL with NODE. */ | |
916 | void | |
917 | chkp_set_bounds (tree node, tree val) | |
918 | { | |
919 | if (!chkp_bounds_map) | |
920 | chkp_bounds_map = new hash_map<tree, tree>; | |
921 | ||
922 | chkp_bounds_map->put (node, val); | |
923 | } | |
924 | ||
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then true is returned.
   Otherwise return false.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when instrumentation is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type holds pointers (directly
     or inside aggregate fields) need bounds initialization;
     mark the varpool node so chkp_finish_file picks it up.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
948 | ||
949 | /* Helper function for chkp_finish_file. | |
950 | ||
951 | Add new modification statement (RHS is assigned to LHS) | |
952 | into list of static initializer statementes (passed in ARG). | |
953 | If statements list becomes too big, emit checker constructor | |
954 | and start the new one. */ | |
955 | static void | |
956 | chkp_add_modification_to_stmt_list (tree lhs, | |
957 | tree rhs, | |
958 | void *arg) | |
959 | { | |
960 | struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg; | |
961 | tree modify; | |
962 | ||
963 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) | |
964 | rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs); | |
965 | ||
966 | modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); | |
967 | append_to_statement_list (modify, &stmts->stmts); | |
968 | ||
969 | stmts->avail--; | |
970 | } | |
971 | ||
972 | /* Build and return ADDR_EXPR for specified object OBJ. */ | |
973 | static tree | |
974 | chkp_build_addr_expr (tree obj) | |
975 | { | |
976 | return TREE_CODE (obj) == TARGET_MEM_REF | |
977 | ? tree_mem_ref_addr (ptr_type_node, obj) | |
978 | : build_fold_addr_expr (obj); | |
979 | } | |
980 | ||
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: lower bound is the string's address; the
	 size is TREE_STRING_LENGTH - 1 (assumes the length counts
	 the trailing NUL — TODO confirm against tree.h).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build an indirect call to the chkp sizeof function to obtain
	 the object size at run time.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* When the dynamic size comes back zero, substitute
	     0 - lb, i.e. bounds covering the rest of the address
	     space.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last byte: lb + size.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  /* The target hook emits the initialization statements and reports
     how many statement slots it consumed.  */
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* If the constructor grew too big, emit it and start a new one.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1038 | ||
1039 | /* Return entry block to be used for checker initilization code. | |
1040 | Create new block if required. */ | |
1041 | static basic_block | |
1042 | chkp_get_entry_block (void) | |
1043 | { | |
1044 | if (!entry_block) | |
1045 | entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest; | |
1046 | ||
1047 | return entry_block; | |
1048 | } | |
1049 | ||
1050 | /* Return a bounds var to be used for pointer var PTR_VAR. */ | |
1051 | static tree | |
1052 | chkp_get_bounds_var (tree ptr_var) | |
1053 | { | |
1054 | tree bnd_var; | |
1055 | tree *slot; | |
1056 | ||
1057 | slot = chkp_bound_vars->get (ptr_var); | |
1058 | if (slot) | |
1059 | bnd_var = *slot; | |
1060 | else | |
1061 | { | |
1062 | bnd_var = create_tmp_reg (pointer_bounds_type_node, | |
1063 | CHKP_BOUND_TMP_NAME); | |
1064 | chkp_bound_vars->put (ptr_var, bnd_var); | |
1065 | } | |
1066 | ||
1067 | return bnd_var; | |
1068 | } | |
1069 | ||
1070 | ||
1071 | ||
1072 | /* Register bounds BND for object PTR in global bounds table. | |
1073 | A copy of bounds may be created for abnormal ssa names. | |
1074 | Returns bounds to use for PTR. */ | |
1075 | static tree | |
1076 | chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd) | |
1077 | { | |
1078 | bool abnormal_ptr; | |
1079 | ||
1080 | if (!chkp_reg_bounds) | |
1081 | return bnd; | |
1082 | ||
1083 | /* Do nothing if bounds are incomplete_bounds | |
1084 | because it means bounds will be recomputed. */ | |
1085 | if (bnd == incomplete_bounds) | |
1086 | return bnd; | |
1087 | ||
1088 | abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME | |
1089 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr) | |
1090 | && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI); | |
1091 | ||
1092 | /* A single bounds value may be reused multiple times for | |
1093 | different pointer values. It may cause coalescing issues | |
1094 | for abnormal SSA names. To avoid it we create a bounds | |
1095 | copy in case it is computed for abnormal SSA name. | |
1096 | ||
1097 | We also cannot reuse such created copies for other pointers */ | |
1098 | if (abnormal_ptr | |
1099 | || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd))) | |
1100 | { | |
1101 | tree bnd_var = NULL_TREE; | |
1102 | ||
1103 | if (abnormal_ptr) | |
1104 | { | |
1105 | if (SSA_NAME_VAR (ptr)) | |
1106 | bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr)); | |
1107 | } | |
1108 | else | |
1109 | bnd_var = chkp_get_tmp_var (); | |
1110 | ||
1111 | /* For abnormal copies we may just find original | |
1112 | bounds and use them. */ | |
1113 | if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd)) | |
1114 | { | |
1115 | gimple bnd_def = SSA_NAME_DEF_STMT (bnd); | |
1116 | gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN); | |
1117 | bnd = gimple_assign_rhs1 (bnd_def); | |
1118 | } | |
1119 | /* For undefined values we usually use none bounds | |
1120 | value but in case of abnormal edge it may cause | |
1121 | coalescing failures. Use default definition of | |
1122 | bounds variable instead to avoid it. */ | |
1123 | else if (SSA_NAME_IS_DEFAULT_DEF (ptr) | |
1124 | && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL) | |
1125 | { | |
1126 | bnd = get_or_create_ssa_default_def (cfun, bnd_var); | |
1127 | ||
1128 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1129 | { | |
1130 | fprintf (dump_file, "Using default def bounds "); | |
1131 | print_generic_expr (dump_file, bnd, 0); | |
1132 | fprintf (dump_file, " for abnormal default def SSA name "); | |
1133 | print_generic_expr (dump_file, ptr, 0); | |
1134 | fprintf (dump_file, "\n"); | |
1135 | } | |
1136 | } | |
1137 | else | |
1138 | { | |
1139 | tree copy; | |
1140 | gimple def = SSA_NAME_DEF_STMT (ptr); | |
1141 | gimple assign; | |
1142 | gimple_stmt_iterator gsi; | |
1143 | ||
1144 | if (bnd_var) | |
1145 | copy = make_ssa_name (bnd_var, gimple_build_nop ()); | |
1146 | else | |
1147 | copy = make_temp_ssa_name (pointer_bounds_type_node, | |
1148 | gimple_build_nop (), | |
1149 | CHKP_BOUND_TMP_NAME); | |
1150 | assign = gimple_build_assign (copy, bnd); | |
1151 | ||
1152 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1153 | { | |
1154 | fprintf (dump_file, "Creating a copy of bounds "); | |
1155 | print_generic_expr (dump_file, bnd, 0); | |
1156 | fprintf (dump_file, " for abnormal SSA name "); | |
1157 | print_generic_expr (dump_file, ptr, 0); | |
1158 | fprintf (dump_file, "\n"); | |
1159 | } | |
1160 | ||
1161 | if (gimple_code (def) == GIMPLE_NOP) | |
1162 | { | |
1163 | gsi = gsi_last_bb (chkp_get_entry_block ()); | |
1164 | if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi))) | |
1165 | gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING); | |
1166 | else | |
1167 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1168 | } | |
1169 | else | |
1170 | { | |
1171 | gimple bnd_def = SSA_NAME_DEF_STMT (bnd); | |
1172 | /* Sometimes (e.g. when we load a pointer from a | |
1173 | memory) bounds are produced later than a pointer. | |
1174 | We need to insert bounds copy appropriately. */ | |
1175 | if (gimple_code (bnd_def) != GIMPLE_NOP | |
1176 | && stmt_dominates_stmt_p (def, bnd_def)) | |
1177 | gsi = gsi_for_stmt (bnd_def); | |
1178 | else | |
1179 | gsi = gsi_for_stmt (def); | |
1180 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1181 | } | |
1182 | ||
1183 | bnd = copy; | |
1184 | } | |
1185 | ||
1186 | if (abnormal_ptr) | |
1187 | bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)); | |
1188 | } | |
1189 | ||
1190 | chkp_reg_bounds->put (ptr, bnd); | |
1191 | ||
1192 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1193 | { | |
1194 | fprintf (dump_file, "Regsitered bound "); | |
1195 | print_generic_expr (dump_file, bnd, 0); | |
1196 | fprintf (dump_file, " for pointer "); | |
1197 | print_generic_expr (dump_file, ptr, 0); | |
1198 | fprintf (dump_file, "\n"); | |
1199 | } | |
1200 | ||
1201 | return bnd; | |
1202 | } | |
1203 | ||
1204 | /* Get bounds registered for object PTR in global bounds table. */ | |
1205 | static tree | |
1206 | chkp_get_registered_bounds (tree ptr) | |
1207 | { | |
1208 | tree *slot; | |
1209 | ||
1210 | if (!chkp_reg_bounds) | |
1211 | return NULL_TREE; | |
1212 | ||
1213 | slot = chkp_reg_bounds->get (ptr); | |
1214 | return slot ? *slot : NULL_TREE; | |
1215 | } | |
1216 | ||
/* Add bound retvals to return statement pointed by GSI. */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a return without a value.  */
  if (!retval)
    return;

  /* Attach bounds only when the function's result is a bounded
     (pointer) value; they are registered against RESULT_DECL.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1239 | ||
1240 | /* Force OP to be suitable for using as an argument for call. | |
1241 | New statements (if any) go to SEQ. */ | |
1242 | static tree | |
1243 | chkp_force_gimple_call_op (tree op, gimple_seq *seq) | |
1244 | { | |
1245 | gimple_seq stmts; | |
1246 | gimple_stmt_iterator si; | |
1247 | ||
1248 | op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE); | |
1249 | ||
1250 | for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si)) | |
1251 | chkp_mark_stmt (gsi_stmt (si)); | |
1252 | ||
1253 | gimple_seq_add_seq (seq, stmts); | |
1254 | ||
1255 | return op; | |
1256 | } | |
1257 | ||
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check — skip generation.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero means a read, one means a write; honor the
     corresponding -fchkp-check-read/-write flags.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call operand first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the lower-bound check call and mark it as checker code.  */
  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1303 | ||
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check — skip generation.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero means a read, one means a write; honor the
     corresponding -fchkp-check-read/-write flags.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call operand first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the upper-bound check call and mark it as checker code.  */
  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1349 | ||
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1363 | ||
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* Both the LBOUNDS and the full BOUNDS builtins need a lower
     bound check on ADDR itself.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      /* The full-range builtin checks the upper bound against the
	 last accessed byte: ADDR + SIZE - 1.  */
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The user builtin call itself is no longer needed.  */
  gsi_remove (&call_iter, true);
}
1395 | ||
/* Replace call to _bnd_get_ptr_* pointed by GSI with
   corresponding bounds extract call.  */

void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple extract;

  /* Map the user builtin onto the matching internal extract
     function declaration.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  /* The replacement call takes the bounds value and reuses the
     original call's LHS.  */
  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}
1421 | ||
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      /* Folding two constants must always succeed.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1448 | ||
1449 | /* Return ARRAY_REF for array ARR and index IDX with | |
1450 | specified element type ETYPE and element size ESIZE. */ | |
1451 | static tree | |
1452 | chkp_build_array_ref (tree arr, tree etype, tree esize, | |
1453 | unsigned HOST_WIDE_INT idx) | |
1454 | { | |
1455 | tree index = build_int_cst (size_type_node, idx); | |
1456 | tree res; | |
1457 | ||
1458 | /* If object is TMR then we do not use array_ref but | |
1459 | add offset instead. We need it to be able to get addr | |
1460 | of the reasult later. */ | |
1461 | if (TREE_CODE (arr) == TARGET_MEM_REF) | |
1462 | { | |
1463 | tree offs = TMR_OFFSET (arr); | |
1464 | ||
1465 | esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize), | |
1466 | esize, index); | |
1467 | gcc_assert(esize); | |
1468 | ||
1469 | offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs), | |
1470 | offs, esize); | |
1471 | gcc_assert (offs); | |
1472 | ||
1473 | res = copy_node (arr); | |
1474 | TREE_TYPE (res) = etype; | |
1475 | TMR_OFFSET (res) = offs; | |
1476 | } | |
1477 | else | |
1478 | res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE); | |
1479 | ||
1480 | return res; | |
1481 | } | |
1482 | ||
1483 | /* Helper function for chkp_add_bounds_to_call_stmt. | |
1484 | Fill ALL_BOUNDS output array with created bounds. | |
1485 | ||
1486 | OFFS is used for recursive calls and holds basic | |
1487 | offset of TYPE in outer structure in bits. | |
1488 | ||
1489 | ITER points a position where bounds are searched. | |
1490 | ||
1491 | ALL_BOUNDS[i] is filled with elem bounds if there | |
1492 | is a field in TYPE which has pointer type and offset | |
1493 | equal to i * POINTER_SIZE in bits. */ | |
1494 | static void | |
1495 | chkp_find_bounds_for_elem (tree elem, tree *all_bounds, | |
1496 | HOST_WIDE_INT offs, | |
1497 | gimple_stmt_iterator *iter) | |
1498 | { | |
1499 | tree type = TREE_TYPE (elem); | |
1500 | ||
1501 | if (BOUNDED_TYPE_P (type)) | |
1502 | { | |
1503 | if (!all_bounds[offs / POINTER_SIZE]) | |
1504 | { | |
1505 | tree temp = make_temp_ssa_name (type, gimple_build_nop (), ""); | |
1506 | gimple assign = gimple_build_assign (temp, elem); | |
1507 | gimple_stmt_iterator gsi; | |
1508 | ||
1509 | gsi_insert_before (iter, assign, GSI_SAME_STMT); | |
1510 | gsi = gsi_for_stmt (assign); | |
1511 | ||
1512 | all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi); | |
1513 | } | |
1514 | } | |
1515 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
1516 | { | |
1517 | tree field; | |
1518 | ||
1519 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
1520 | if (TREE_CODE (field) == FIELD_DECL) | |
1521 | { | |
1522 | tree base = unshare_expr (elem); | |
1523 | tree field_ref = chkp_build_component_ref (base, field); | |
1524 | HOST_WIDE_INT field_offs | |
1525 | = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field)); | |
1526 | if (DECL_FIELD_OFFSET (field)) | |
1527 | field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8; | |
1528 | ||
1529 | chkp_find_bounds_for_elem (field_ref, all_bounds, | |
1530 | offs + field_offs, iter); | |
1531 | } | |
1532 | } | |
1533 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
1534 | { | |
1535 | tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); | |
1536 | tree etype = TREE_TYPE (type); | |
1537 | HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype)); | |
1538 | unsigned HOST_WIDE_INT cur; | |
1539 | ||
1540 | if (!maxval || integer_minus_onep (maxval)) | |
1541 | return; | |
1542 | ||
1543 | for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++) | |
1544 | { | |
1545 | tree base = unshare_expr (elem); | |
1546 | tree arr_elem = chkp_build_array_ref (base, etype, | |
1547 | TYPE_SIZE (etype), | |
1548 | cur); | |
1549 | chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize, | |
1550 | iter); | |
1551 | } | |
1552 | } | |
1553 | } | |
1554 | ||
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to record for arrays with a missing, non-constant
	 or negative upper bound; TREE_INT_CST_LOW below requires an
	 INTEGER_CST.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1601 | ||
1602 | /* Fill bitmap RES with information about bounds for | |
1603 | type TYPE. See chkp_find_bound_slots_1 for more | |
1604 | details. */ | |
1605 | void | |
1606 | chkp_find_bound_slots (const_tree type, bitmap res) | |
1607 | { | |
1608 | bitmap_clear (res); | |
1609 | chkp_find_bound_slots_1 (type, res, 0); | |
1610 | } | |
1611 | ||
edcf72f3 IE |
1612 | /* Return 1 if call to FNDECL should be instrumented |
1613 | and 0 otherwise. */ | |
1614 | ||
1615 | static bool | |
1616 | chkp_instrument_normal_builtin (tree fndecl) | |
1617 | { | |
1618 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1619 | { | |
1620 | case BUILT_IN_STRLEN: | |
1621 | case BUILT_IN_STRCPY: | |
1622 | case BUILT_IN_STRNCPY: | |
1623 | case BUILT_IN_STPCPY: | |
1624 | case BUILT_IN_STPNCPY: | |
1625 | case BUILT_IN_STRCAT: | |
1626 | case BUILT_IN_STRNCAT: | |
1627 | case BUILT_IN_MEMCPY: | |
1628 | case BUILT_IN_MEMPCPY: | |
1629 | case BUILT_IN_MEMSET: | |
1630 | case BUILT_IN_MEMMOVE: | |
1631 | case BUILT_IN_BZERO: | |
1632 | case BUILT_IN_STRCMP: | |
1633 | case BUILT_IN_STRNCMP: | |
1634 | case BUILT_IN_BCMP: | |
1635 | case BUILT_IN_MEMCMP: | |
1636 | case BUILT_IN_MEMCPY_CHK: | |
1637 | case BUILT_IN_MEMPCPY_CHK: | |
1638 | case BUILT_IN_MEMMOVE_CHK: | |
1639 | case BUILT_IN_MEMSET_CHK: | |
1640 | case BUILT_IN_STRCPY_CHK: | |
1641 | case BUILT_IN_STRNCPY_CHK: | |
1642 | case BUILT_IN_STPCPY_CHK: | |
1643 | case BUILT_IN_STPNCPY_CHK: | |
1644 | case BUILT_IN_STRCAT_CHK: | |
1645 | case BUILT_IN_STRNCAT_CHK: | |
1646 | case BUILT_IN_MALLOC: | |
1647 | case BUILT_IN_CALLOC: | |
1648 | case BUILT_IN_REALLOC: | |
1649 | return 1; | |
1650 | ||
1651 | default: | |
1652 | return 0; | |
1653 | } | |
1654 | } | |
1655 | ||
d5e254e1 IE |
1656 | /* Add bound arguments to call statement pointed by GSI. |
1657 | Also performs a replacement of user checker builtins calls | |
1658 | with internal ones. */ | |
1659 | ||
1660 | static void | |
1661 | chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi) | |
1662 | { | |
538dd0b7 | 1663 | gcall *call = as_a <gcall *> (gsi_stmt (*gsi)); |
d5e254e1 IE |
1664 | unsigned arg_no = 0; |
1665 | tree fndecl = gimple_call_fndecl (call); | |
1666 | tree fntype; | |
1667 | tree first_formal_arg; | |
1668 | tree arg; | |
1669 | bool use_fntype = false; | |
1670 | tree op; | |
1671 | ssa_op_iter iter; | |
538dd0b7 | 1672 | gcall *new_call; |
d5e254e1 IE |
1673 | |
1674 | /* Do nothing for internal functions. */ | |
1675 | if (gimple_call_internal_p (call)) | |
1676 | return; | |
1677 | ||
1678 | fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call))); | |
1679 | ||
1680 | /* Do nothing if back-end builtin is called. */ | |
1681 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
1682 | return; | |
1683 | ||
1684 | /* Do nothing for some middle-end builtins. */ | |
1685 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1686 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE) | |
1687 | return; | |
1688 | ||
edcf72f3 | 1689 | /* Do nothing for calls to legacy functions. */ |
d5e254e1 IE |
1690 | if (fndecl |
1691 | && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl))) | |
1692 | return; | |
1693 | ||
1694 | /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS | |
1695 | and CHKP_COPY_PTR_BOUNDS. */ | |
1696 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1697 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS | |
1698 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS | |
1699 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS | |
1700 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)) | |
1701 | return; | |
1702 | ||
1703 | /* Check user builtins are replaced with checks. */ | |
1704 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1705 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS | |
1706 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS | |
1707 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)) | |
1708 | { | |
1709 | chkp_replace_address_check_builtin (gsi, integer_minus_one_node); | |
1710 | return; | |
1711 | } | |
1712 | ||
1713 | /* Check user builtins are replaced with bound extract. */ | |
1714 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1715 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND | |
1716 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)) | |
1717 | { | |
1718 | chkp_replace_extract_builtin (gsi); | |
1719 | return; | |
1720 | } | |
1721 | ||
1722 | /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with | |
1723 | target narrow bounds call. */ | |
1724 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1725 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS) | |
1726 | { | |
1727 | tree arg = gimple_call_arg (call, 1); | |
1728 | tree bounds = chkp_find_bounds (arg, gsi); | |
1729 | ||
1730 | gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl); | |
1731 | gimple_call_set_arg (call, 1, bounds); | |
1732 | update_stmt (call); | |
1733 | ||
1734 | return; | |
1735 | } | |
1736 | ||
1737 | /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with | |
1738 | bndstx call. */ | |
1739 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1740 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS) | |
1741 | { | |
1742 | tree addr = gimple_call_arg (call, 0); | |
1743 | tree ptr = gimple_call_arg (call, 1); | |
1744 | tree bounds = chkp_find_bounds (ptr, gsi); | |
1745 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
1746 | ||
1747 | chkp_build_bndstx (addr, ptr, bounds, gsi); | |
1748 | gsi_remove (&iter, true); | |
1749 | ||
1750 | return; | |
1751 | } | |
1752 | ||
1753 | if (!flag_chkp_instrument_calls) | |
1754 | return; | |
1755 | ||
edcf72f3 IE |
1756 | /* We instrument only some subset of builtins. We also instrument |
1757 | builtin calls to be inlined. */ | |
1758 | if (fndecl | |
1759 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1760 | && !chkp_instrument_normal_builtin (fndecl)) | |
1761 | { | |
1762 | if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl))) | |
1763 | return; | |
1764 | ||
1765 | struct cgraph_node *clone = chkp_maybe_create_clone (fndecl); | |
1766 | if (!clone | |
1767 | || !gimple_has_body_p (clone->decl)) | |
1768 | return; | |
1769 | } | |
d5e254e1 IE |
1770 | |
1771 | /* If function decl is available then use it for | |
1772 | formal arguments list. Otherwise use function type. */ | |
1773 | if (fndecl && DECL_ARGUMENTS (fndecl)) | |
1774 | first_formal_arg = DECL_ARGUMENTS (fndecl); | |
1775 | else | |
1776 | { | |
1777 | first_formal_arg = TYPE_ARG_TYPES (fntype); | |
1778 | use_fntype = true; | |
1779 | } | |
1780 | ||
1781 | /* Fill vector of new call args. */ | |
1782 | vec<tree> new_args = vNULL; | |
1783 | new_args.create (gimple_call_num_args (call)); | |
1784 | arg = first_formal_arg; | |
1785 | for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++) | |
1786 | { | |
1787 | tree call_arg = gimple_call_arg (call, arg_no); | |
1788 | tree type; | |
1789 | ||
1790 | /* Get arg type using formal argument description | |
1791 | or actual argument type. */ | |
1792 | if (arg) | |
1793 | if (use_fntype) | |
1794 | if (TREE_VALUE (arg) != void_type_node) | |
1795 | { | |
1796 | type = TREE_VALUE (arg); | |
1797 | arg = TREE_CHAIN (arg); | |
1798 | } | |
1799 | else | |
1800 | type = TREE_TYPE (call_arg); | |
1801 | else | |
1802 | { | |
1803 | type = TREE_TYPE (arg); | |
1804 | arg = TREE_CHAIN (arg); | |
1805 | } | |
1806 | else | |
1807 | type = TREE_TYPE (call_arg); | |
1808 | ||
1809 | new_args.safe_push (call_arg); | |
1810 | ||
1811 | if (BOUNDED_TYPE_P (type) | |
1812 | || pass_by_reference (NULL, TYPE_MODE (type), type, true)) | |
1813 | new_args.safe_push (chkp_find_bounds (call_arg, gsi)); | |
1814 | else if (chkp_type_has_pointer (type)) | |
1815 | { | |
1816 | HOST_WIDE_INT max_bounds | |
1817 | = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE; | |
1818 | tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds); | |
1819 | HOST_WIDE_INT bnd_no; | |
1820 | ||
1821 | memset (all_bounds, 0, sizeof (tree) * max_bounds); | |
1822 | ||
1823 | chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi); | |
1824 | ||
1825 | for (bnd_no = 0; bnd_no < max_bounds; bnd_no++) | |
1826 | if (all_bounds[bnd_no]) | |
1827 | new_args.safe_push (all_bounds[bnd_no]); | |
1828 | ||
1829 | free (all_bounds); | |
1830 | } | |
1831 | } | |
1832 | ||
1833 | if (new_args.length () == gimple_call_num_args (call)) | |
1834 | new_call = call; | |
1835 | else | |
1836 | { | |
1837 | new_call = gimple_build_call_vec (gimple_op (call, 1), new_args); | |
1838 | gimple_call_set_lhs (new_call, gimple_call_lhs (call)); | |
1839 | gimple_call_copy_flags (new_call, call); | |
1840 | } | |
1841 | new_args.release (); | |
1842 | ||
d5e254e1 IE |
1843 | /* For direct calls fndecl is replaced with instrumented version. */ |
1844 | if (fndecl) | |
1845 | { | |
1846 | tree new_decl = chkp_maybe_create_clone (fndecl)->decl; | |
1847 | gimple_call_set_fndecl (new_call, new_decl); | |
1848 | gimple_call_set_fntype (new_call, TREE_TYPE (new_decl)); | |
1849 | } | |
1850 | /* For indirect call we should fix function pointer type if | |
1851 | pass some bounds. */ | |
1852 | else if (new_call != call) | |
1853 | { | |
1854 | tree type = gimple_call_fntype (call); | |
1855 | type = chkp_copy_function_type_adding_bounds (type); | |
1856 | gimple_call_set_fntype (new_call, type); | |
1857 | } | |
1858 | ||
1859 | /* replace old call statement with the new one. */ | |
1860 | if (call != new_call) | |
1861 | { | |
1862 | FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS) | |
1863 | { | |
1864 | SSA_NAME_DEF_STMT (op) = new_call; | |
1865 | } | |
1866 | gsi_replace (gsi, new_call, true); | |
1867 | } | |
1868 | else | |
1869 | update_stmt (new_call); | |
1870 | ||
1871 | gimple_call_set_with_bounds (new_call, true); | |
1872 | } | |
1873 | ||
/* Return constant static bounds var with specified LB and UB
   if such var exists in varpool.  Return NULL otherwise.  */
static tree
chkp_find_const_bounds_var (HOST_WIDE_INT lb,
			    HOST_WIDE_INT ub)
{
  tree val = targetm.chkp_make_bounds_constant (lb, ub);
  struct varpool_node *node;

  /* We expect bounds constant is represented as a complex value
     of two pointer sized integers.  */
  gcc_assert (TREE_CODE (val) == COMPLEX_CST);

  /* Scan all variables known to varpool for a read-only bounds
     decl whose COMPLEX_CST initializer matches VAL in both the
     real (lower bound) and imaginary (upper bound) parts.  */
  FOR_EACH_VARIABLE (node)
    if (POINTER_BOUNDS_P (node->decl)
	&& TREE_READONLY (node->decl)
	&& DECL_INITIAL (node->decl)
	&& TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
	&& tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
			       TREE_REALPART (val))
	&& tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
			       TREE_IMAGPART (val)))
      return node->decl;

  return NULL;
}
1900 | ||
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exist then a new var is created with
   specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree var;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  var = chkp_find_const_bounds_var (lb, ub);

  if (var)
    return var;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		    get_identifier (name), pointer_bounds_type_node);

  /* The var is a read-only, artificial static so it can be shared
     and emitted once per binary.  */
  TREE_PUBLIC (var) = 1;
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  if (!in_lto_p)
    {
      DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
      DECL_COMDAT (var) = 1;
      varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
      varpool_node::get_create (var)->force_output = 1;
    }
  else
    /* In LTO mode the definition is expected to be provided by
       another translation unit; only declare the symbol here.  */
    DECL_EXTERNAL (var) = 1;
  varpool_node::finalize_decl (var);

  return var;
}
1943 | ||
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE must be valid GIMPLE call operands; any statements
     needed to gimplify them are collected into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk builtin call producing the bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is honored only when ITER is given; for the entry block
     the sequence always goes before the iterator position.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, " at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
1996 | ||
1997 | /* Return var holding zero bounds. */ | |
1998 | tree | |
1999 | chkp_get_zero_bounds_var (void) | |
2000 | { | |
2001 | if (!chkp_zero_bounds_var) | |
2002 | { | |
2003 | tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME); | |
2004 | symtab_node *node = symtab_node::get_for_asmname (id); | |
2005 | if (node) | |
2006 | chkp_zero_bounds_var = node->decl; | |
2007 | } | |
2008 | ||
2009 | if (!chkp_zero_bounds_var) | |
2010 | chkp_zero_bounds_var | |
2011 | = chkp_make_static_const_bounds (0, -1, | |
2012 | CHKP_ZERO_BOUNDS_VAR_NAME); | |
2013 | return chkp_zero_bounds_var; | |
2014 | } | |
2015 | ||
2016 | /* Return var holding none bounds. */ | |
2017 | tree | |
2018 | chkp_get_none_bounds_var (void) | |
2019 | { | |
2020 | if (!chkp_none_bounds_var) | |
2021 | { | |
2022 | tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME); | |
2023 | symtab_node *node = symtab_node::get_for_asmname (id); | |
2024 | if (node) | |
2025 | chkp_none_bounds_var = node->decl; | |
2026 | } | |
2027 | ||
2028 | if (!chkp_none_bounds_var) | |
2029 | chkp_none_bounds_var | |
2030 | = chkp_make_static_const_bounds (-1, 0, | |
2031 | CHKP_NONE_BOUNDS_VAR_NAME); | |
2032 | return chkp_none_bounds_var; | |
2033 | } | |
2034 | ||
/* Return SSA_NAME used to represent zero bounds.  Created lazily
   on the first request and cached in ZERO_BOUNDS.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* When static constant bounds may be used, load the precomputed
     read-only bounds var in the entry block instead of emitting a
     bndmk.  NOTE(review): the separate "> 0" test suggests
     flag_chkp_use_static_const_bounds is tri-state (forced on /
     auto / off) — confirm against the option definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build the bounds value in the entry block
       with lb = 0 and size = 0.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2063 | ||
/* Return SSA_NAME used to represent none bounds.  Created lazily
   on the first request and cached in NONE_BOUNDS.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  /* When static constant bounds may be used, load the precomputed
     read-only bounds var in the entry block instead of emitting
     a bndmk.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build the bounds value with lb = -1 and size = 2,
       matching the (-1, 0) pair used by chkp_get_none_bounds_var.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2093 | ||
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Zero bounds (lb 0, ub -1 — see chkp_get_zero_bounds_var) are
     used so such results never trigger bounds violations.  */
  return chkp_get_zero_bounds ();
}
2101 | ||
2102 | /* Return bounds to be used for loads of non-pointer values. */ | |
2103 | static tree | |
2104 | chkp_get_nonpointer_load_bounds (void) | |
2105 | { | |
2106 | return chkp_get_zero_bounds (); | |
2107 | } | |
2108 | ||
985f48f7 IE |
2109 | /* Return 1 if may use bndret call to get bounds for pointer |
2110 | returned by CALL. */ | |
2111 | static bool | |
2112 | chkp_call_returns_bounds_p (gcall *call) | |
2113 | { | |
2114 | if (gimple_call_internal_p (call)) | |
2115 | return false; | |
2116 | ||
2117 | tree fndecl = gimple_call_fndecl (call); | |
2118 | ||
2119 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
2120 | return false; | |
2121 | ||
2122 | if (fndecl | |
2123 | && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl))) | |
2124 | return false; | |
2125 | ||
2126 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
2127 | { | |
2128 | if (chkp_instrument_normal_builtin (fndecl)) | |
2129 | return true; | |
2130 | ||
2131 | if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl))) | |
2132 | return false; | |
2133 | ||
2134 | struct cgraph_node *clone = chkp_maybe_create_clone (fndecl); | |
2135 | return (clone && gimple_has_body_p (clone->decl)); | |
2136 | } | |
2137 | ||
2138 | return true; | |
2139 | } | |
2140 | ||
d5e254e1 IE |
/* Build bounds returned by CALL.  The computed bounds are
   registered for the call's LHS and returned.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  Bounds start at the returned pointer
     and span the requested allocation size.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = 0, argno;
      /* NOTE(review): ERF_RETURNS_ARG was already established by the
	 enclosing condition, so this inner test is always true.  */
      if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
      /* For an instrumented call, bounds args are interleaved with
	 value args, so RETARG counts only non-bounds args; skip
	 POINTER_BOUNDS args while locating the returned argument.  */
      if (gimple_call_with_bounds_p (call))
	{
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* The callee cannot provide bounds; fall back to zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2247 | ||
/* Return bounds used as returned by call
   which produced SSA name VAL.  I.e. find the retbnd builtin call
   consuming VAL; return NULL if VAL is not an SSA name or no such
   use exists.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL is expected to be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to the retbnd builtin
     (chkp_ret_bnd_fndecl).  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2267 | ||
2268 | /* Check the next parameter for the given PARM is bounds | |
2269 | and return it's default SSA_NAME (create if required). */ | |
2270 | static tree | |
2271 | chkp_get_next_bounds_parm (tree parm) | |
2272 | { | |
2273 | tree bounds = TREE_CHAIN (parm); | |
2274 | gcc_assert (POINTER_BOUNDS_P (bounds)); | |
2275 | bounds = ssa_default_def (cfun, bounds); | |
2276 | if (!bounds) | |
2277 | { | |
2278 | bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ()); | |
2279 | set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds); | |
2280 | } | |
2281 | return bounds; | |
2282 | } | |
2283 | ||
/* Return bounds to be used for input argument PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Reuse bounds registered earlier for either the SSA name
     or the underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer parameters take bounds from the artificial
	     bounds parameter that follows them in the arg list.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure the result is registered for the SSA name too.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2347 | ||
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments: load bounds for pointer PTR stored at address ADDR.
   (The previous comment said "bndstx"; this routine builds the
   bounds *load* call.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as already operating with bounds.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2361 | ||
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* ADDR and PTR must be valid GIMPLE call operands; any statements
     needed to gimplify them are collected into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2396 | ||
2397 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2398 | arguments. */ | |
2399 | tree | |
2400 | chkp_build_bndstx_call (tree addr, tree ptr, tree bounds) | |
2401 | { | |
2402 | tree fn = build1 (ADDR_EXPR, | |
2403 | build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)), | |
2404 | chkp_bndstx_fndecl); | |
2405 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)), | |
2406 | fn, 3, ptr, bounds, addr); | |
2407 | CALL_WITH_BOUNDS_P (call) = true; | |
2408 | return call; | |
2409 | } | |
2410 | ||
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* ADDR and PTR must be valid GIMPLE call operands; any statements
     needed to gimplify them are collected into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Argument order for the builtin is: pointer value, its bounds,
     then the storage address.  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2441 | ||
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   If NODE is non-NULL the result is also registered for it.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx needs the param's address to be taken.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   NOTE(review): the inner if/else chains below are
	   brace-less; each dangling "else" binds to the nearest
	   unmatched "if".  Preserve this structure exactly.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition
	       the value itself is selected by.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the
	       min/max at runtime.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2647 | ||
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI definition a new bounds PHI node is created and ITER is set
   to point to it; its arguments are NOT filled in here — the caller is
   expected to complete them (see chkp_find_bounds_1).

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the kind of decl
	 the SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a nonzero constant size
	       for the bounds below to make sense.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For names in abnormal PHIs we need a bounds variable tied to
	 the original decl (or a fresh temporary SSA name); otherwise a
	 plain temporary register suffices.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Pointers produced by asm get zero bounds (no checking).  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2760 | ||
2761 | /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */ | |
2762 | tree | |
2763 | chkp_build_make_bounds_call (tree lower_bound, tree size) | |
2764 | { | |
2765 | tree call = build1 (ADDR_EXPR, | |
2766 | build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)), | |
2767 | chkp_bndmk_fndecl); | |
2768 | return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)), | |
2769 | call, 2, lower_bound, size); | |
2770 | } | |
2771 | ||
2772 | /* Create static bounds var of specfified OBJ which is | |
2773 | is either VAR_DECL or string constant. */ | |
2774 | static tree | |
2775 | chkp_make_static_bounds (tree obj) | |
2776 | { | |
2777 | static int string_id = 1; | |
2778 | static int var_id = 1; | |
2779 | tree *slot; | |
2780 | const char *var_name; | |
2781 | char *bnd_var_name; | |
2782 | tree bnd_var; | |
2783 | ||
2784 | /* First check if we already have required var. */ | |
2785 | if (chkp_static_var_bounds) | |
2786 | { | |
227eabce IE |
2787 | /* For vars we use assembler name as a key in |
2788 | chkp_static_var_bounds map. It allows to | |
2789 | avoid duplicating bound vars for decls | |
2790 | sharing assembler name. */ | |
2791 | if (TREE_CODE (obj) == VAR_DECL) | |
2792 | { | |
2793 | tree name = DECL_ASSEMBLER_NAME (obj); | |
2794 | slot = chkp_static_var_bounds->get (name); | |
2795 | if (slot) | |
2796 | return *slot; | |
2797 | } | |
2798 | else | |
2799 | { | |
2800 | slot = chkp_static_var_bounds->get (obj); | |
2801 | if (slot) | |
2802 | return *slot; | |
2803 | } | |
d5e254e1 IE |
2804 | } |
2805 | ||
2806 | /* Build decl for bounds var. */ | |
2807 | if (TREE_CODE (obj) == VAR_DECL) | |
2808 | { | |
2809 | if (DECL_IGNORED_P (obj)) | |
2810 | { | |
2811 | bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10); | |
2812 | sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++); | |
2813 | } | |
2814 | else | |
2815 | { | |
2816 | var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj)); | |
2817 | ||
2818 | /* For hidden symbols we want to skip first '*' char. */ | |
2819 | if (*var_name == '*') | |
2820 | var_name++; | |
2821 | ||
2822 | bnd_var_name = (char *) xmalloc (strlen (var_name) | |
2823 | + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1); | |
2824 | strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX); | |
2825 | strcat (bnd_var_name, var_name); | |
2826 | } | |
2827 | ||
2828 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
2829 | get_identifier (bnd_var_name), | |
2830 | pointer_bounds_type_node); | |
2831 | ||
2832 | /* Address of the obj will be used as lower bound. */ | |
2833 | TREE_ADDRESSABLE (obj) = 1; | |
2834 | } | |
2835 | else | |
2836 | { | |
2837 | bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10); | |
2838 | sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++); | |
2839 | ||
2840 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
2841 | get_identifier (bnd_var_name), | |
2842 | pointer_bounds_type_node); | |
2843 | } | |
2844 | ||
2845 | TREE_PUBLIC (bnd_var) = 0; | |
2846 | TREE_USED (bnd_var) = 1; | |
2847 | TREE_READONLY (bnd_var) = 0; | |
2848 | TREE_STATIC (bnd_var) = 1; | |
2849 | TREE_ADDRESSABLE (bnd_var) = 0; | |
2850 | DECL_ARTIFICIAL (bnd_var) = 1; | |
2851 | DECL_COMMON (bnd_var) = 1; | |
2852 | DECL_COMDAT (bnd_var) = 1; | |
2853 | DECL_READ_P (bnd_var) = 1; | |
2854 | DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj); | |
2855 | /* Force output similar to constant bounds. | |
2856 | See chkp_make_static_const_bounds. */ | |
2857 | varpool_node::get_create (bnd_var)->force_output = 1; | |
2858 | /* Mark symbol as requiring bounds initialization. */ | |
2859 | varpool_node::get_create (bnd_var)->need_bounds_init = 1; | |
2860 | varpool_node::finalize_decl (bnd_var); | |
2861 | ||
2862 | /* Add created var to the map to use it for other references | |
2863 | to obj. */ | |
2864 | if (!chkp_static_var_bounds) | |
2865 | chkp_static_var_bounds = new hash_map<tree, tree>; | |
2866 | ||
227eabce IE |
2867 | if (TREE_CODE (obj) == VAR_DECL) |
2868 | { | |
2869 | tree name = DECL_ASSEMBLER_NAME (obj); | |
2870 | chkp_static_var_bounds->put (name, bnd_var); | |
2871 | } | |
2872 | else | |
2873 | chkp_static_var_bounds->put (obj, bnd_var); | |
d5e254e1 IE |
2874 | |
2875 | return bnd_var; | |
2876 | } | |
2877 | ||
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* size_reloc = __chkp_sizeof (var) — resolved at link/run time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 max_size = 0 - lb, i.e. everything from lb to the top of
	 the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = size_reloc != 0 ? size_reloc : max_size  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* Emit the whole sequence at the start of the entry block so the
     size is available for all uses in the function.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2938 | ||
2939 | /* Return 1 if TYPE has fields with zero size or fields | |
2940 | marked with chkp_variable_size attribute. */ | |
2941 | bool | |
2942 | chkp_variable_size_type (tree type) | |
2943 | { | |
2944 | bool res = false; | |
2945 | tree field; | |
2946 | ||
2947 | if (RECORD_OR_UNION_TYPE_P (type)) | |
2948 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
2949 | { | |
2950 | if (TREE_CODE (field) == FIELD_DECL) | |
2951 | res = res | |
2952 | || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
2953 | || chkp_variable_size_type (TREE_TYPE (field)); | |
2954 | } | |
2955 | else | |
2956 | res = !TYPE_SIZE (type) | |
2957 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST | |
2958 | || tree_to_uhwi (TYPE_SIZE (type)) == 0; | |
2959 | ||
2960 | return res; | |
2961 | } | |
2962 | ||
2963 | /* Compute and return bounds for address of DECL which is | |
2964 | one of VAR_DECL, PARM_DECL, RESULT_DECL. */ | |
2965 | static tree | |
2966 | chkp_get_bounds_for_decl_addr (tree decl) | |
2967 | { | |
2968 | tree bounds; | |
2969 | ||
2970 | gcc_assert (TREE_CODE (decl) == VAR_DECL | |
2971 | || TREE_CODE (decl) == PARM_DECL | |
2972 | || TREE_CODE (decl) == RESULT_DECL); | |
2973 | ||
2974 | bounds = chkp_get_registered_addr_bounds (decl); | |
2975 | ||
2976 | if (bounds) | |
2977 | return bounds; | |
2978 | ||
2979 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2980 | { | |
2981 | fprintf (dump_file, "Building bounds for address of decl "); | |
2982 | print_generic_expr (dump_file, decl, 0); | |
2983 | fprintf (dump_file, "\n"); | |
2984 | } | |
2985 | ||
2986 | /* Use zero bounds if size is unknown and checks for | |
2987 | unknown sizes are restricted. */ | |
2988 | if ((!DECL_SIZE (decl) | |
2989 | || (chkp_variable_size_type (TREE_TYPE (decl)) | |
2990 | && (TREE_STATIC (decl) | |
2991 | || DECL_EXTERNAL (decl) | |
2992 | || TREE_PUBLIC (decl)))) | |
2993 | && !flag_chkp_incomplete_type) | |
2994 | return chkp_get_zero_bounds (); | |
2995 | ||
2996 | if (flag_chkp_use_static_bounds | |
2997 | && TREE_CODE (decl) == VAR_DECL | |
2998 | && (TREE_STATIC (decl) | |
2999 | || DECL_EXTERNAL (decl) | |
3000 | || TREE_PUBLIC (decl)) | |
3001 | && !DECL_THREAD_LOCAL_P (decl)) | |
3002 | { | |
3003 | tree bnd_var = chkp_make_static_bounds (decl); | |
3004 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
3005 | gimple stmt; | |
3006 | ||
3007 | bounds = chkp_get_tmp_reg (gimple_build_nop ()); | |
3008 | stmt = gimple_build_assign (bounds, bnd_var); | |
3009 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
3010 | } | |
3011 | else if (!DECL_SIZE (decl) | |
3012 | || (chkp_variable_size_type (TREE_TYPE (decl)) | |
3013 | && (TREE_STATIC (decl) | |
3014 | || DECL_EXTERNAL (decl) | |
3015 | || TREE_PUBLIC (decl)))) | |
3016 | { | |
3017 | gcc_assert (TREE_CODE (decl) == VAR_DECL); | |
3018 | bounds = chkp_generate_extern_var_bounds (decl); | |
3019 | } | |
3020 | else | |
3021 | { | |
3022 | tree lb = chkp_build_addr_expr (decl); | |
3023 | bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false); | |
3024 | } | |
3025 | ||
3026 | return bounds; | |
3027 | } | |
3028 | ||
3029 | /* Compute and return bounds for constant string. */ | |
3030 | static tree | |
3031 | chkp_get_bounds_for_string_cst (tree cst) | |
3032 | { | |
3033 | tree bounds; | |
3034 | tree lb; | |
3035 | tree size; | |
3036 | ||
3037 | gcc_assert (TREE_CODE (cst) == STRING_CST); | |
3038 | ||
3039 | bounds = chkp_get_registered_bounds (cst); | |
3040 | ||
3041 | if (bounds) | |
3042 | return bounds; | |
3043 | ||
3044 | if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds) | |
3045 | || flag_chkp_use_static_const_bounds > 0) | |
3046 | { | |
3047 | tree bnd_var = chkp_make_static_bounds (cst); | |
3048 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
3049 | gimple stmt; | |
3050 | ||
3051 | bounds = chkp_get_tmp_reg (gimple_build_nop ()); | |
3052 | stmt = gimple_build_assign (bounds, bnd_var); | |
3053 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
3054 | } | |
3055 | else | |
3056 | { | |
3057 | lb = chkp_build_addr_expr (cst); | |
3058 | size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst)); | |
3059 | bounds = chkp_make_bounds (lb, size, NULL, false); | |
3060 | } | |
3061 | ||
3062 | bounds = chkp_maybe_copy_and_register_bounds (cst, bounds); | |
3063 | ||
3064 | return bounds; | |
3065 | } | |
3066 | ||
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   the entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (or missing) bounds on either side mean "no restriction",
     so the other operand is the whole intersection.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      /* bounds = __chkp_intersect (bounds1, bounds2)  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  Note that ITER is
	 re-pointed at the local GSI here so the dump code below can
	 use *ITER uniformly in both branches.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3117 | ||
3118 | /* Return 1 if we are allowed to narrow bounds for addressed FIELD | |
3119 | and 0 othersize. */ | |
3120 | static bool | |
3121 | chkp_may_narrow_to_field (tree field) | |
3122 | { | |
3123 | return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST | |
3124 | && tree_to_uhwi (DECL_SIZE (field)) != 0 | |
3125 | && (!DECL_FIELD_OFFSET (field) | |
3126 | || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST) | |
3127 | && (!DECL_FIELD_BIT_OFFSET (field) | |
3128 | || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST) | |
3129 | && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3130 | && !chkp_variable_size_type (TREE_TYPE (field)); | |
3131 | } | |
3132 | ||
3133 | /* Return 1 if bounds for FIELD should be narrowed to | |
3134 | field's own size. */ | |
3135 | static bool | |
3136 | chkp_narrow_bounds_for_field (tree field) | |
3137 | { | |
3138 | HOST_WIDE_INT offs; | |
3139 | HOST_WIDE_INT bit_offs; | |
3140 | ||
3141 | if (!chkp_may_narrow_to_field (field)) | |
3142 | return false; | |
3143 | ||
3144 | /* Accesse to compiler generated fields should not cause | |
3145 | bounds narrowing. */ | |
3146 | if (DECL_ARTIFICIAL (field)) | |
3147 | return false; | |
3148 | ||
3149 | offs = tree_to_uhwi (DECL_FIELD_OFFSET (field)); | |
3150 | bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)); | |
3151 | ||
3152 | return (flag_chkp_narrow_bounds | |
3153 | && (flag_chkp_first_field_has_own_bounds | |
3154 | || offs | |
3155 | || bit_offs)); | |
3156 | } | |
3157 | ||
3158 | /* Perform narrowing for BOUNDS using bounds computed for field | |
3159 | access COMPONENT. ITER meaning is the same as for | |
3160 | chkp_intersect_bounds. */ | |
3161 | static tree | |
3162 | chkp_narrow_bounds_to_field (tree bounds, tree component, | |
3163 | gimple_stmt_iterator *iter) | |
3164 | { | |
3165 | tree field = TREE_OPERAND (component, 1); | |
3166 | tree size = DECL_SIZE_UNIT (field); | |
3167 | tree field_ptr = chkp_build_addr_expr (component); | |
3168 | tree field_bounds; | |
3169 | ||
3170 | field_bounds = chkp_make_bounds (field_ptr, size, iter, false); | |
3171 | ||
3172 | return chkp_intersect_bounds (field_bounds, bounds, iter); | |
3173 | } | |
3174 | ||
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  nodes[0] is the base object,
     nodes[len - 1] is NODE itself.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array accesses are never statically provably in range
	     here, so checks are required.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* When narrowing to the innermost array, narrow eagerly at
	     every array-typed field and reset any pending candidate.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* Fall back to the outermost pointer's bounds when nothing above
     produced bounds but the caller asked for them.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3316 | ||
/* Compute and return bounds for address of OBJ.
   Dispatches on the tree code of OBJ and registers the computed
   bounds so repeated queries for the same address are cheap.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the access parser compute (possibly narrowed) bounds;
	   the other outputs are not needed here.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Addresses of functions and labels get zero bounds
	 (no checking).  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3384 | ||
3385 | /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements | |
3386 | to compute bounds if required. Computed bounds should be available at | |
3387 | position pointed by ITER. | |
3388 | ||
3389 | If PTR_SRC is NULL_TREE then pointer definition is identified. | |
3390 | ||
3391 | If PTR_SRC is not NULL_TREE then ITER points to statements which loads | |
3392 | PTR. If PTR is a any memory reference then ITER points to a statement | |
3393 | after which bndldx will be inserterd. In both cases ITER will be updated | |
3394 | to point to the inserted bndldx statement. */ | |
3395 | ||
3396 | static tree | |
3397 | chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter) | |
3398 | { | |
3399 | tree addr = NULL_TREE; | |
3400 | tree bounds = NULL_TREE; | |
3401 | ||
3402 | if (!ptr_src) | |
3403 | ptr_src = ptr; | |
3404 | ||
3405 | bounds = chkp_get_registered_bounds (ptr_src); | |
3406 | ||
3407 | if (bounds) | |
3408 | return bounds; | |
3409 | ||
3410 | switch (TREE_CODE (ptr_src)) | |
3411 | { | |
3412 | case MEM_REF: | |
3413 | case VAR_DECL: | |
3414 | if (BOUNDED_P (ptr_src)) | |
3415 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3416 | bounds = chkp_get_zero_bounds (); | |
3417 | else | |
3418 | { | |
3419 | addr = chkp_build_addr_expr (ptr_src); | |
3420 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3421 | } | |
3422 | else | |
3423 | bounds = chkp_get_nonpointer_load_bounds (); | |
3424 | break; | |
3425 | ||
3426 | case ARRAY_REF: | |
3427 | case COMPONENT_REF: | |
3428 | addr = get_base_address (ptr_src); | |
3429 | if (DECL_P (addr) | |
3430 | || TREE_CODE (addr) == MEM_REF | |
3431 | || TREE_CODE (addr) == TARGET_MEM_REF) | |
3432 | { | |
3433 | if (BOUNDED_P (ptr_src)) | |
3434 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3435 | bounds = chkp_get_zero_bounds (); | |
3436 | else | |
3437 | { | |
3438 | addr = chkp_build_addr_expr (ptr_src); | |
3439 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3440 | } | |
3441 | else | |
3442 | bounds = chkp_get_nonpointer_load_bounds (); | |
3443 | } | |
3444 | else | |
3445 | { | |
3446 | gcc_assert (TREE_CODE (addr) == SSA_NAME); | |
3447 | bounds = chkp_find_bounds (addr, iter); | |
3448 | } | |
3449 | break; | |
3450 | ||
3451 | case PARM_DECL: | |
3452 | gcc_unreachable (); | |
3453 | bounds = chkp_get_bound_for_parm (ptr_src); | |
3454 | break; | |
3455 | ||
3456 | case TARGET_MEM_REF: | |
3457 | addr = chkp_build_addr_expr (ptr_src); | |
3458 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3459 | break; | |
3460 | ||
3461 | case SSA_NAME: | |
3462 | bounds = chkp_get_registered_bounds (ptr_src); | |
3463 | if (!bounds) | |
3464 | { | |
3465 | gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src); | |
538dd0b7 | 3466 | gphi_iterator phi_iter; |
d5e254e1 IE |
3467 | |
3468 | bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter); | |
3469 | ||
3470 | gcc_assert (bounds); | |
3471 | ||
538dd0b7 | 3472 | if (gphi *def_phi = dyn_cast <gphi *> (def_stmt)) |
d5e254e1 IE |
3473 | { |
3474 | unsigned i; | |
3475 | ||
538dd0b7 | 3476 | for (i = 0; i < gimple_phi_num_args (def_phi); i++) |
d5e254e1 | 3477 | { |
538dd0b7 | 3478 | tree arg = gimple_phi_arg_def (def_phi, i); |
d5e254e1 | 3479 | tree arg_bnd; |
538dd0b7 | 3480 | gphi *phi_bnd; |
d5e254e1 IE |
3481 | |
3482 | arg_bnd = chkp_find_bounds (arg, NULL); | |
3483 | ||
3484 | /* chkp_get_bounds_by_definition created new phi | |
3485 | statement and phi_iter points to it. | |
3486 | ||
3487 | Previous call to chkp_find_bounds could create | |
3488 | new basic block and therefore change phi statement | |
3489 | phi_iter points to. */ | |
538dd0b7 | 3490 | phi_bnd = phi_iter.phi (); |
d5e254e1 IE |
3491 | |
3492 | add_phi_arg (phi_bnd, arg_bnd, | |
538dd0b7 | 3493 | gimple_phi_arg_edge (def_phi, i), |
d5e254e1 IE |
3494 | UNKNOWN_LOCATION); |
3495 | } | |
3496 | ||
3497 | /* If all bound phi nodes have their arg computed | |
3498 | then we may finish its computation. See | |
3499 | chkp_finish_incomplete_bounds for more details. */ | |
3500 | if (chkp_may_finish_incomplete_bounds ()) | |
3501 | chkp_finish_incomplete_bounds (); | |
3502 | } | |
3503 | ||
3504 | gcc_assert (bounds == chkp_get_registered_bounds (ptr_src) | |
3505 | || chkp_incomplete_bounds (bounds)); | |
3506 | } | |
3507 | break; | |
3508 | ||
3509 | case ADDR_EXPR: | |
3510 | bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter); | |
3511 | break; | |
3512 | ||
3513 | case INTEGER_CST: | |
3514 | if (integer_zerop (ptr_src)) | |
3515 | bounds = chkp_get_none_bounds (); | |
3516 | else | |
3517 | bounds = chkp_get_invalid_op_bounds (); | |
3518 | break; | |
3519 | ||
3520 | default: | |
3521 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3522 | { | |
3523 | fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n", | |
3524 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3525 | print_node (dump_file, "", ptr_src, 0); | |
3526 | } | |
3527 | internal_error ("chkp_find_bounds: Unexpected tree code %s", | |
3528 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3529 | } | |
3530 | ||
3531 | if (!bounds) | |
3532 | { | |
3533 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3534 | { | |
3535 | fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n"); | |
3536 | print_node (dump_file, "", ptr_src, 0); | |
3537 | } | |
3538 | internal_error ("chkp_find_bounds: Cannot find bounds for pointer"); | |
3539 | } | |
3540 | ||
3541 | return bounds; | |
3542 | } | |
3543 | ||
/* Normal case for bounds search without forced narrowing:
   no separate load source, so chkp_find_bounds_1 identifies
   the definition of PTR itself.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3550 | ||
3551 | /* Search bounds for pointer PTR loaded from PTR_SRC | |
3552 | by statement *ITER points to. */ | |
3553 | static tree | |
3554 | chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter) | |
3555 | { | |
3556 | return chkp_find_bounds_1 (ptr, ptr_src, iter); | |
3557 | } | |
3558 | ||
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   LHS and RHS are the destination and the source of the copy; ARG is
   passed through unchanged to HANDLER.  Recurses through records,
   unions and arrays to reach every bounded (pointer) component.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A pointer value itself - hand it to the callback directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Aggregate initializer: recurse only into elements whose
	     field type may contain pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain aggregate copy: build matching COMPONENT_REFs on both
	   sides and recurse field by field.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* A RANGE_EXPR index initializes several consecutive
		     elements with the same value.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit INTEGER_CST index repositions CUR;
		     elements without an index continue from the
		     previous position (hence the post-increment).  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3657 | ||
/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the code position where
   new statements should be inserted.  Used as an
   assign_handler callback for chkp_walk_pointer_assignments.  */
static void
chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
{
  gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
  /* Find bounds of the copied pointer value ...  */
  tree bounds = chkp_find_bounds (rhs, iter);
  /* ... and store them into the bounds table for the address
     of the destination object.  */
  tree addr = chkp_build_addr_expr(lhs);

  chkp_build_bndstx (addr, rhs, bounds, iter);
}
3669 | ||
/* Emit static bound initializers and size vars.  Called once per
   translation unit at the end of compilation.  Builds 'P'-kind
   static constructors initializing bounds of statically initialized
   pointers, and 'B'-kind constructors initializing static bounds
   vars themselves.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush accumulated statements into a constructor once the
	   per-constructor statement budget is exhausted, then start
	   a fresh list.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit any remaining statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is initialized with the address of
	   the variable it holds bounds for.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* These maps live for the whole compilation; release them now.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3737 | ||
3738 | /* An instrumentation function which is called for each statement | |
3739 | having memory access we want to instrument. It inserts check | |
3740 | code and bounds copy code. | |
3741 | ||
3742 | ITER points to statement to instrument. | |
3743 | ||
3744 | NODE holds memory access in statement to check. | |
3745 | ||
3746 | LOC holds the location information for statement. | |
3747 | ||
3748 | DIRFLAGS determines whether access is read or write. | |
3749 | ||
3750 | ACCESS_OFFS should be added to address used in NODE | |
3751 | before check. | |
3752 | ||
3753 | ACCESS_SIZE holds size of checked access. | |
3754 | ||
3755 | SAFE indicates if NODE access is safe and should not be | |
3756 | checked. */ | |
3757 | static void | |
3758 | chkp_process_stmt (gimple_stmt_iterator *iter, tree node, | |
3759 | location_t loc, tree dirflag, | |
3760 | tree access_offs, tree access_size, | |
3761 | bool safe) | |
3762 | { | |
3763 | tree node_type = TREE_TYPE (node); | |
3764 | tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type); | |
3765 | tree addr_first = NULL_TREE; /* address of the first accessed byte */ | |
3766 | tree addr_last = NULL_TREE; /* address of the last accessed byte */ | |
3767 | tree ptr = NULL_TREE; /* a pointer used for dereference */ | |
3768 | tree bounds = NULL_TREE; | |
3769 | ||
3770 | /* We do not need instrumentation for clobbers. */ | |
3771 | if (dirflag == integer_one_node | |
3772 | && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN | |
3773 | && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter)))) | |
3774 | return; | |
3775 | ||
3776 | switch (TREE_CODE (node)) | |
3777 | { | |
3778 | case ARRAY_REF: | |
3779 | case COMPONENT_REF: | |
3780 | { | |
3781 | bool bitfield; | |
3782 | tree elt; | |
3783 | ||
3784 | if (safe) | |
3785 | { | |
3786 | /* We are not going to generate any checks, so do not | |
3787 | generate bounds as well. */ | |
3788 | addr_first = chkp_build_addr_expr (node); | |
3789 | break; | |
3790 | } | |
3791 | ||
3792 | chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe, | |
3793 | &bitfield, &bounds, iter, false); | |
3794 | ||
3795 | /* Break if there is no dereference and operation is safe. */ | |
3796 | ||
3797 | if (bitfield) | |
3798 | { | |
3799 | tree field = TREE_OPERAND (node, 1); | |
3800 | ||
3801 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) | |
3802 | size = DECL_SIZE_UNIT (field); | |
3803 | ||
3804 | if (elt) | |
3805 | elt = chkp_build_addr_expr (elt); | |
3806 | addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr); | |
3807 | addr_first = fold_build_pointer_plus_loc (loc, | |
3808 | addr_first, | |
3809 | byte_position (field)); | |
3810 | } | |
3811 | else | |
3812 | addr_first = chkp_build_addr_expr (node); | |
3813 | } | |
3814 | break; | |
3815 | ||
3816 | case INDIRECT_REF: | |
3817 | ptr = TREE_OPERAND (node, 0); | |
3818 | addr_first = ptr; | |
3819 | break; | |
3820 | ||
3821 | case MEM_REF: | |
3822 | ptr = TREE_OPERAND (node, 0); | |
3823 | addr_first = chkp_build_addr_expr (node); | |
3824 | break; | |
3825 | ||
3826 | case TARGET_MEM_REF: | |
3827 | ptr = TMR_BASE (node); | |
3828 | addr_first = chkp_build_addr_expr (node); | |
3829 | break; | |
3830 | ||
3831 | case ARRAY_RANGE_REF: | |
3832 | printf("ARRAY_RANGE_REF\n"); | |
3833 | debug_gimple_stmt(gsi_stmt(*iter)); | |
3834 | debug_tree(node); | |
3835 | gcc_unreachable (); | |
3836 | break; | |
3837 | ||
3838 | case BIT_FIELD_REF: | |
3839 | { | |
3840 | tree offs, rem, bpu; | |
3841 | ||
3842 | gcc_assert (!access_offs); | |
3843 | gcc_assert (!access_size); | |
3844 | ||
3845 | bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT)); | |
3846 | offs = fold_convert (size_type_node, TREE_OPERAND (node, 2)); | |
3847 | rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu); | |
3848 | offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu); | |
3849 | ||
3850 | size = fold_convert (size_type_node, TREE_OPERAND (node, 1)); | |
3851 | size = size_binop_loc (loc, PLUS_EXPR, size, rem); | |
3852 | size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu); | |
3853 | size = fold_convert (size_type_node, size); | |
3854 | ||
3855 | chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc, | |
3856 | dirflag, offs, size, safe); | |
3857 | return; | |
3858 | } | |
3859 | break; | |
3860 | ||
3861 | case VAR_DECL: | |
3862 | case RESULT_DECL: | |
3863 | case PARM_DECL: | |
3864 | if (dirflag != integer_one_node | |
3865 | || DECL_REGISTER (node)) | |
3866 | return; | |
3867 | ||
3868 | safe = true; | |
3869 | addr_first = chkp_build_addr_expr (node); | |
3870 | break; | |
3871 | ||
3872 | default: | |
3873 | return; | |
3874 | } | |
3875 | ||
3876 | /* If addr_last was not computed then use (addr_first + size - 1) | |
3877 | expression to compute it. */ | |
3878 | if (!addr_last) | |
3879 | { | |
3880 | addr_last = fold_build_pointer_plus_loc (loc, addr_first, size); | |
3881 | addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1); | |
3882 | } | |
3883 | ||
3884 | /* Shift both first_addr and last_addr by access_offs if specified. */ | |
3885 | if (access_offs) | |
3886 | { | |
3887 | addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs); | |
3888 | addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs); | |
3889 | } | |
3890 | ||
3891 | /* Generate bndcl/bndcu checks if memory access is not safe. */ | |
3892 | if (!safe) | |
3893 | { | |
3894 | gimple_stmt_iterator stmt_iter = *iter; | |
3895 | ||
3896 | if (!bounds) | |
3897 | bounds = chkp_find_bounds (ptr, iter); | |
3898 | ||
3899 | chkp_check_mem_access (addr_first, addr_last, bounds, | |
3900 | stmt_iter, loc, dirflag); | |
3901 | } | |
3902 | ||
3903 | /* We need to store bounds in case pointer is stored. */ | |
3904 | if (dirflag == integer_one_node | |
3905 | && chkp_type_has_pointer (node_type) | |
3906 | && flag_chkp_store_bounds) | |
3907 | { | |
3908 | gimple stmt = gsi_stmt (*iter); | |
3909 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
3910 | enum tree_code rhs_code = gimple_assign_rhs_code (stmt); | |
3911 | ||
3912 | if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS) | |
3913 | chkp_walk_pointer_assignments (node, rhs1, iter, | |
3914 | chkp_copy_bounds_for_elem); | |
3915 | else | |
3916 | { | |
3917 | bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt); | |
3918 | chkp_build_bndstx (addr_first, rhs1, bounds, iter); | |
3919 | } | |
3920 | } | |
3921 | } | |
3922 | ||
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   After inserting the bndldx/bndstx calls, walk backwards from
   ASSIGN and create call graph edges for every call inserted,
   since the inliner expects the call graph to be up to date.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp helper calls are expected here.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  /* Recompute frequency from the statement's actual block.  */
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
3961 | ||
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  Specifically: statements inserted after a
   block-ending statement are moved onto the fallthru edge.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Anything following a block-ending statement in the same
	   block must be relocated.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4019 | ||
4020 | /* Walker callback for chkp_replace_function_pointers. Replaces | |
4021 | function pointer in the specified operand with pointer to the | |
4022 | instrumented function version. */ | |
4023 | static tree | |
4024 | chkp_replace_function_pointer (tree *op, int *walk_subtrees, | |
4025 | void *data ATTRIBUTE_UNUSED) | |
4026 | { | |
4027 | if (TREE_CODE (*op) == FUNCTION_DECL | |
4028 | && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op)) | |
edcf72f3 IE |
4029 | && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN |
4030 | /* For builtins we replace pointers only for selected | |
4031 | function and functions having definitions. */ | |
4032 | || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL | |
4033 | && (chkp_instrument_normal_builtin (*op) | |
4034 | || gimple_has_body_p (*op))))) | |
d5e254e1 IE |
4035 | { |
4036 | struct cgraph_node *node = cgraph_node::get_create (*op); | |
edcf72f3 | 4037 | struct cgraph_node *clone = NULL; |
d5e254e1 IE |
4038 | |
4039 | if (!node->instrumentation_clone) | |
edcf72f3 | 4040 | clone = chkp_maybe_create_clone (*op); |
d5e254e1 | 4041 | |
edcf72f3 IE |
4042 | if (clone) |
4043 | *op = clone->decl; | |
d5e254e1 IE |
4044 | *walk_subtrees = 0; |
4045 | } | |
4046 | ||
4047 | return NULL; | |
4048 | } | |
4049 | ||
4050 | /* This function searches for function pointers in statement | |
4051 | pointed by GSI and replaces them with pointers to instrumented | |
4052 | function versions. */ | |
4053 | static void | |
4054 | chkp_replace_function_pointers (gimple_stmt_iterator *gsi) | |
4055 | { | |
4056 | gimple stmt = gsi_stmt (*gsi); | |
4057 | /* For calls we want to walk call args only. */ | |
4058 | if (gimple_code (stmt) == GIMPLE_CALL) | |
4059 | { | |
4060 | unsigned i; | |
4061 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
4062 | walk_tree (gimple_call_arg_ptr (stmt, i), | |
4063 | chkp_replace_function_pointer, NULL, NULL); | |
4064 | } | |
4065 | else | |
4066 | walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL); | |
4067 | } | |
4068 | ||
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.

   Finally, for functions whose addressable parameters carry bounds,
   it stores the incoming bounds into the bounds table at function
   entry.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Accesses inside chkp static constructors are known safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor now - instrumentation may change
	 the block chain.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is a write (integer_one_node), RHS operands
		 are reads (integer_zero_node).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  Note: done after gsi_next so the
	     iterator is not invalidated by the removal.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      /* Aggregate params may carry several bounds; find the
		 pointer slots and store bounds for each of them.  */
	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each bounds slot consumes one extra parameter.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4204 | ||
4205 | /* Find init/null/copy_ptr_bounds calls and replace them | |
4206 | with assignments. It should allow better code | |
4207 | optimization. */ | |
4208 | ||
4209 | static void | |
4210 | chkp_remove_useless_builtins () | |
4211 | { | |
4212 | basic_block bb; | |
4213 | gimple_stmt_iterator gsi; | |
4214 | ||
4215 | FOR_EACH_BB_FN (bb, cfun) | |
4216 | { | |
4217 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
4218 | { | |
4219 | gimple stmt = gsi_stmt (gsi); | |
4220 | tree fndecl; | |
4221 | enum built_in_function fcode; | |
4222 | ||
4223 | /* Find builtins returning first arg and replace | |
4224 | them with assignments. */ | |
4225 | if (gimple_code (stmt) == GIMPLE_CALL | |
4226 | && (fndecl = gimple_call_fndecl (stmt)) | |
4227 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
4228 | && (fcode = DECL_FUNCTION_CODE (fndecl)) | |
4229 | && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS | |
4230 | || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS | |
4231 | || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS | |
4232 | || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS)) | |
4233 | { | |
4234 | tree res = gimple_call_arg (stmt, 0); | |
4235 | update_call_from_tree (&gsi, res); | |
4236 | stmt = gsi_stmt (gsi); | |
4237 | update_stmt (stmt); | |
4238 | } | |
4239 | } | |
4240 | } | |
4241 | } | |
4242 | ||
/* Initialize pass.  Resets per-function state; note that some maps
   (chkp_reg_bounds, chkp_bound_vars, chkp_bounds_map) are deleted
   and re-created here because they survive between functions.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear "do not instrument" marks left from previous runs.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset per-function cached trees and blocks.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4288 | ||
/* Finalize instrumentation pass.  Releases per-function data only:
   chkp_reg_bounds, chkp_bound_vars and chkp_bounds_map are kept
   alive intentionally - they are re-created in chkp_init and
   chkp_bounds_map is freed in chkp_finish_file.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);
}
4305 | ||
/* Main instrumentation pass function.  Runs the full per-function
   instrumentation sequence and always returns 0 (no extra TODOs).  */
static unsigned int
chkp_execute (void)
{
  /* Set up per-function state and shared bounds vars.  */
  chkp_init ();

  /* Insert checks and bounds propagation code.  */
  chkp_instrument_function ();

  /* Replace chkp builtins that are equivalent to assignments.  */
  chkp_remove_useless_builtins ();

  /* Mark the function as instrumented.  */
  chkp_function_mark_instrumented (cfun->decl);

  /* Repair CFG inconsistencies introduced by instrumentation.  */
  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4324 | ||
4325 | /* Instrumentation pass gate. */ | |
4326 | static bool | |
4327 | chkp_gate (void) | |
4328 | { | |
4329 | return cgraph_node::get (cfun->decl)->instrumentation_clone | |
4330 | || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)); | |
4331 | } | |
4332 | ||
namespace {

/* Pass descriptor for the chkp instrumentation pass.  Requires SSA
   and CFG; verifies IL and updates SSA afterwards.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Gimple pass wrapper delegating gate/execute to the chkp_gate and
   chkp_execute functions above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4375 | ||
4376 | gimple_opt_pass * | |
4377 | make_pass_chkp (gcc::context *ctxt) | |
4378 | { | |
4379 | return new pass_chkp (ctxt); | |
4380 | } | |
4381 | ||
4382 | #include "gt-tree-chkp.h" |