]>
Commit | Line | Data |
---|---|---|
d5e254e1 | 1 | /* Pointer Bounds Checker instrumentation pass. |
5624e564 | 2 | Copyright (C) 2014-2015 Free Software Foundation, Inc. |
d5e254e1 IE |
3 | Contributed by Ilya Enkovich (ilya.enkovich@intel.com) |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
40e23961 MC |
24 | #include "hash-set.h" |
25 | #include "machmode.h" | |
26 | #include "vec.h" | |
27 | #include "double-int.h" | |
28 | #include "input.h" | |
29 | #include "alias.h" | |
30 | #include "symtab.h" | |
31 | #include "options.h" | |
32 | #include "wide-int.h" | |
33 | #include "inchash.h" | |
34 | #include "tree.h" | |
35 | #include "fold-const.h" | |
d5e254e1 IE |
36 | #include "stor-layout.h" |
37 | #include "varasm.h" | |
d5e254e1 IE |
38 | #include "target.h" |
39 | #include "tree-iterator.h" | |
40 | #include "tree-cfg.h" | |
41 | #include "langhooks.h" | |
42 | #include "tree-pass.h" | |
43 | #include "diagnostic.h" | |
44 | #include "ggc.h" | |
45 | #include "is-a.h" | |
46 | #include "cfgloop.h" | |
47 | #include "stringpool.h" | |
48 | #include "tree-ssa-alias.h" | |
49 | #include "tree-ssanames.h" | |
50 | #include "tree-ssa-operands.h" | |
51 | #include "tree-ssa-address.h" | |
52 | #include "tree-ssa.h" | |
53 | #include "predict.h" | |
54 | #include "dominance.h" | |
55 | #include "cfg.h" | |
56 | #include "basic-block.h" | |
57 | #include "tree-ssa-loop-niter.h" | |
58 | #include "gimple-expr.h" | |
59 | #include "gimple.h" | |
60 | #include "tree-phinodes.h" | |
61 | #include "gimple-ssa.h" | |
62 | #include "ssa-iterators.h" | |
63 | #include "gimple-pretty-print.h" | |
64 | #include "gimple-iterator.h" | |
65 | #include "gimplify.h" | |
66 | #include "gimplify-me.h" | |
67 | #include "print-tree.h" | |
68 | #include "expr.h" | |
69 | #include "tree-ssa-propagate.h" | |
70 | #include "gimple-fold.h" | |
71 | #include "tree-chkp.h" | |
72 | #include "gimple-walk.h" | |
73 | #include "rtl.h" /* For MEM_P, assign_temp. */ | |
74 | #include "tree-dfa.h" | |
75 | #include "ipa-ref.h" | |
76 | #include "lto-streamer.h" | |
77 | #include "cgraph.h" | |
78 | #include "ipa-chkp.h" | |
79 | #include "params.h" | |
80 | #include "ipa-chkp.h" | |
81 | #include "params.h" | |
82 | ||
83 | /* Pointer Bounds Checker instruments code with memory checks to find | |
84 | out-of-bounds memory accesses. Checks are performed by computing | |
85 | bounds for each pointer and then comparing address of accessed | |
86 | memory before pointer dereferencing. | |
87 | ||
88 | 1. Function clones. | |
89 | ||
90 | See ipa-chkp.c. | |
91 | ||
92 | 2. Instrumentation. | |
93 | ||
94 | There are few things to instrument: | |
95 | ||
96 | a) Memory accesses - add checker calls to check address of accessed memory | |
97 | against bounds of dereferenced pointer. Obviously safe memory | |
98 | accesses like static variable access does not have to be instrumented | |
99 | with checks. | |
100 | ||
101 | Example: | |
102 | ||
103 | val_2 = *p_1; | |
104 | ||
105 | with 4 bytes access is transformed into: | |
106 | ||
107 | __builtin___chkp_bndcl (__bound_tmp.1_3, p_1); | |
108 | D.1_4 = p_1 + 3; | |
109 | __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4); | |
110 | val_2 = *p_1; | |
111 | ||
112 | where __bound_tmp.1_3 are bounds computed for pointer p_1, | |
113 | __builtin___chkp_bndcl is a lower bound check and | |
114 | __builtin___chkp_bndcu is an upper bound check. | |
115 | ||
116 | b) Pointer stores. | |
117 | ||
118 | When pointer is stored in memory we need to store its bounds. To | |
119 | achieve compatibility of instrumented code with regular codes | |
120 | we have to keep data layout and store bounds in special bound tables | |
121 | via special checker call. Implementation of bounds table may vary for | |
122 | different platforms. It has to associate pointer value and its | |
123 | location (it is required because we may have two equal pointers | |
124 | with different bounds stored in different places) with bounds. | |
125 | Another checker builtin allows to get bounds for specified pointer | |
126 | loaded from specified location. | |
127 | ||
128 | Example: | |
129 | ||
130 | buf1[i_1] = &buf2; | |
131 | ||
132 | is transformed into: | |
133 | ||
134 | buf1[i_1] = &buf2; | |
135 | D.1_2 = &buf1[i_1]; | |
136 | __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2); | |
137 | ||
138 | where __bound_tmp.1_2 are bounds of &buf2. | |
139 | ||
140 | c) Static initialization. | |
141 | ||
142 | The special case of pointer store is static pointer initialization. | |
143 | Bounds initialization is performed in a few steps: | |
144 | - register all static initializations in front-end using | |
145 | chkp_register_var_initializer | |
146 | - when file compilation finishes we create functions with special | |
147 | attribute 'chkp ctor' and put explicit initialization code | |
148 | (assignments) for all statically initialized pointers. | |
149 | - when checker constructor is compiled checker pass adds required | |
150 | bounds initialization for all statically initialized pointers | |
151 | - since we do not actually need excess pointers initialization | |
152 | in checker constructor we remove such assignments from them | |
153 | ||
154 | d) Calls. | |
155 | ||
156 | For each call in the code we add additional arguments to pass | |
157 | bounds for pointer arguments. We determine type of call arguments | |
158 | using arguments list from function declaration; if function | |
159 | declaration is not available we use function type; otherwise | |
160 | (e.g. for unnamed arguments) we use type of passed value. Function | |
161 | declaration/type is replaced with the instrumented one. | |
162 | ||
163 | Example: | |
164 | ||
165 | val_1 = foo (&buf1, &buf2, &buf1, 0); | |
166 | ||
167 | is translated into: | |
168 | ||
169 | val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3, | |
170 | &buf1, __bound_tmp.1_2, 0); | |
171 | ||
172 | e) Returns. | |
173 | ||
174 | If function returns a pointer value we have to return bounds also. | |
175 | A new operand was added for return statement to hold returned bounds. | |
176 | ||
177 | Example: | |
178 | ||
179 | return &_buf1; | |
180 | ||
181 | is transformed into | |
182 | ||
183 | return &_buf1, __bound_tmp.1_1; | |
184 | ||
185 | 3. Bounds computation. | |
186 | ||
187 | Compiler is fully responsible for computing bounds to be used for each | |
188 | memory access. The first step for bounds computation is to find the | |
189 | origin of pointer dereferenced for memory access. Basing on pointer | |
190 | origin we define a way to compute its bounds. There are just few | |
191 | possible cases: | |
192 | ||
193 | a) Pointer is returned by call. | |
194 | ||
195 | In this case we use corresponding checker builtin method to obtain returned | |
196 | bounds. | |
197 | ||
198 | Example: | |
199 | ||
200 | buf_1 = malloc (size_2); | |
201 | foo (buf_1); | |
202 | ||
203 | is translated into: | |
204 | ||
205 | buf_1 = malloc (size_2); | |
206 | __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1); | |
207 | foo (buf_1, __bound_tmp.1_3); | |
208 | ||
209 | b) Pointer is an address of an object. | |
210 | ||
211 | In this case compiler tries to compute objects size and create corresponding | |
212 | bounds. If object has incomplete type then special checker builtin is used to | |
213 | obtain its size at runtime. | |
214 | ||
215 | Example: | |
216 | ||
217 | foo () | |
218 | { | |
219 | <unnamed type> __bound_tmp.3; | |
220 | static int buf[100]; | |
221 | ||
222 | <bb 3>: | |
223 | __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400); | |
224 | ||
225 | <bb 2>: | |
226 | return &buf, __bound_tmp.3_2; | |
227 | } | |
228 | ||
229 | Example: | |
230 | ||
231 | Address of an object 'extern int buf[]' with incomplete type is | |
232 | returned. | |
233 | ||
234 | foo () | |
235 | { | |
236 | <unnamed type> __bound_tmp.4; | |
237 | long unsigned int __size_tmp.3; | |
238 | ||
239 | <bb 3>: | |
240 | __size_tmp.3_4 = __builtin_ia32_sizeof (buf); | |
241 | __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4); | |
242 | ||
243 | <bb 2>: | |
244 | return &buf, __bound_tmp.4_3; | |
245 | } | |
246 | ||
247 | c) Pointer is the result of object narrowing. | |
248 | ||
249 | It happens when we use pointer to an object to compute pointer to a part | |
250 | of an object. E.g. we take pointer to a field of a structure. In this | |
251 | case we perform bounds intersection using bounds of original object and | |
252 | bounds of object's part (which are computed basing on its type). | |
253 | ||
254 | There may be some debatable questions about when narrowing should occur | |
255 | and when it should not. To avoid false bound violations in correct | |
256 | programs we do not perform narrowing when address of an array element is | |
257 | obtained (it has address of the whole array) and when address of the first | |
258 | structure field is obtained (because it is guaranteed to be equal to | |
259 | address of the whole structure and it is legal to cast it back to structure). | |
260 | ||
261 | Default narrowing behavior may be changed using compiler flags. | |
262 | ||
263 | Example: | |
264 | ||
265 | In this example address of the second structure field is returned. | |
266 | ||
267 | foo (struct A * p, __bounds_type __bounds_of_p) | |
268 | { | |
269 | <unnamed type> __bound_tmp.3; | |
270 | int * _2; | |
271 | int * _5; | |
272 | ||
273 | <bb 2>: | |
274 | _5 = &p_1(D)->second_field; | |
275 | __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4); | |
276 | __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6, | |
277 | __bounds_of_p_3(D)); | |
278 | _2 = &p_1(D)->second_field; | |
279 | return _2, __bound_tmp.3_8; | |
280 | } | |
281 | ||
282 | Example: | |
283 | ||
284 | In this example address of the first field of array element is returned. | |
285 | ||
286 | foo (struct A * p, __bounds_type __bounds_of_p, int i) | |
287 | { | |
288 | long unsigned int _3; | |
289 | long unsigned int _4; | |
290 | struct A * _6; | |
291 | int * _7; | |
292 | ||
293 | <bb 2>: | |
294 | _3 = (long unsigned int) i_1(D); | |
295 | _4 = _3 * 8; | |
296 | _6 = p_5(D) + _4; | |
297 | _7 = &_6->first_field; | |
298 | return _7, __bounds_of_p_2(D); | |
299 | } | |
300 | ||
301 | ||
302 | d) Pointer is the result of pointer arithmetic or type cast. | |
303 | ||
304 | In this case bounds of the base pointer are used. In case of binary | |
305 | operation producing a pointer we are analyzing data flow further | |
306 | looking for operand's bounds. One operand is considered as a base | |
307 | if it has some valid bounds. If we fall into a case when none of | |
308 | operands (or both of them) has valid bounds, a default bounds value | |
309 | is used. | |
310 | ||
311 | Trying to find out bounds for binary operations we may fall into | |
312 | cyclic dependencies for pointers. To avoid infinite recursion all | |
313 | walked phi nodes instantly obtain corresponding bounds but created | |
314 | bounds are marked as incomplete. It helps us to stop DF walk during | |
315 | bounds search. | |
316 | ||
317 | When we reach pointer source, some args of incomplete bounds phi obtain | |
318 | valid bounds and those values are propagated further through phi nodes. | |
319 | If no valid bounds were found for phi node then we mark its result as | |
320 | invalid bounds. Process stops when all incomplete bounds become either | |
321 | valid or invalid and we are able to choose a pointer base. | |
322 | ||
323 | e) Pointer is loaded from the memory. | |
324 | ||
325 | In this case we just need to load bounds from the bounds table. | |
326 | ||
327 | Example: | |
328 | ||
329 | foo () | |
330 | { | |
331 | <unnamed type> __bound_tmp.3; | |
332 | static int * buf; | |
333 | int * _2; | |
334 | ||
335 | <bb 2>: | |
336 | _2 = buf; | |
337 | __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2); | |
338 | return _2, __bound_tmp.3_4; | |
339 | } | |
340 | ||
341 | */ | |
342 | ||
/* Callback type used when walking static initializers: invoked for each
   (LHS, RHS) assignment pair; the void * is a client-supplied cookie.  */
typedef void (*assign_handler)(tree, tree, void *);

/* Forward declarations for routines defined later in this file.  */
static tree chkp_get_zero_bounds ();
static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
				     gimple_stmt_iterator *iter);
static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
						tree *elt, bool *safe,
						bool *bitfield,
						tree *bounds,
						gimple_stmt_iterator *iter,
						bool innermost_bounds);

/* Shorthands for the target-provided checker builtins.  Each expands to
   the target's decl for the corresponding chkp primitive (may be NULL if
   the target does not provide it).  */
#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))

/* Unsigned integer type with pointer precision.  */
static GTY (()) tree chkp_uintptr_type;

/* Static const variables holding the canonical zero/none bounds
   (used via chkp_get_zero_bounds_var when
   flag_chkp_use_static_const_bounds is set).  */
static GTY (()) tree chkp_zero_bounds_var;
static GTY (()) tree chkp_none_bounds_var;

/* NOTE(review): presumably the block where function-entry bounds code is
   emitted — confirm against uses later in the file.  */
static GTY (()) basic_block entry_block;
/* Cached values for zero, none and the sentinel "incomplete" bounds.  */
static GTY (()) tree zero_bounds;
static GTY (()) tree none_bounds;
static GTY (()) tree incomplete_bounds;
/* Shared temporaries: bounds temp (see chkp_get_tmp_var) and size temp
   (see chkp_get_size_tmp_var).  */
static GTY (()) tree tmp_var;
static GTY (()) tree size_tmp_var;
static GTY (()) bitmap chkp_abnormal_copies;

/* Bounds marked invalid (see chkp_mark_invalid_bounds).  */
struct hash_set<tree> *chkp_invalid_bounds;
/* Bounds marked completed (see chkp_mark_completed_bounds).  */
struct hash_set<tree> *chkp_completed_bounds_set;
struct hash_map<tree, tree> *chkp_reg_bounds;
struct hash_map<tree, tree> *chkp_bound_vars;
/* Bounds registered for addresses of objects
   (see chkp_register_addr_bounds).  */
struct hash_map<tree, tree> *chkp_reg_addr_bounds;
/* Maps incomplete bounds to the pointer they were created for
   (see chkp_register_incomplete_bounds).  */
struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
/* Generic node -> bounds association (see chkp_get_bounds).  */
struct hash_map<tree, tree> *chkp_bounds_map;
struct hash_map<tree, tree> *chkp_static_var_bounds;

/* True while the instrumentation pass itself is running
   (see chkp_get_tmp_reg).  */
static bool in_chkp_pass;

/* Naming conventions for checker-generated symbols and temporaries.  */
#define CHKP_BOUND_TMP_NAME "__bound_tmp"
#define CHKP_SIZE_TMP_NAME "__size_tmp"
#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"

/* Static checker constructors may become very large and their
   compilation with optimization may take too much time.
   Therefore we put a limit to number of statements in one
   constructor.  Tests with 100 000 statically initialized
   pointers showed following compilation times on Sandy Bridge
   server (used -O2):
   limit 100 => ~18 sec.
   limit 300 => ~22 sec.
   limit 1000 => ~30 sec.
   limit 3000 => ~49 sec.
   limit 5000 => ~55 sec.
   limit 10000 => ~76 sec.
   limit 100000 => ~532 sec.  */
#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))

/* Pending statement list for one checker constructor together with the
   number of statements it may still accept before a new constructor
   must be started.  */
struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};
431 | ||
432 | /* Return 1 if function FNDECL is instrumented by Pointer | |
433 | Bounds Checker. */ | |
434 | bool | |
435 | chkp_function_instrumented_p (tree fndecl) | |
436 | { | |
437 | return fndecl | |
438 | && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl)); | |
439 | } | |
440 | ||
441 | /* Mark function FNDECL as instrumented. */ | |
442 | void | |
443 | chkp_function_mark_instrumented (tree fndecl) | |
444 | { | |
445 | if (chkp_function_instrumented_p (fndecl)) | |
446 | return; | |
447 | ||
448 | DECL_ATTRIBUTES (fndecl) | |
449 | = tree_cons (get_identifier ("chkp instrumented"), NULL, | |
450 | DECL_ATTRIBUTES (fndecl)); | |
451 | } | |
452 | ||
453 | /* Return true when STMT is builtin call to instrumentation function | |
454 | corresponding to CODE. */ | |
455 | ||
456 | bool | |
457 | chkp_gimple_call_builtin_p (gimple call, | |
458 | enum built_in_function code) | |
459 | { | |
460 | tree fndecl; | |
461 | if (is_gimple_call (call) | |
462 | && (fndecl = targetm.builtin_chkp_function (code)) | |
463 | && gimple_call_fndecl (call) == fndecl) | |
464 | return true; | |
465 | return false; | |
466 | } | |
467 | ||
/* Emit code (at RTL expansion time) to store zero bounds for PTR
   located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero bounds come either from a shared static const variable or
     from an explicit bndmk (0, 0) call, depending on compilation
     flags.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary of bounds type to hold the zero-bounds value.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* Address of MEM acts as the location key for the bounds table.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Order matters: BND must hold the zero bounds before the bndstx
     call that stores it is expanded.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
488 | ||
/* Mark statement S to not be instrumented.  Uses the pass-local
   GF_PLF_1 flag; see chkp_marked_stmt_p.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
495 | ||
/* Mark statement S to be instrumented (clear the GF_PLF_1 flag set by
   chkp_mark_stmt).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
502 | ||
/* Return 1 if statement S should not be instrumented
   (i.e. was marked via chkp_mark_stmt).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
509 | ||
510 | /* Get var to be used for bound temps. */ | |
511 | static tree | |
512 | chkp_get_tmp_var (void) | |
513 | { | |
514 | if (!tmp_var) | |
515 | tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME); | |
516 | ||
517 | return tmp_var; | |
518 | } | |
519 | ||
520 | /* Get SSA_NAME to be used as temp. */ | |
521 | static tree | |
522 | chkp_get_tmp_reg (gimple stmt) | |
523 | { | |
524 | if (in_chkp_pass) | |
525 | return make_ssa_name (chkp_get_tmp_var (), stmt); | |
526 | ||
527 | return make_temp_ssa_name (pointer_bounds_type_node, stmt, | |
528 | CHKP_BOUND_TMP_NAME); | |
529 | } | |
530 | ||
531 | /* Get var to be used for size temps. */ | |
532 | static tree | |
533 | chkp_get_size_tmp_var (void) | |
534 | { | |
535 | if (!size_tmp_var) | |
536 | size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME); | |
537 | ||
538 | return size_tmp_var; | |
539 | } | |
540 | ||
541 | /* Register bounds BND for address of OBJ. */ | |
542 | static void | |
543 | chkp_register_addr_bounds (tree obj, tree bnd) | |
544 | { | |
545 | if (bnd == incomplete_bounds) | |
546 | return; | |
547 | ||
548 | chkp_reg_addr_bounds->put (obj, bnd); | |
549 | ||
550 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
551 | { | |
552 | fprintf (dump_file, "Regsitered bound "); | |
553 | print_generic_expr (dump_file, bnd, 0); | |
554 | fprintf (dump_file, " for address of "); | |
555 | print_generic_expr (dump_file, obj, 0); | |
556 | fprintf (dump_file, "\n"); | |
557 | } | |
558 | } | |
559 | ||
560 | /* Return bounds registered for address of OBJ. */ | |
561 | static tree | |
562 | chkp_get_registered_addr_bounds (tree obj) | |
563 | { | |
564 | tree *slot = chkp_reg_addr_bounds->get (obj); | |
565 | return slot ? *slot : NULL_TREE; | |
566 | } | |
567 | ||
/* Mark BOUNDS as completed (add them to chkp_completed_bounds_set).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
581 | ||
/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
588 | ||
/* Clear completed bound marks by replacing the set with a fresh,
   empty one.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
596 | ||
597 | /* Mark BOUNDS associated with PTR as incomplete. */ | |
598 | static void | |
599 | chkp_register_incomplete_bounds (tree bounds, tree ptr) | |
600 | { | |
601 | chkp_incomplete_bounds_map->put (bounds, ptr); | |
602 | ||
603 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
604 | { | |
605 | fprintf (dump_file, "Regsitered incomplete bounds "); | |
606 | print_generic_expr (dump_file, bounds, 0); | |
607 | fprintf (dump_file, " for "); | |
608 | print_generic_expr (dump_file, ptr, 0); | |
609 | fprintf (dump_file, "\n"); | |
610 | } | |
611 | } | |
612 | ||
613 | /* Return 1 if BOUNDS are incomplete and 0 otherwise. */ | |
614 | static bool | |
615 | chkp_incomplete_bounds (tree bounds) | |
616 | { | |
617 | if (bounds == incomplete_bounds) | |
618 | return true; | |
619 | ||
620 | if (chkp_completed_bounds (bounds)) | |
621 | return false; | |
622 | ||
623 | return chkp_incomplete_bounds_map->get (bounds) != NULL; | |
624 | } | |
625 | ||
/* Clear incomplete bound marks by replacing the map with a fresh,
   empty one.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
633 | ||
634 | /* Build and return bndmk call which creates bounds for structure | |
635 | pointed by PTR. Structure should have complete type. */ | |
636 | tree | |
637 | chkp_make_bounds_for_struct_addr (tree ptr) | |
638 | { | |
639 | tree type = TREE_TYPE (ptr); | |
640 | tree size; | |
641 | ||
642 | gcc_assert (POINTER_TYPE_P (type)); | |
643 | ||
644 | size = TYPE_SIZE (TREE_TYPE (type)); | |
645 | ||
646 | gcc_assert (size); | |
647 | ||
648 | return build_call_nary (pointer_bounds_type_node, | |
649 | build_fold_addr_expr (chkp_bndmk_fndecl), | |
650 | 2, ptr, size); | |
651 | } | |
652 | ||
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  /* Incomplete bounds are always SSA names defined by phi nodes.  */
  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  Returning false stops
	     the hash_map traversal.  */
	  return false;
	}
    }

  /* All args of this phi are known; continue the traversal.  */
  return true;
}
683 | ||
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* chkp_may_complete_phi_bounds clears RES and aborts the traversal
     as soon as it sees a phi with a missing argument.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
696 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the phi result,
   *SLOT is the pointer it was created for; both are SSA names
   defined by phi nodes with matching argument/edge order.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming edge of the pointer's phi, find bounds of the
     corresponding pointer argument and add them as the matching
     argument of the bounds phi.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Always continue the hash_map traversal.  */
  return true;
}
726 | ||
/* Mark BOUNDS as invalid (add them to chkp_invalid_bounds set).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
740 | ||
741 | /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */ | |
742 | static bool | |
743 | chkp_valid_bounds (tree bounds) | |
744 | { | |
745 | if (bounds == zero_bounds || bounds == none_bounds) | |
746 | return false; | |
747 | ||
748 | return !chkp_invalid_bounds->contains (bounds); | |
749 | } | |
750 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  Sets *RES when progress was made.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on a previous iteration — nothing to do.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully-known argument is enough to declare the phi
	 result valid; completing before recomputing guards against
	 infinite recursion through phi cycles.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Always continue the hash_map traversal.  */
  return true;
}
789 | ||
790 | /* Helper function for chkp_finish_incomplete_bounds. | |
791 | Marks all incompleted bounds as invalid. */ | |
792 | bool | |
793 | chkp_mark_invalid_bounds_walker (tree const &bounds, | |
794 | tree *slot ATTRIBUTE_UNUSED, | |
795 | void *res ATTRIBUTE_UNUSED) | |
796 | { | |
797 | if (!chkp_completed_bounds (bounds)) | |
798 | { | |
799 | chkp_mark_invalid_bounds (bounds); | |
800 | chkp_mark_completed_bounds (bounds); | |
801 | } | |
802 | return true; | |
803 | } | |
804 | ||
805 | /* When all bound phi nodes have all their args computed | |
806 | we have enough info to find valid bounds. We iterate | |
807 | through all incompleted bounds searching for valid | |
808 | bounds. Found valid bounds are marked as completed | |
809 | and all remaining incompleted bounds are recomputed. | |
810 | Process continues until no new valid bounds may be | |
811 | found. All remained incompleted bounds are marked as | |
812 | invalid (i.e. have no valid source of bounds). */ | |
813 | static void | |
814 | chkp_finish_incomplete_bounds (void) | |
815 | { | |
816 | bool found_valid; | |
817 | ||
818 | while (found_valid) | |
819 | { | |
820 | found_valid = false; | |
821 | ||
822 | chkp_incomplete_bounds_map-> | |
823 | traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid); | |
824 | ||
825 | if (found_valid) | |
826 | chkp_incomplete_bounds_map-> | |
827 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
828 | } | |
829 | ||
830 | chkp_incomplete_bounds_map-> | |
831 | traverse<void *, chkp_mark_invalid_bounds_walker> (NULL); | |
832 | chkp_incomplete_bounds_map-> | |
833 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
834 | ||
835 | chkp_erase_completed_bounds (); | |
836 | chkp_erase_incomplete_bounds (); | |
837 | } | |
838 | ||
839 | /* Return 1 if type TYPE is a pointer type or a | |
840 | structure having a pointer type as one of its fields. | |
841 | Otherwise return 0. */ | |
842 | bool | |
843 | chkp_type_has_pointer (const_tree type) | |
844 | { | |
845 | bool res = false; | |
846 | ||
847 | if (BOUNDED_TYPE_P (type)) | |
848 | res = true; | |
849 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
850 | { | |
851 | tree field; | |
852 | ||
853 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
854 | if (TREE_CODE (field) == FIELD_DECL) | |
855 | res = res || chkp_type_has_pointer (TREE_TYPE (field)); | |
856 | } | |
857 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
858 | res = chkp_type_has_pointer (TREE_TYPE (type)); | |
859 | ||
860 | return res; | |
861 | } | |
862 | ||
/* Return the number of bound slots required for TYPE: 0 for a null or
   pointer-free type, 1 for a pointer, and for records/unions the
   number of distinct bound slots found by chkp_find_bound_slots.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Collect bound-slot positions into a temporary bitmap and
	 count them; the bitmap lives on a local obstack released
	 before returning.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
886 | ||
887 | /* Get bounds associated with NODE via | |
888 | chkp_set_bounds call. */ | |
889 | tree | |
890 | chkp_get_bounds (tree node) | |
891 | { | |
892 | tree *slot; | |
893 | ||
894 | if (!chkp_bounds_map) | |
895 | return NULL_TREE; | |
896 | ||
897 | slot = chkp_bounds_map->get (node); | |
898 | return slot ? *slot : NULL_TREE; | |
899 | } | |
900 | ||
901 | /* Associate bounds VAL with NODE. */ | |
902 | void | |
903 | chkp_set_bounds (tree node, tree val) | |
904 | { | |
905 | if (!chkp_bounds_map) | |
906 | chkp_bounds_map = new hash_map<tree, tree>; | |
907 | ||
908 | chkp_bounds_map->put (node, val); | |
909 | } | |
910 | ||
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into
   the bounds initialization list then 1 is returned.
   Otherwise return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when instrumentation is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     bounds emitted by the static checker constructor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      /* Mark the varpool node so bounds initialization code is
	 generated for it later (presumably at chkp_finish_file
	 time — see the helpers below).  */
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
934 | ||
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statements (passed in ARG).
   Note: this function only appends and decrements the budget
   counter; flushing a too-big list into a checker constructor
   is done by the caller (see chkp_output_static_bounds).  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion if RHS's type is not directly usable
     where LHS's type is expected.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  /* One statement consumed from the per-constructor budget.  */
  stmts->avail--;
}
957 | ||
958 | /* Build and return ADDR_EXPR for specified object OBJ. */ | |
959 | static tree | |
960 | chkp_build_addr_expr (tree obj) | |
961 | { | |
962 | return TREE_CODE (obj) == TARGET_MEM_REF | |
963 | ? tree_mem_ref_addr (ptr_type_node, obj) | |
964 | : build_fold_addr_expr (obj); | |
965 | } | |
966 | ||
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* Lower bound is the string's address; SIZE is the index of
	 the last byte (TREE_STRING_LENGTH counts the trailing nul,
	 so LENGTH - 1 makes UB point at that last byte).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a runtime call
	 through chkp_sizeof_fndecl with VAR as the argument.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the runtime size is zero, substitute -LB so that
	     LB + SIZE wraps to the maximal address, i.e. treat the
	     object as having effectively infinite size.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Budget exhausted: flush accumulated statements into a static
     constructor ('B') and start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1024 | ||
/* Return entry block to be used for checker initialization code.
   Create new block if required.  */
static basic_block
chkp_get_entry_block (void)
{
  /* Lazily split the function entry; the result is cached in
     entry_block (declared elsewhere in this file) so repeated
     calls return the same block.  */
  if (!entry_block)
    entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;

  return entry_block;
}
1035 | ||
1036 | /* Return a bounds var to be used for pointer var PTR_VAR. */ | |
1037 | static tree | |
1038 | chkp_get_bounds_var (tree ptr_var) | |
1039 | { | |
1040 | tree bnd_var; | |
1041 | tree *slot; | |
1042 | ||
1043 | slot = chkp_bound_vars->get (ptr_var); | |
1044 | if (slot) | |
1045 | bnd_var = *slot; | |
1046 | else | |
1047 | { | |
1048 | bnd_var = create_tmp_reg (pointer_bounds_type_node, | |
1049 | CHKP_BOUND_TMP_NAME); | |
1050 | chkp_bound_vars->put (ptr_var, bnd_var); | |
1051 | } | |
1052 | ||
1053 | return bnd_var; | |
1054 | } | |
1055 | ||
1056 | ||
1057 | ||
1058 | /* Register bounds BND for object PTR in global bounds table. | |
1059 | A copy of bounds may be created for abnormal ssa names. | |
1060 | Returns bounds to use for PTR. */ | |
1061 | static tree | |
1062 | chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd) | |
1063 | { | |
1064 | bool abnormal_ptr; | |
1065 | ||
1066 | if (!chkp_reg_bounds) | |
1067 | return bnd; | |
1068 | ||
1069 | /* Do nothing if bounds are incomplete_bounds | |
1070 | because it means bounds will be recomputed. */ | |
1071 | if (bnd == incomplete_bounds) | |
1072 | return bnd; | |
1073 | ||
1074 | abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME | |
1075 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr) | |
1076 | && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI); | |
1077 | ||
1078 | /* A single bounds value may be reused multiple times for | |
1079 | different pointer values. It may cause coalescing issues | |
1080 | for abnormal SSA names. To avoid it we create a bounds | |
1081 | copy in case it is computed for abnormal SSA name. | |
1082 | ||
1083 | We also cannot reuse such created copies for other pointers */ | |
1084 | if (abnormal_ptr | |
1085 | || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd))) | |
1086 | { | |
1087 | tree bnd_var = NULL_TREE; | |
1088 | ||
1089 | if (abnormal_ptr) | |
1090 | { | |
1091 | if (SSA_NAME_VAR (ptr)) | |
1092 | bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr)); | |
1093 | } | |
1094 | else | |
1095 | bnd_var = chkp_get_tmp_var (); | |
1096 | ||
1097 | /* For abnormal copies we may just find original | |
1098 | bounds and use them. */ | |
1099 | if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd)) | |
1100 | { | |
1101 | gimple bnd_def = SSA_NAME_DEF_STMT (bnd); | |
1102 | gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN); | |
1103 | bnd = gimple_assign_rhs1 (bnd_def); | |
1104 | } | |
1105 | /* For undefined values we usually use none bounds | |
1106 | value but in case of abnormal edge it may cause | |
1107 | coalescing failures. Use default definition of | |
1108 | bounds variable instead to avoid it. */ | |
1109 | else if (SSA_NAME_IS_DEFAULT_DEF (ptr) | |
1110 | && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL) | |
1111 | { | |
1112 | bnd = get_or_create_ssa_default_def (cfun, bnd_var); | |
1113 | ||
1114 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1115 | { | |
1116 | fprintf (dump_file, "Using default def bounds "); | |
1117 | print_generic_expr (dump_file, bnd, 0); | |
1118 | fprintf (dump_file, " for abnormal default def SSA name "); | |
1119 | print_generic_expr (dump_file, ptr, 0); | |
1120 | fprintf (dump_file, "\n"); | |
1121 | } | |
1122 | } | |
1123 | else | |
1124 | { | |
1125 | tree copy; | |
1126 | gimple def = SSA_NAME_DEF_STMT (ptr); | |
1127 | gimple assign; | |
1128 | gimple_stmt_iterator gsi; | |
1129 | ||
1130 | if (bnd_var) | |
1131 | copy = make_ssa_name (bnd_var, gimple_build_nop ()); | |
1132 | else | |
1133 | copy = make_temp_ssa_name (pointer_bounds_type_node, | |
1134 | gimple_build_nop (), | |
1135 | CHKP_BOUND_TMP_NAME); | |
1136 | assign = gimple_build_assign (copy, bnd); | |
1137 | ||
1138 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1139 | { | |
1140 | fprintf (dump_file, "Creating a copy of bounds "); | |
1141 | print_generic_expr (dump_file, bnd, 0); | |
1142 | fprintf (dump_file, " for abnormal SSA name "); | |
1143 | print_generic_expr (dump_file, ptr, 0); | |
1144 | fprintf (dump_file, "\n"); | |
1145 | } | |
1146 | ||
1147 | if (gimple_code (def) == GIMPLE_NOP) | |
1148 | { | |
1149 | gsi = gsi_last_bb (chkp_get_entry_block ()); | |
1150 | if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi))) | |
1151 | gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING); | |
1152 | else | |
1153 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1154 | } | |
1155 | else | |
1156 | { | |
1157 | gimple bnd_def = SSA_NAME_DEF_STMT (bnd); | |
1158 | /* Sometimes (e.g. when we load a pointer from a | |
1159 | memory) bounds are produced later than a pointer. | |
1160 | We need to insert bounds copy appropriately. */ | |
1161 | if (gimple_code (bnd_def) != GIMPLE_NOP | |
1162 | && stmt_dominates_stmt_p (def, bnd_def)) | |
1163 | gsi = gsi_for_stmt (bnd_def); | |
1164 | else | |
1165 | gsi = gsi_for_stmt (def); | |
1166 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1167 | } | |
1168 | ||
1169 | bnd = copy; | |
1170 | } | |
1171 | ||
1172 | if (abnormal_ptr) | |
1173 | bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)); | |
1174 | } | |
1175 | ||
1176 | chkp_reg_bounds->put (ptr, bnd); | |
1177 | ||
1178 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1179 | { | |
1180 | fprintf (dump_file, "Regsitered bound "); | |
1181 | print_generic_expr (dump_file, bnd, 0); | |
1182 | fprintf (dump_file, " for pointer "); | |
1183 | print_generic_expr (dump_file, ptr, 0); | |
1184 | fprintf (dump_file, "\n"); | |
1185 | } | |
1186 | ||
1187 | return bnd; | |
1188 | } | |
1189 | ||
1190 | /* Get bounds registered for object PTR in global bounds table. */ | |
1191 | static tree | |
1192 | chkp_get_registered_bounds (tree ptr) | |
1193 | { | |
1194 | tree *slot; | |
1195 | ||
1196 | if (!chkp_reg_bounds) | |
1197 | return NULL_TREE; | |
1198 | ||
1199 | slot = chkp_reg_bounds->get (ptr); | |
1200 | return slot ? *slot : NULL_TREE; | |
1201 | } | |
1202 | ||
/* Add bound retvals to return statement pointed by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* A return without a value needs no bounds.  */
  if (!retval)
    return;

  /* Attach bounds only when the function result is a bounded
     (pointer) value.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      /* Abnormal SSA names may require a bounds copy; register the
	 bounds for the result decl either way.  */
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1225 | ||
/* Force OP to be suitable for using as an argument for call.
   New statements (if any) go to SEQ.  */
static tree
chkp_force_gimple_call_op (tree op, gimple_seq *seq)
{
  gimple_seq stmts;
  gimple_stmt_iterator si;

  /* Unshare first so gimplification does not modify a tree that is
     still referenced elsewhere.  */
  op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);

  /* Mark every statement produced by gimplification as
     checker-generated (chkp_mark_stmt).  */
  for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
    chkp_mark_stmt (gsi_stmt (si));

  gimple_seq_add_seq (seq, stmts);

  return op;
}
1243 | ||
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load (integer_zero_node)
   or store (integer_one_node).  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check; skip emitting it.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* Respect per-direction flags disabling read/write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may need gimplification before it can be a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the lower-bound check call and mark it as
     checker-generated.  */
  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1289 | ||
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load (integer_zero_node)
   or store (integer_one_node).  Mirrors chkp_check_lower but emits a
   call to the upper-bound check builtin.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never fail a check; skip emitting it.  */
  if (bounds == chkp_get_zero_bounds ())
    return;

  /* Respect per-direction flags disabling read/write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may need gimplification before it can be a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the upper-bound check call and mark it as
     checker-generated.  */
  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1335 | ||
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower check on the first byte, upper check on the last.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1349 | ||
1350 | /* Replace call to _bnd_chk_* pointed by GSI with | |
1351 | bndcu and bndcl calls. DIRFLAG determines whether | |
1352 | check is for read or write. */ | |
1353 | ||
1354 | void | |
1355 | chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi, | |
1356 | tree dirflag) | |
1357 | { | |
1358 | gimple_stmt_iterator call_iter = *gsi; | |
1359 | gimple call = gsi_stmt (*gsi); | |
1360 | tree fndecl = gimple_call_fndecl (call); | |
1361 | tree addr = gimple_call_arg (call, 0); | |
1362 | tree bounds = chkp_find_bounds (addr, gsi); | |
1363 | ||
1364 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS | |
1365 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS) | |
1366 | chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1367 | ||
1368 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS) | |
1369 | chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1370 | ||
1371 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS) | |
1372 | { | |
1373 | tree size = gimple_call_arg (call, 1); | |
1374 | addr = fold_build_pointer_plus (addr, size); | |
1375 | addr = fold_build_pointer_plus_hwi (addr, -1); | |
1376 | chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1377 | } | |
1378 | ||
1379 | gsi_remove (&call_iter, true); | |
1380 | } | |
1381 | ||
1382 | /* Replace call to _bnd_get_ptr_* pointed by GSI with | |
1383 | corresponding bounds extract call. */ | |
1384 | ||
1385 | void | |
1386 | chkp_replace_extract_builtin (gimple_stmt_iterator *gsi) | |
1387 | { | |
1388 | gimple call = gsi_stmt (*gsi); | |
1389 | tree fndecl = gimple_call_fndecl (call); | |
1390 | tree addr = gimple_call_arg (call, 0); | |
1391 | tree bounds = chkp_find_bounds (addr, gsi); | |
1392 | gimple extract; | |
1393 | ||
1394 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND) | |
1395 | fndecl = chkp_extract_lower_fndecl; | |
1396 | else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND) | |
1397 | fndecl = chkp_extract_upper_fndecl; | |
1398 | else | |
1399 | gcc_unreachable (); | |
1400 | ||
1401 | extract = gimple_build_call (fndecl, 1, bounds); | |
1402 | gimple_call_set_lhs (extract, gimple_call_lhs (call)); | |
1403 | chkp_mark_stmt (extract); | |
1404 | ||
1405 | gsi_replace (gsi, extract, false); | |
1406 | } | |
1407 | ||
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold the field's byte offset into the TMR offset.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      /* Folding must succeed: both operands are constants.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1434 | ||
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Element byte offset is ESIZE * INDEX; fold it into the
	 existing TMR offset.  Both foldings operate on constants
	 and therefore must succeed.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				     esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				    offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
1468 | ||
1469 | /* Helper function for chkp_add_bounds_to_call_stmt. | |
1470 | Fill ALL_BOUNDS output array with created bounds. | |
1471 | ||
1472 | OFFS is used for recursive calls and holds basic | |
1473 | offset of TYPE in outer structure in bits. | |
1474 | ||
1475 | ITER points a position where bounds are searched. | |
1476 | ||
1477 | ALL_BOUNDS[i] is filled with elem bounds if there | |
1478 | is a field in TYPE which has pointer type and offset | |
1479 | equal to i * POINTER_SIZE in bits. */ | |
1480 | static void | |
1481 | chkp_find_bounds_for_elem (tree elem, tree *all_bounds, | |
1482 | HOST_WIDE_INT offs, | |
1483 | gimple_stmt_iterator *iter) | |
1484 | { | |
1485 | tree type = TREE_TYPE (elem); | |
1486 | ||
1487 | if (BOUNDED_TYPE_P (type)) | |
1488 | { | |
1489 | if (!all_bounds[offs / POINTER_SIZE]) | |
1490 | { | |
1491 | tree temp = make_temp_ssa_name (type, gimple_build_nop (), ""); | |
1492 | gimple assign = gimple_build_assign (temp, elem); | |
1493 | gimple_stmt_iterator gsi; | |
1494 | ||
1495 | gsi_insert_before (iter, assign, GSI_SAME_STMT); | |
1496 | gsi = gsi_for_stmt (assign); | |
1497 | ||
1498 | all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi); | |
1499 | } | |
1500 | } | |
1501 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
1502 | { | |
1503 | tree field; | |
1504 | ||
1505 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
1506 | if (TREE_CODE (field) == FIELD_DECL) | |
1507 | { | |
1508 | tree base = unshare_expr (elem); | |
1509 | tree field_ref = chkp_build_component_ref (base, field); | |
1510 | HOST_WIDE_INT field_offs | |
1511 | = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field)); | |
1512 | if (DECL_FIELD_OFFSET (field)) | |
1513 | field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8; | |
1514 | ||
1515 | chkp_find_bounds_for_elem (field_ref, all_bounds, | |
1516 | offs + field_offs, iter); | |
1517 | } | |
1518 | } | |
1519 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
1520 | { | |
1521 | tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); | |
1522 | tree etype = TREE_TYPE (type); | |
1523 | HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype)); | |
1524 | unsigned HOST_WIDE_INT cur; | |
1525 | ||
1526 | if (!maxval || integer_minus_onep (maxval)) | |
1527 | return; | |
1528 | ||
1529 | for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++) | |
1530 | { | |
1531 | tree base = unshare_expr (elem); | |
1532 | tree arr_elem = chkp_build_array_ref (base, etype, | |
1533 | TYPE_SIZE (etype), | |
1534 | cur); | |
1535 | chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize, | |
1536 | iter); | |
1537 | } | |
1538 | } | |
1539 | } | |
1540 | ||
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies the slot at its pointer-aligned offset.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown, non-constant, or zero length.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1587 | ||
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  RES is cleared first, so it holds exactly
   the slots required by TYPE.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1597 | ||
edcf72f3 IE |
1598 | /* Return 1 if call to FNDECL should be instrumented |
1599 | and 0 otherwise. */ | |
1600 | ||
1601 | static bool | |
1602 | chkp_instrument_normal_builtin (tree fndecl) | |
1603 | { | |
1604 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1605 | { | |
1606 | case BUILT_IN_STRLEN: | |
1607 | case BUILT_IN_STRCPY: | |
1608 | case BUILT_IN_STRNCPY: | |
1609 | case BUILT_IN_STPCPY: | |
1610 | case BUILT_IN_STPNCPY: | |
1611 | case BUILT_IN_STRCAT: | |
1612 | case BUILT_IN_STRNCAT: | |
1613 | case BUILT_IN_MEMCPY: | |
1614 | case BUILT_IN_MEMPCPY: | |
1615 | case BUILT_IN_MEMSET: | |
1616 | case BUILT_IN_MEMMOVE: | |
1617 | case BUILT_IN_BZERO: | |
1618 | case BUILT_IN_STRCMP: | |
1619 | case BUILT_IN_STRNCMP: | |
1620 | case BUILT_IN_BCMP: | |
1621 | case BUILT_IN_MEMCMP: | |
1622 | case BUILT_IN_MEMCPY_CHK: | |
1623 | case BUILT_IN_MEMPCPY_CHK: | |
1624 | case BUILT_IN_MEMMOVE_CHK: | |
1625 | case BUILT_IN_MEMSET_CHK: | |
1626 | case BUILT_IN_STRCPY_CHK: | |
1627 | case BUILT_IN_STRNCPY_CHK: | |
1628 | case BUILT_IN_STPCPY_CHK: | |
1629 | case BUILT_IN_STPNCPY_CHK: | |
1630 | case BUILT_IN_STRCAT_CHK: | |
1631 | case BUILT_IN_STRNCAT_CHK: | |
1632 | case BUILT_IN_MALLOC: | |
1633 | case BUILT_IN_CALLOC: | |
1634 | case BUILT_IN_REALLOC: | |
1635 | return 1; | |
1636 | ||
1637 | default: | |
1638 | return 0; | |
1639 | } | |
1640 | } | |
1641 | ||
d5e254e1 IE |
1642 | /* Add bound arguments to call statement pointed by GSI. |
1643 | Also performs a replacement of user checker builtins calls | |
1644 | with internal ones. */ | |
1645 | ||
1646 | static void | |
1647 | chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi) | |
1648 | { | |
538dd0b7 | 1649 | gcall *call = as_a <gcall *> (gsi_stmt (*gsi)); |
d5e254e1 IE |
1650 | unsigned arg_no = 0; |
1651 | tree fndecl = gimple_call_fndecl (call); | |
1652 | tree fntype; | |
1653 | tree first_formal_arg; | |
1654 | tree arg; | |
1655 | bool use_fntype = false; | |
1656 | tree op; | |
1657 | ssa_op_iter iter; | |
538dd0b7 | 1658 | gcall *new_call; |
d5e254e1 IE |
1659 | |
1660 | /* Do nothing for internal functions. */ | |
1661 | if (gimple_call_internal_p (call)) | |
1662 | return; | |
1663 | ||
1664 | fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call))); | |
1665 | ||
1666 | /* Do nothing if back-end builtin is called. */ | |
1667 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
1668 | return; | |
1669 | ||
1670 | /* Do nothing for some middle-end builtins. */ | |
1671 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1672 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE) | |
1673 | return; | |
1674 | ||
edcf72f3 | 1675 | /* Do nothing for calls to legacy functions. */ |
d5e254e1 IE |
1676 | if (fndecl |
1677 | && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl))) | |
1678 | return; | |
1679 | ||
1680 | /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS | |
1681 | and CHKP_COPY_PTR_BOUNDS. */ | |
1682 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1683 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS | |
1684 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS | |
1685 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS | |
1686 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)) | |
1687 | return; | |
1688 | ||
1689 | /* Check user builtins are replaced with checks. */ | |
1690 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1691 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS | |
1692 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS | |
1693 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)) | |
1694 | { | |
1695 | chkp_replace_address_check_builtin (gsi, integer_minus_one_node); | |
1696 | return; | |
1697 | } | |
1698 | ||
1699 | /* Check user builtins are replaced with bound extract. */ | |
1700 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1701 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND | |
1702 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)) | |
1703 | { | |
1704 | chkp_replace_extract_builtin (gsi); | |
1705 | return; | |
1706 | } | |
1707 | ||
1708 | /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with | |
1709 | target narrow bounds call. */ | |
1710 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1711 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS) | |
1712 | { | |
1713 | tree arg = gimple_call_arg (call, 1); | |
1714 | tree bounds = chkp_find_bounds (arg, gsi); | |
1715 | ||
1716 | gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl); | |
1717 | gimple_call_set_arg (call, 1, bounds); | |
1718 | update_stmt (call); | |
1719 | ||
1720 | return; | |
1721 | } | |
1722 | ||
1723 | /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with | |
1724 | bndstx call. */ | |
1725 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1726 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS) | |
1727 | { | |
1728 | tree addr = gimple_call_arg (call, 0); | |
1729 | tree ptr = gimple_call_arg (call, 1); | |
1730 | tree bounds = chkp_find_bounds (ptr, gsi); | |
1731 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
1732 | ||
1733 | chkp_build_bndstx (addr, ptr, bounds, gsi); | |
1734 | gsi_remove (&iter, true); | |
1735 | ||
1736 | return; | |
1737 | } | |
1738 | ||
1739 | if (!flag_chkp_instrument_calls) | |
1740 | return; | |
1741 | ||
edcf72f3 IE |
1742 | /* We instrument only some subset of builtins. We also instrument |
1743 | builtin calls to be inlined. */ | |
1744 | if (fndecl | |
1745 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
1746 | && !chkp_instrument_normal_builtin (fndecl)) | |
1747 | { | |
1748 | if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl))) | |
1749 | return; | |
1750 | ||
1751 | struct cgraph_node *clone = chkp_maybe_create_clone (fndecl); | |
1752 | if (!clone | |
1753 | || !gimple_has_body_p (clone->decl)) | |
1754 | return; | |
1755 | } | |
d5e254e1 IE |
1756 | |
1757 | /* If function decl is available then use it for | |
1758 | formal arguments list. Otherwise use function type. */ | |
1759 | if (fndecl && DECL_ARGUMENTS (fndecl)) | |
1760 | first_formal_arg = DECL_ARGUMENTS (fndecl); | |
1761 | else | |
1762 | { | |
1763 | first_formal_arg = TYPE_ARG_TYPES (fntype); | |
1764 | use_fntype = true; | |
1765 | } | |
1766 | ||
1767 | /* Fill vector of new call args. */ | |
1768 | vec<tree> new_args = vNULL; | |
1769 | new_args.create (gimple_call_num_args (call)); | |
1770 | arg = first_formal_arg; | |
1771 | for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++) | |
1772 | { | |
1773 | tree call_arg = gimple_call_arg (call, arg_no); | |
1774 | tree type; | |
1775 | ||
1776 | /* Get arg type using formal argument description | |
1777 | or actual argument type. */ | |
1778 | if (arg) | |
1779 | if (use_fntype) | |
1780 | if (TREE_VALUE (arg) != void_type_node) | |
1781 | { | |
1782 | type = TREE_VALUE (arg); | |
1783 | arg = TREE_CHAIN (arg); | |
1784 | } | |
1785 | else | |
1786 | type = TREE_TYPE (call_arg); | |
1787 | else | |
1788 | { | |
1789 | type = TREE_TYPE (arg); | |
1790 | arg = TREE_CHAIN (arg); | |
1791 | } | |
1792 | else | |
1793 | type = TREE_TYPE (call_arg); | |
1794 | ||
1795 | new_args.safe_push (call_arg); | |
1796 | ||
1797 | if (BOUNDED_TYPE_P (type) | |
1798 | || pass_by_reference (NULL, TYPE_MODE (type), type, true)) | |
1799 | new_args.safe_push (chkp_find_bounds (call_arg, gsi)); | |
1800 | else if (chkp_type_has_pointer (type)) | |
1801 | { | |
1802 | HOST_WIDE_INT max_bounds | |
1803 | = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE; | |
1804 | tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds); | |
1805 | HOST_WIDE_INT bnd_no; | |
1806 | ||
1807 | memset (all_bounds, 0, sizeof (tree) * max_bounds); | |
1808 | ||
1809 | chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi); | |
1810 | ||
1811 | for (bnd_no = 0; bnd_no < max_bounds; bnd_no++) | |
1812 | if (all_bounds[bnd_no]) | |
1813 | new_args.safe_push (all_bounds[bnd_no]); | |
1814 | ||
1815 | free (all_bounds); | |
1816 | } | |
1817 | } | |
1818 | ||
1819 | if (new_args.length () == gimple_call_num_args (call)) | |
1820 | new_call = call; | |
1821 | else | |
1822 | { | |
1823 | new_call = gimple_build_call_vec (gimple_op (call, 1), new_args); | |
1824 | gimple_call_set_lhs (new_call, gimple_call_lhs (call)); | |
1825 | gimple_call_copy_flags (new_call, call); | |
1826 | } | |
1827 | new_args.release (); | |
1828 | ||
d5e254e1 IE |
1829 | /* For direct calls fndecl is replaced with instrumented version. */ |
1830 | if (fndecl) | |
1831 | { | |
1832 | tree new_decl = chkp_maybe_create_clone (fndecl)->decl; | |
1833 | gimple_call_set_fndecl (new_call, new_decl); | |
1834 | gimple_call_set_fntype (new_call, TREE_TYPE (new_decl)); | |
1835 | } | |
1836 | /* For indirect call we should fix function pointer type if | |
1837 | pass some bounds. */ | |
1838 | else if (new_call != call) | |
1839 | { | |
1840 | tree type = gimple_call_fntype (call); | |
1841 | type = chkp_copy_function_type_adding_bounds (type); | |
1842 | gimple_call_set_fntype (new_call, type); | |
1843 | } | |
1844 | ||
1845 | /* replace old call statement with the new one. */ | |
1846 | if (call != new_call) | |
1847 | { | |
1848 | FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS) | |
1849 | { | |
1850 | SSA_NAME_DEF_STMT (op) = new_call; | |
1851 | } | |
1852 | gsi_replace (gsi, new_call, true); | |
1853 | } | |
1854 | else | |
1855 | update_stmt (new_call); | |
1856 | ||
1857 | gimple_call_set_with_bounds (new_call, true); | |
1858 | } | |
1859 | ||
1860 | /* Return constant static bounds var with specified LB and UB | |
1861 | if such var exists in varpool. Return NULL otherwise. */ | |
1862 | static tree | |
1863 | chkp_find_const_bounds_var (HOST_WIDE_INT lb, | |
1864 | HOST_WIDE_INT ub) | |
1865 | { | |
1866 | tree val = targetm.chkp_make_bounds_constant (lb, ub); | |
1867 | struct varpool_node *node; | |
1868 | ||
1869 | /* We expect bounds constant is represented as a complex value | |
1870 | of two pointer sized integers. */ | |
1871 | gcc_assert (TREE_CODE (val) == COMPLEX_CST); | |
1872 | ||
1873 | FOR_EACH_VARIABLE (node) | |
1874 | if (POINTER_BOUNDS_P (node->decl) | |
1875 | && TREE_READONLY (node->decl) | |
1876 | && DECL_INITIAL (node->decl) | |
1877 | && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST | |
1878 | && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)), | |
1879 | TREE_REALPART (val)) | |
1880 | && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)), | |
1881 | TREE_IMAGPART (val))) | |
1882 | return node->decl; | |
1883 | ||
1884 | return NULL; | |
1885 | } | |
1886 | ||
1887 | /* Return constant static bounds var with specified bounds LB and UB. | |
1888 | If such var does not exists then new var is created with specified NAME. */ | |
1889 | static tree | |
1890 | chkp_make_static_const_bounds (HOST_WIDE_INT lb, | |
1891 | HOST_WIDE_INT ub, | |
1892 | const char *name) | |
1893 | { | |
1894 | tree var; | |
1895 | ||
1896 | /* With LTO we may have constant bounds already in varpool. | |
1897 | Try to find it. */ | |
1898 | var = chkp_find_const_bounds_var (lb, ub); | |
1899 | ||
1900 | if (var) | |
1901 | return var; | |
1902 | ||
1903 | var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
1904 | get_identifier (name), pointer_bounds_type_node); | |
1905 | ||
1906 | TREE_PUBLIC (var) = 1; | |
1907 | TREE_USED (var) = 1; | |
1908 | TREE_READONLY (var) = 1; | |
1909 | TREE_STATIC (var) = 1; | |
1910 | TREE_ADDRESSABLE (var) = 0; | |
1911 | DECL_ARTIFICIAL (var) = 1; | |
1912 | DECL_READ_P (var) = 1; | |
1913 | /* We may use this symbol during ctors generation in chkp_finish_file | |
1914 | when all symbols are emitted. Force output to avoid undefined | |
1915 | symbols in ctors. */ | |
1916 | if (!in_lto_p) | |
1917 | { | |
1918 | DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub); | |
1919 | DECL_COMDAT (var) = 1; | |
1920 | varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var)); | |
1921 | varpool_node::get_create (var)->force_output = 1; | |
1922 | } | |
1923 | else | |
1924 | DECL_EXTERNAL (var) = 1; | |
1925 | varpool_node::finalize_decl (var); | |
1926 | ||
1927 | return var; | |
1928 | } | |
1929 | ||
1930 | /* Generate code to make bounds with specified lower bound LB and SIZE. | |
1931 | if AFTER is 1 then code is inserted after position pointed by ITER | |
1932 | otherwise code is inserted before position pointed by ITER. | |
1933 | If ITER is NULL then code is added to entry block. */ | |
1934 | static tree | |
1935 | chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after) | |
1936 | { | |
1937 | gimple_seq seq; | |
1938 | gimple_stmt_iterator gsi; | |
1939 | gimple stmt; | |
1940 | tree bounds; | |
1941 | ||
1942 | if (iter) | |
1943 | gsi = *iter; | |
1944 | else | |
1945 | gsi = gsi_start_bb (chkp_get_entry_block ()); | |
1946 | ||
1947 | seq = NULL; | |
1948 | ||
1949 | lb = chkp_force_gimple_call_op (lb, &seq); | |
1950 | size = chkp_force_gimple_call_op (size, &seq); | |
1951 | ||
1952 | stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size); | |
1953 | chkp_mark_stmt (stmt); | |
1954 | ||
1955 | bounds = chkp_get_tmp_reg (stmt); | |
1956 | gimple_call_set_lhs (stmt, bounds); | |
1957 | ||
1958 | gimple_seq_add_stmt (&seq, stmt); | |
1959 | ||
1960 | if (iter && after) | |
1961 | gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT); | |
1962 | else | |
1963 | gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT); | |
1964 | ||
1965 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1966 | { | |
1967 | fprintf (dump_file, "Made bounds: "); | |
1968 | print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS); | |
1969 | if (iter) | |
1970 | { | |
1971 | fprintf (dump_file, " inserted before statement: "); | |
1972 | print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS); | |
1973 | } | |
1974 | else | |
1975 | fprintf (dump_file, " at function entry\n"); | |
1976 | } | |
1977 | ||
1978 | /* update_stmt (stmt); */ | |
1979 | ||
1980 | return bounds; | |
1981 | } | |
1982 | ||
1983 | /* Return var holding zero bounds. */ | |
1984 | tree | |
1985 | chkp_get_zero_bounds_var (void) | |
1986 | { | |
1987 | if (!chkp_zero_bounds_var) | |
1988 | { | |
1989 | tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME); | |
1990 | symtab_node *node = symtab_node::get_for_asmname (id); | |
1991 | if (node) | |
1992 | chkp_zero_bounds_var = node->decl; | |
1993 | } | |
1994 | ||
1995 | if (!chkp_zero_bounds_var) | |
1996 | chkp_zero_bounds_var | |
1997 | = chkp_make_static_const_bounds (0, -1, | |
1998 | CHKP_ZERO_BOUNDS_VAR_NAME); | |
1999 | return chkp_zero_bounds_var; | |
2000 | } | |
2001 | ||
2002 | /* Return var holding none bounds. */ | |
2003 | tree | |
2004 | chkp_get_none_bounds_var (void) | |
2005 | { | |
2006 | if (!chkp_none_bounds_var) | |
2007 | { | |
2008 | tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME); | |
2009 | symtab_node *node = symtab_node::get_for_asmname (id); | |
2010 | if (node) | |
2011 | chkp_none_bounds_var = node->decl; | |
2012 | } | |
2013 | ||
2014 | if (!chkp_none_bounds_var) | |
2015 | chkp_none_bounds_var | |
2016 | = chkp_make_static_const_bounds (-1, 0, | |
2017 | CHKP_NONE_BOUNDS_VAR_NAME); | |
2018 | return chkp_none_bounds_var; | |
2019 | } | |
2020 | ||
2021 | /* Return SSA_NAME used to represent zero bounds. */ | |
2022 | static tree | |
2023 | chkp_get_zero_bounds (void) | |
2024 | { | |
2025 | if (zero_bounds) | |
2026 | return zero_bounds; | |
2027 | ||
2028 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2029 | fprintf (dump_file, "Creating zero bounds..."); | |
2030 | ||
2031 | if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds) | |
2032 | || flag_chkp_use_static_const_bounds > 0) | |
2033 | { | |
2034 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
2035 | gimple stmt; | |
2036 | ||
2037 | zero_bounds = chkp_get_tmp_reg (gimple_build_nop ()); | |
2038 | stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ()); | |
2039 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
2040 | } | |
2041 | else | |
2042 | zero_bounds = chkp_make_bounds (integer_zero_node, | |
2043 | integer_zero_node, | |
2044 | NULL, | |
2045 | false); | |
2046 | ||
2047 | return zero_bounds; | |
2048 | } | |
2049 | ||
2050 | /* Return SSA_NAME used to represent none bounds. */ | |
2051 | static tree | |
2052 | chkp_get_none_bounds (void) | |
2053 | { | |
2054 | if (none_bounds) | |
2055 | return none_bounds; | |
2056 | ||
2057 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2058 | fprintf (dump_file, "Creating none bounds..."); | |
2059 | ||
2060 | ||
2061 | if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds) | |
2062 | || flag_chkp_use_static_const_bounds > 0) | |
2063 | { | |
2064 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
2065 | gimple stmt; | |
2066 | ||
2067 | none_bounds = chkp_get_tmp_reg (gimple_build_nop ()); | |
2068 | stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ()); | |
2069 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
2070 | } | |
2071 | else | |
2072 | none_bounds = chkp_make_bounds (integer_minus_one_node, | |
2073 | build_int_cst (size_type_node, 2), | |
2074 | NULL, | |
2075 | false); | |
2076 | ||
2077 | return none_bounds; | |
2078 | } | |
2079 | ||
2080 | /* Return bounds to be used as a result of operation which | |
2081 | should not create poiunter (e.g. MULT_EXPR). */ | |
2082 | static tree | |
2083 | chkp_get_invalid_op_bounds (void) | |
2084 | { | |
2085 | return chkp_get_zero_bounds (); | |
2086 | } | |
2087 | ||
2088 | /* Return bounds to be used for loads of non-pointer values. */ | |
2089 | static tree | |
2090 | chkp_get_nonpointer_load_bounds (void) | |
2091 | { | |
2092 | return chkp_get_zero_bounds (); | |
2093 | } | |
2094 | ||
985f48f7 IE |
2095 | /* Return 1 if may use bndret call to get bounds for pointer |
2096 | returned by CALL. */ | |
2097 | static bool | |
2098 | chkp_call_returns_bounds_p (gcall *call) | |
2099 | { | |
2100 | if (gimple_call_internal_p (call)) | |
2101 | return false; | |
2102 | ||
2103 | tree fndecl = gimple_call_fndecl (call); | |
2104 | ||
2105 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD) | |
2106 | return false; | |
2107 | ||
2108 | if (fndecl | |
2109 | && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl))) | |
2110 | return false; | |
2111 | ||
2112 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
2113 | { | |
2114 | if (chkp_instrument_normal_builtin (fndecl)) | |
2115 | return true; | |
2116 | ||
2117 | if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl))) | |
2118 | return false; | |
2119 | ||
2120 | struct cgraph_node *clone = chkp_maybe_create_clone (fndecl); | |
2121 | return (clone && gimple_has_body_p (clone->decl)); | |
2122 | } | |
2123 | ||
2124 | return true; | |
2125 | } | |
2126 | ||
d5e254e1 IE |
2127 | /* Build bounds returned by CALL. */ |
2128 | static tree | |
538dd0b7 | 2129 | chkp_build_returned_bound (gcall *call) |
d5e254e1 IE |
2130 | { |
2131 | gimple_stmt_iterator gsi; | |
2132 | tree bounds; | |
2133 | gimple stmt; | |
2134 | tree fndecl = gimple_call_fndecl (call); | |
2135 | ||
2136 | /* To avoid fixing alloca expands in targets we handle | |
2137 | it separately. */ | |
2138 | if (fndecl | |
2139 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2140 | && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA | |
2141 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN)) | |
2142 | { | |
2143 | tree size = gimple_call_arg (call, 0); | |
2144 | tree lb = gimple_call_lhs (call); | |
2145 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
2146 | bounds = chkp_make_bounds (lb, size, &iter, true); | |
2147 | } | |
2148 | /* We know bounds returned by set_bounds builtin call. */ | |
2149 | else if (fndecl | |
2150 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2151 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS) | |
2152 | { | |
2153 | tree lb = gimple_call_arg (call, 0); | |
2154 | tree size = gimple_call_arg (call, 1); | |
2155 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
2156 | bounds = chkp_make_bounds (lb, size, &iter, true); | |
2157 | } | |
2158 | /* Detect bounds initialization calls. */ | |
2159 | else if (fndecl | |
2160 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2161 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS) | |
2162 | bounds = chkp_get_zero_bounds (); | |
2163 | /* Detect bounds nullification calls. */ | |
2164 | else if (fndecl | |
2165 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2166 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS) | |
2167 | bounds = chkp_get_none_bounds (); | |
2168 | /* Detect bounds copy calls. */ | |
2169 | else if (fndecl | |
2170 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
2171 | && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS) | |
2172 | { | |
2173 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
2174 | bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter); | |
2175 | } | |
2176 | /* Do not use retbnd when returned bounds are equal to some | |
2177 | of passed bounds. */ | |
d3fb44cb | 2178 | else if (gimple_call_return_flags (call) & ERF_RETURNS_ARG) |
d5e254e1 IE |
2179 | { |
2180 | gimple_stmt_iterator iter = gsi_for_stmt (call); | |
2181 | unsigned int retarg = 0, argno; | |
2182 | if (gimple_call_return_flags (call) & ERF_RETURNS_ARG) | |
2183 | retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK; | |
2184 | if (gimple_call_with_bounds_p (call)) | |
2185 | { | |
2186 | for (argno = 0; argno < gimple_call_num_args (call); argno++) | |
2187 | if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno))) | |
2188 | { | |
2189 | if (retarg) | |
2190 | retarg--; | |
2191 | else | |
2192 | break; | |
2193 | } | |
2194 | } | |
2195 | else | |
2196 | argno = retarg; | |
2197 | ||
2198 | bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter); | |
2199 | } | |
985f48f7 | 2200 | else if (chkp_call_returns_bounds_p (call)) |
d5e254e1 IE |
2201 | { |
2202 | gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME); | |
2203 | ||
2204 | /* In general case build checker builtin call to | |
2205 | obtain returned bounds. */ | |
2206 | stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1, | |
2207 | gimple_call_lhs (call)); | |
2208 | chkp_mark_stmt (stmt); | |
2209 | ||
2210 | gsi = gsi_for_stmt (call); | |
2211 | gsi_insert_after (&gsi, stmt, GSI_SAME_STMT); | |
2212 | ||
2213 | bounds = chkp_get_tmp_reg (stmt); | |
2214 | gimple_call_set_lhs (stmt, bounds); | |
2215 | ||
2216 | update_stmt (stmt); | |
2217 | } | |
985f48f7 IE |
2218 | else |
2219 | bounds = chkp_get_zero_bounds (); | |
d5e254e1 IE |
2220 | |
2221 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2222 | { | |
2223 | fprintf (dump_file, "Built returned bounds ("); | |
2224 | print_generic_expr (dump_file, bounds, 0); | |
2225 | fprintf (dump_file, ") for call: "); | |
2226 | print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS); | |
2227 | } | |
2228 | ||
2229 | bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds); | |
2230 | ||
2231 | return bounds; | |
2232 | } | |
2233 | ||
2234 | /* Return bounds used as returned by call | |
2235 | which produced SSA name VAL. */ | |
538dd0b7 | 2236 | gcall * |
d5e254e1 IE |
2237 | chkp_retbnd_call_by_val (tree val) |
2238 | { | |
2239 | if (TREE_CODE (val) != SSA_NAME) | |
2240 | return NULL; | |
2241 | ||
2242 | gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL); | |
2243 | ||
2244 | imm_use_iterator use_iter; | |
2245 | use_operand_p use_p; | |
2246 | FOR_EACH_IMM_USE_FAST (use_p, use_iter, val) | |
2247 | if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL | |
2248 | && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl) | |
538dd0b7 | 2249 | return as_a <gcall *> (USE_STMT (use_p)); |
d5e254e1 IE |
2250 | |
2251 | return NULL; | |
2252 | } | |
2253 | ||
2254 | /* Check the next parameter for the given PARM is bounds | |
2255 | and return it's default SSA_NAME (create if required). */ | |
2256 | static tree | |
2257 | chkp_get_next_bounds_parm (tree parm) | |
2258 | { | |
2259 | tree bounds = TREE_CHAIN (parm); | |
2260 | gcc_assert (POINTER_BOUNDS_P (bounds)); | |
2261 | bounds = ssa_default_def (cfun, bounds); | |
2262 | if (!bounds) | |
2263 | { | |
2264 | bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ()); | |
2265 | set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds); | |
2266 | } | |
2267 | return bounds; | |
2268 | } | |
2269 | ||
2270 | /* Return bounds to be used for input argument PARM. */ | |
2271 | static tree | |
2272 | chkp_get_bound_for_parm (tree parm) | |
2273 | { | |
2274 | tree decl = SSA_NAME_VAR (parm); | |
2275 | tree bounds; | |
2276 | ||
2277 | gcc_assert (TREE_CODE (decl) == PARM_DECL); | |
2278 | ||
2279 | bounds = chkp_get_registered_bounds (parm); | |
2280 | ||
2281 | if (!bounds) | |
2282 | bounds = chkp_get_registered_bounds (decl); | |
2283 | ||
2284 | if (!bounds) | |
2285 | { | |
2286 | tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl; | |
2287 | ||
2288 | /* For static chain param we return zero bounds | |
2289 | because currently we do not check dereferences | |
2290 | of this pointer. */ | |
2291 | if (cfun->static_chain_decl == decl) | |
2292 | bounds = chkp_get_zero_bounds (); | |
2293 | /* If non instrumented runtime is used then it may be useful | |
2294 | to use zero bounds for input arguments of main | |
2295 | function. */ | |
2296 | else if (flag_chkp_zero_input_bounds_for_main | |
2297 | && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)), | |
2298 | "main") == 0) | |
2299 | bounds = chkp_get_zero_bounds (); | |
2300 | else if (BOUNDED_P (parm)) | |
2301 | { | |
2302 | bounds = chkp_get_next_bounds_parm (decl); | |
2303 | bounds = chkp_maybe_copy_and_register_bounds (decl, bounds); | |
2304 | ||
2305 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2306 | { | |
2307 | fprintf (dump_file, "Built arg bounds ("); | |
2308 | print_generic_expr (dump_file, bounds, 0); | |
2309 | fprintf (dump_file, ") for arg: "); | |
2310 | print_node (dump_file, "", decl, 0); | |
2311 | } | |
2312 | } | |
2313 | else | |
2314 | bounds = chkp_get_zero_bounds (); | |
2315 | } | |
2316 | ||
2317 | if (!chkp_get_registered_bounds (parm)) | |
2318 | bounds = chkp_maybe_copy_and_register_bounds (parm, bounds); | |
2319 | ||
2320 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2321 | { | |
2322 | fprintf (dump_file, "Using bounds "); | |
2323 | print_generic_expr (dump_file, bounds, 0); | |
2324 | fprintf (dump_file, " for parm "); | |
2325 | print_generic_expr (dump_file, parm, 0); | |
2326 | fprintf (dump_file, " of type "); | |
2327 | print_generic_expr (dump_file, TREE_TYPE (parm), 0); | |
2328 | fprintf (dump_file, ".\n"); | |
2329 | } | |
2330 | ||
2331 | return bounds; | |
2332 | } | |
2333 | ||
2334 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2335 | arguments. */ | |
2336 | tree | |
2337 | chkp_build_bndldx_call (tree addr, tree ptr) | |
2338 | { | |
2339 | tree fn = build1 (ADDR_EXPR, | |
2340 | build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)), | |
2341 | chkp_bndldx_fndecl); | |
2342 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)), | |
2343 | fn, 2, addr, ptr); | |
2344 | CALL_WITH_BOUNDS_P (call) = true; | |
2345 | return call; | |
2346 | } | |
2347 | ||
2348 | /* Insert code to load bounds for PTR located by ADDR. | |
2349 | Code is inserted after position pointed by GSI. | |
2350 | Loaded bounds are returned. */ | |
2351 | static tree | |
2352 | chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi) | |
2353 | { | |
2354 | gimple_seq seq; | |
2355 | gimple stmt; | |
2356 | tree bounds; | |
2357 | ||
2358 | seq = NULL; | |
2359 | ||
2360 | addr = chkp_force_gimple_call_op (addr, &seq); | |
2361 | ptr = chkp_force_gimple_call_op (ptr, &seq); | |
2362 | ||
2363 | stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr); | |
2364 | chkp_mark_stmt (stmt); | |
2365 | bounds = chkp_get_tmp_reg (stmt); | |
2366 | gimple_call_set_lhs (stmt, bounds); | |
2367 | ||
2368 | gimple_seq_add_stmt (&seq, stmt); | |
2369 | ||
2370 | gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING); | |
2371 | ||
2372 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2373 | { | |
2374 | fprintf (dump_file, "Generated bndldx for pointer "); | |
2375 | print_generic_expr (dump_file, ptr, 0); | |
2376 | fprintf (dump_file, ": "); | |
2377 | print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS); | |
2378 | } | |
2379 | ||
2380 | return bounds; | |
2381 | } | |
2382 | ||
2383 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2384 | arguments. */ | |
2385 | tree | |
2386 | chkp_build_bndstx_call (tree addr, tree ptr, tree bounds) | |
2387 | { | |
2388 | tree fn = build1 (ADDR_EXPR, | |
2389 | build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)), | |
2390 | chkp_bndstx_fndecl); | |
2391 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)), | |
2392 | fn, 3, ptr, bounds, addr); | |
2393 | CALL_WITH_BOUNDS_P (call) = true; | |
2394 | return call; | |
2395 | } | |
2396 | ||
2397 | /* Insert code to store BOUNDS for PTR stored by ADDR. | |
2398 | New statements are inserted after position pointed | |
2399 | by GSI. */ | |
2400 | void | |
2401 | chkp_build_bndstx (tree addr, tree ptr, tree bounds, | |
2402 | gimple_stmt_iterator *gsi) | |
2403 | { | |
2404 | gimple_seq seq; | |
2405 | gimple stmt; | |
2406 | ||
2407 | seq = NULL; | |
2408 | ||
2409 | addr = chkp_force_gimple_call_op (addr, &seq); | |
2410 | ptr = chkp_force_gimple_call_op (ptr, &seq); | |
2411 | ||
2412 | stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr); | |
2413 | chkp_mark_stmt (stmt); | |
2414 | gimple_call_set_with_bounds (stmt, true); | |
2415 | ||
2416 | gimple_seq_add_stmt (&seq, stmt); | |
2417 | ||
2418 | gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING); | |
2419 | ||
2420 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2421 | { | |
2422 | fprintf (dump_file, "Generated bndstx for pointer store "); | |
2423 | print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS); | |
2424 | print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS); | |
2425 | } | |
2426 | } | |
2427 | ||
2428 | /* Compute bounds for pointer NODE which was assigned in | |
2429 | assignment statement ASSIGN. Return computed bounds. */ | |
2430 | static tree | |
2431 | chkp_compute_bounds_for_assignment (tree node, gimple assign) | |
2432 | { | |
2433 | enum tree_code rhs_code = gimple_assign_rhs_code (assign); | |
2434 | tree rhs1 = gimple_assign_rhs1 (assign); | |
2435 | tree bounds = NULL_TREE; | |
2436 | gimple_stmt_iterator iter = gsi_for_stmt (assign); | |
2437 | ||
2438 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2439 | { | |
2440 | fprintf (dump_file, "Computing bounds for assignment: "); | |
2441 | print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS); | |
2442 | } | |
2443 | ||
2444 | switch (rhs_code) | |
2445 | { | |
2446 | case MEM_REF: | |
2447 | case TARGET_MEM_REF: | |
2448 | case COMPONENT_REF: | |
2449 | case ARRAY_REF: | |
2450 | /* We need to load bounds from the bounds table. */ | |
2451 | bounds = chkp_find_bounds_loaded (node, rhs1, &iter); | |
2452 | break; | |
2453 | ||
2454 | case VAR_DECL: | |
2455 | case SSA_NAME: | |
2456 | case ADDR_EXPR: | |
2457 | case POINTER_PLUS_EXPR: | |
2458 | case NOP_EXPR: | |
2459 | case CONVERT_EXPR: | |
2460 | case INTEGER_CST: | |
2461 | /* Bounds are just propagated from RHS. */ | |
2462 | bounds = chkp_find_bounds (rhs1, &iter); | |
2463 | break; | |
2464 | ||
2465 | case VIEW_CONVERT_EXPR: | |
2466 | /* Bounds are just propagated from RHS. */ | |
2467 | bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter); | |
2468 | break; | |
2469 | ||
2470 | case PARM_DECL: | |
2471 | if (BOUNDED_P (rhs1)) | |
2472 | { | |
2473 | /* We need to load bounds from the bounds table. */ | |
2474 | bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1), | |
2475 | node, &iter); | |
2476 | TREE_ADDRESSABLE (rhs1) = 1; | |
2477 | } | |
2478 | else | |
2479 | bounds = chkp_get_nonpointer_load_bounds (); | |
2480 | break; | |
2481 | ||
2482 | case MINUS_EXPR: | |
2483 | case PLUS_EXPR: | |
2484 | case BIT_AND_EXPR: | |
2485 | case BIT_IOR_EXPR: | |
2486 | case BIT_XOR_EXPR: | |
2487 | { | |
2488 | tree rhs2 = gimple_assign_rhs2 (assign); | |
2489 | tree bnd1 = chkp_find_bounds (rhs1, &iter); | |
2490 | tree bnd2 = chkp_find_bounds (rhs2, &iter); | |
2491 | ||
2492 | /* First we try to check types of operands. If it | |
2493 | does not help then look at bound values. | |
2494 | ||
2495 | If some bounds are incomplete and other are | |
2496 | not proven to be valid (i.e. also incomplete | |
2497 | or invalid because value is not pointer) then | |
2498 | resulting value is incomplete and will be | |
2499 | recomputed later in chkp_finish_incomplete_bounds. */ | |
2500 | if (BOUNDED_P (rhs1) | |
2501 | && !BOUNDED_P (rhs2)) | |
2502 | bounds = bnd1; | |
2503 | else if (BOUNDED_P (rhs2) | |
2504 | && !BOUNDED_P (rhs1) | |
2505 | && rhs_code != MINUS_EXPR) | |
2506 | bounds = bnd2; | |
2507 | else if (chkp_incomplete_bounds (bnd1)) | |
2508 | if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR | |
2509 | && !chkp_incomplete_bounds (bnd2)) | |
2510 | bounds = bnd2; | |
2511 | else | |
2512 | bounds = incomplete_bounds; | |
2513 | else if (chkp_incomplete_bounds (bnd2)) | |
2514 | if (chkp_valid_bounds (bnd1) | |
2515 | && !chkp_incomplete_bounds (bnd1)) | |
2516 | bounds = bnd1; | |
2517 | else | |
2518 | bounds = incomplete_bounds; | |
2519 | else if (!chkp_valid_bounds (bnd1)) | |
2520 | if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR) | |
2521 | bounds = bnd2; | |
2522 | else if (bnd2 == chkp_get_zero_bounds ()) | |
2523 | bounds = bnd2; | |
2524 | else | |
2525 | bounds = bnd1; | |
2526 | else if (!chkp_valid_bounds (bnd2)) | |
2527 | bounds = bnd1; | |
2528 | else | |
2529 | /* Seems both operands may have valid bounds | |
2530 | (e.g. pointer minus pointer). In such case | |
2531 | use default invalid op bounds. */ | |
2532 | bounds = chkp_get_invalid_op_bounds (); | |
2533 | } | |
2534 | break; | |
2535 | ||
2536 | case BIT_NOT_EXPR: | |
2537 | case NEGATE_EXPR: | |
2538 | case LSHIFT_EXPR: | |
2539 | case RSHIFT_EXPR: | |
2540 | case LROTATE_EXPR: | |
2541 | case RROTATE_EXPR: | |
2542 | case EQ_EXPR: | |
2543 | case NE_EXPR: | |
2544 | case LT_EXPR: | |
2545 | case LE_EXPR: | |
2546 | case GT_EXPR: | |
2547 | case GE_EXPR: | |
2548 | case MULT_EXPR: | |
2549 | case RDIV_EXPR: | |
2550 | case TRUNC_DIV_EXPR: | |
2551 | case FLOOR_DIV_EXPR: | |
2552 | case CEIL_DIV_EXPR: | |
2553 | case ROUND_DIV_EXPR: | |
2554 | case TRUNC_MOD_EXPR: | |
2555 | case FLOOR_MOD_EXPR: | |
2556 | case CEIL_MOD_EXPR: | |
2557 | case ROUND_MOD_EXPR: | |
2558 | case EXACT_DIV_EXPR: | |
2559 | case FIX_TRUNC_EXPR: | |
2560 | case FLOAT_EXPR: | |
2561 | case REALPART_EXPR: | |
2562 | case IMAGPART_EXPR: | |
2563 | /* No valid bounds may be produced by these exprs. */ | |
2564 | bounds = chkp_get_invalid_op_bounds (); | |
2565 | break; | |
2566 | ||
2567 | case COND_EXPR: | |
2568 | { | |
2569 | tree val1 = gimple_assign_rhs2 (assign); | |
2570 | tree val2 = gimple_assign_rhs3 (assign); | |
2571 | tree bnd1 = chkp_find_bounds (val1, &iter); | |
2572 | tree bnd2 = chkp_find_bounds (val2, &iter); | |
2573 | gimple stmt; | |
2574 | ||
2575 | if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2)) | |
2576 | bounds = incomplete_bounds; | |
2577 | else if (bnd1 == bnd2) | |
2578 | bounds = bnd1; | |
2579 | else | |
2580 | { | |
2581 | rhs1 = unshare_expr (rhs1); | |
2582 | ||
2583 | bounds = chkp_get_tmp_reg (assign); | |
0d0e4a03 | 2584 | stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2); |
d5e254e1 IE |
2585 | gsi_insert_after (&iter, stmt, GSI_SAME_STMT); |
2586 | ||
2587 | if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2)) | |
2588 | chkp_mark_invalid_bounds (bounds); | |
2589 | } | |
2590 | } | |
2591 | break; | |
2592 | ||
2593 | case MAX_EXPR: | |
2594 | case MIN_EXPR: | |
2595 | { | |
2596 | tree rhs2 = gimple_assign_rhs2 (assign); | |
2597 | tree bnd1 = chkp_find_bounds (rhs1, &iter); | |
2598 | tree bnd2 = chkp_find_bounds (rhs2, &iter); | |
2599 | ||
2600 | if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2)) | |
2601 | bounds = incomplete_bounds; | |
2602 | else if (bnd1 == bnd2) | |
2603 | bounds = bnd1; | |
2604 | else | |
2605 | { | |
2606 | gimple stmt; | |
2607 | tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR, | |
2608 | boolean_type_node, rhs1, rhs2); | |
2609 | bounds = chkp_get_tmp_reg (assign); | |
0d0e4a03 | 2610 | stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2); |
d5e254e1 IE |
2611 | |
2612 | gsi_insert_after (&iter, stmt, GSI_SAME_STMT); | |
2613 | ||
2614 | if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2)) | |
2615 | chkp_mark_invalid_bounds (bounds); | |
2616 | } | |
2617 | } | |
2618 | break; | |
2619 | ||
2620 | default: | |
2621 | bounds = chkp_get_zero_bounds (); | |
2622 | warning (0, "pointer bounds were lost due to unexpected expression %s", | |
2623 | get_tree_code_name (rhs_code)); | |
2624 | } | |
2625 | ||
2626 | gcc_assert (bounds); | |
2627 | ||
2628 | if (node) | |
2629 | bounds = chkp_maybe_copy_and_register_bounds (node, bounds); | |
2630 | ||
2631 | return bounds; | |
2632 | } | |
2633 | ||
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI definition a new bounds phi node is created in the same block
   and *ITER is updated to point to it.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the kind of decl
	 the ssa name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a usable constant size
	       for the bounds below to make sense.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Bounds for a pointer phi are computed by a parallel phi node.
	 Choose a variable for its result: for abnormal phis we need
	 a decl-based name to satisfy SSA coalescing constraints.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* We cannot infer bounds produced by inline asm; use zero
	 bounds so accesses through the result are not checked.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2746 | ||
2747 | /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */ | |
2748 | tree | |
2749 | chkp_build_make_bounds_call (tree lower_bound, tree size) | |
2750 | { | |
2751 | tree call = build1 (ADDR_EXPR, | |
2752 | build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)), | |
2753 | chkp_bndmk_fndecl); | |
2754 | return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)), | |
2755 | call, 2, lower_bound, size); | |
2756 | } | |
2757 | ||
2758 | /* Create static bounds var of specfified OBJ which is | |
2759 | is either VAR_DECL or string constant. */ | |
2760 | static tree | |
2761 | chkp_make_static_bounds (tree obj) | |
2762 | { | |
2763 | static int string_id = 1; | |
2764 | static int var_id = 1; | |
2765 | tree *slot; | |
2766 | const char *var_name; | |
2767 | char *bnd_var_name; | |
2768 | tree bnd_var; | |
2769 | ||
2770 | /* First check if we already have required var. */ | |
2771 | if (chkp_static_var_bounds) | |
2772 | { | |
227eabce IE |
2773 | /* For vars we use assembler name as a key in |
2774 | chkp_static_var_bounds map. It allows to | |
2775 | avoid duplicating bound vars for decls | |
2776 | sharing assembler name. */ | |
2777 | if (TREE_CODE (obj) == VAR_DECL) | |
2778 | { | |
2779 | tree name = DECL_ASSEMBLER_NAME (obj); | |
2780 | slot = chkp_static_var_bounds->get (name); | |
2781 | if (slot) | |
2782 | return *slot; | |
2783 | } | |
2784 | else | |
2785 | { | |
2786 | slot = chkp_static_var_bounds->get (obj); | |
2787 | if (slot) | |
2788 | return *slot; | |
2789 | } | |
d5e254e1 IE |
2790 | } |
2791 | ||
2792 | /* Build decl for bounds var. */ | |
2793 | if (TREE_CODE (obj) == VAR_DECL) | |
2794 | { | |
2795 | if (DECL_IGNORED_P (obj)) | |
2796 | { | |
2797 | bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10); | |
2798 | sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++); | |
2799 | } | |
2800 | else | |
2801 | { | |
2802 | var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj)); | |
2803 | ||
2804 | /* For hidden symbols we want to skip first '*' char. */ | |
2805 | if (*var_name == '*') | |
2806 | var_name++; | |
2807 | ||
2808 | bnd_var_name = (char *) xmalloc (strlen (var_name) | |
2809 | + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1); | |
2810 | strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX); | |
2811 | strcat (bnd_var_name, var_name); | |
2812 | } | |
2813 | ||
2814 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
2815 | get_identifier (bnd_var_name), | |
2816 | pointer_bounds_type_node); | |
2817 | ||
2818 | /* Address of the obj will be used as lower bound. */ | |
2819 | TREE_ADDRESSABLE (obj) = 1; | |
2820 | } | |
2821 | else | |
2822 | { | |
2823 | bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10); | |
2824 | sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++); | |
2825 | ||
2826 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
2827 | get_identifier (bnd_var_name), | |
2828 | pointer_bounds_type_node); | |
2829 | } | |
2830 | ||
2831 | TREE_PUBLIC (bnd_var) = 0; | |
2832 | TREE_USED (bnd_var) = 1; | |
2833 | TREE_READONLY (bnd_var) = 0; | |
2834 | TREE_STATIC (bnd_var) = 1; | |
2835 | TREE_ADDRESSABLE (bnd_var) = 0; | |
2836 | DECL_ARTIFICIAL (bnd_var) = 1; | |
2837 | DECL_COMMON (bnd_var) = 1; | |
2838 | DECL_COMDAT (bnd_var) = 1; | |
2839 | DECL_READ_P (bnd_var) = 1; | |
2840 | DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj); | |
2841 | /* Force output similar to constant bounds. | |
2842 | See chkp_make_static_const_bounds. */ | |
2843 | varpool_node::get_create (bnd_var)->force_output = 1; | |
2844 | /* Mark symbol as requiring bounds initialization. */ | |
2845 | varpool_node::get_create (bnd_var)->need_bounds_init = 1; | |
2846 | varpool_node::finalize_decl (bnd_var); | |
2847 | ||
2848 | /* Add created var to the map to use it for other references | |
2849 | to obj. */ | |
2850 | if (!chkp_static_var_bounds) | |
2851 | chkp_static_var_bounds = new hash_map<tree, tree>; | |
2852 | ||
227eabce IE |
2853 | if (TREE_CODE (obj) == VAR_DECL) |
2854 | { | |
2855 | tree name = DECL_ASSEMBLER_NAME (obj); | |
2856 | chkp_static_var_bounds->put (name, bnd_var); | |
2857 | } | |
2858 | else | |
2859 | chkp_static_var_bounds->put (obj, bnd_var); | |
d5e254e1 IE |
2860 | |
2861 | return bnd_var; | |
2862 | } | |
2863 | ||
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Emit a call to the size builtin; its result is resolved
     to the object size at link/run time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 0 - &var wraps around to the distance from VAR to the top
	 of the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* All generated statements go to the start of the function
     entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2924 | ||
2925 | /* Return 1 if TYPE has fields with zero size or fields | |
2926 | marked with chkp_variable_size attribute. */ | |
2927 | bool | |
2928 | chkp_variable_size_type (tree type) | |
2929 | { | |
2930 | bool res = false; | |
2931 | tree field; | |
2932 | ||
2933 | if (RECORD_OR_UNION_TYPE_P (type)) | |
2934 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
2935 | { | |
2936 | if (TREE_CODE (field) == FIELD_DECL) | |
2937 | res = res | |
2938 | || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
2939 | || chkp_variable_size_type (TREE_TYPE (field)); | |
2940 | } | |
2941 | else | |
2942 | res = !TYPE_SIZE (type) | |
2943 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST | |
2944 | || tree_to_uhwi (TYPE_SIZE (type)) == 0; | |
2945 | ||
2946 | return res; | |
2947 | } | |
2948 | ||
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse previously computed bounds if any.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  /* For non-thread-local statics prefer a static bounds var
     (initialized at startup) over per-function computation.  */
  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  /* Same unknown-size condition as above; reached only when
     flag_chkp_incomplete_type is set, so compute size at runtime.  */
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Known constant size: build bounds directly from the
	 decl's address and size.  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3014 | ||
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse previously computed bounds if any.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): presumably flag_chkp_use_static_const_bounds can be
     negative when not set explicitly, making this an "explicitly
     enabled, or defaulted on together with static bounds" test —
     confirm against the option's default value.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds var.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Build bounds from the string's address and length.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3052 | ||
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   the end of the entry block.  Either argument may be NULL or
   zero bounds, in which case the other one is returned as-is.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  Note ITER is repointed
	 at a local iterator here so the dump below can use it in
	 both cases.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3103 | ||
3104 | /* Return 1 if we are allowed to narrow bounds for addressed FIELD | |
3105 | and 0 othersize. */ | |
3106 | static bool | |
3107 | chkp_may_narrow_to_field (tree field) | |
3108 | { | |
3109 | return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST | |
3110 | && tree_to_uhwi (DECL_SIZE (field)) != 0 | |
3111 | && (!DECL_FIELD_OFFSET (field) | |
3112 | || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST) | |
3113 | && (!DECL_FIELD_BIT_OFFSET (field) | |
3114 | || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST) | |
3115 | && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3116 | && !chkp_variable_size_type (TREE_TYPE (field)); | |
3117 | } | |
3118 | ||
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow when narrowing is enabled and either the first field
     gets its own bounds too, or the field is not at offset zero.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3143 | ||
3144 | /* Perform narrowing for BOUNDS using bounds computed for field | |
3145 | access COMPONENT. ITER meaning is the same as for | |
3146 | chkp_intersect_bounds. */ | |
3147 | static tree | |
3148 | chkp_narrow_bounds_to_field (tree bounds, tree component, | |
3149 | gimple_stmt_iterator *iter) | |
3150 | { | |
3151 | tree field = TREE_OPERAND (component, 1); | |
3152 | tree size = DECL_SIZE_UNIT (field); | |
3153 | tree field_ptr = chkp_build_addr_expr (component); | |
3154 | tree field_bounds; | |
3155 | ||
3156 | field_bounds = chkp_make_bounds (field_ptr, size, iter, false); | |
3157 | ||
3158 | return chkp_intersect_bounds (field_bounds, bounds, iter); | |
3159 | } | |
3160 | ||
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  After this loop nodes[0] is the base
     object and nodes[len - 1] is NODE itself.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing may go out of the object, so checks
	     are required.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component preceding the
		 leftmost array access and stop scanning.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field on the way in
		 and restart the component search.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* No narrowing happened but innermost bounds were requested:
     fall back to the bounds of the whole object.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3302 | ||
/* Compute and return bounds for address of OBJ.
   Computed bounds are registered for OBJ so repeated
   requests return the cached value.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Only BOUNDS output is used here; innermost_bounds
	   guarantees it is set.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not checked; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Parts of a complex share the bounds of the whole.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3370 | ||
3371 | /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements | |
3372 | to compute bounds if required. Computed bounds should be available at | |
3373 | position pointed by ITER. | |
3374 | ||
3375 | If PTR_SRC is NULL_TREE then pointer definition is identified. | |
3376 | ||
3377 | If PTR_SRC is not NULL_TREE then ITER points to statements which loads | |
3378 | PTR. If PTR is a any memory reference then ITER points to a statement | |
3379 | after which bndldx will be inserterd. In both cases ITER will be updated | |
3380 | to point to the inserted bndldx statement. */ | |
3381 | ||
3382 | static tree | |
3383 | chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter) | |
3384 | { | |
3385 | tree addr = NULL_TREE; | |
3386 | tree bounds = NULL_TREE; | |
3387 | ||
3388 | if (!ptr_src) | |
3389 | ptr_src = ptr; | |
3390 | ||
3391 | bounds = chkp_get_registered_bounds (ptr_src); | |
3392 | ||
3393 | if (bounds) | |
3394 | return bounds; | |
3395 | ||
3396 | switch (TREE_CODE (ptr_src)) | |
3397 | { | |
3398 | case MEM_REF: | |
3399 | case VAR_DECL: | |
3400 | if (BOUNDED_P (ptr_src)) | |
3401 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3402 | bounds = chkp_get_zero_bounds (); | |
3403 | else | |
3404 | { | |
3405 | addr = chkp_build_addr_expr (ptr_src); | |
3406 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3407 | } | |
3408 | else | |
3409 | bounds = chkp_get_nonpointer_load_bounds (); | |
3410 | break; | |
3411 | ||
3412 | case ARRAY_REF: | |
3413 | case COMPONENT_REF: | |
3414 | addr = get_base_address (ptr_src); | |
3415 | if (DECL_P (addr) | |
3416 | || TREE_CODE (addr) == MEM_REF | |
3417 | || TREE_CODE (addr) == TARGET_MEM_REF) | |
3418 | { | |
3419 | if (BOUNDED_P (ptr_src)) | |
3420 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3421 | bounds = chkp_get_zero_bounds (); | |
3422 | else | |
3423 | { | |
3424 | addr = chkp_build_addr_expr (ptr_src); | |
3425 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3426 | } | |
3427 | else | |
3428 | bounds = chkp_get_nonpointer_load_bounds (); | |
3429 | } | |
3430 | else | |
3431 | { | |
3432 | gcc_assert (TREE_CODE (addr) == SSA_NAME); | |
3433 | bounds = chkp_find_bounds (addr, iter); | |
3434 | } | |
3435 | break; | |
3436 | ||
3437 | case PARM_DECL: | |
3438 | gcc_unreachable (); | |
3439 | bounds = chkp_get_bound_for_parm (ptr_src); | |
3440 | break; | |
3441 | ||
3442 | case TARGET_MEM_REF: | |
3443 | addr = chkp_build_addr_expr (ptr_src); | |
3444 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3445 | break; | |
3446 | ||
3447 | case SSA_NAME: | |
3448 | bounds = chkp_get_registered_bounds (ptr_src); | |
3449 | if (!bounds) | |
3450 | { | |
3451 | gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src); | |
538dd0b7 | 3452 | gphi_iterator phi_iter; |
d5e254e1 IE |
3453 | |
3454 | bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter); | |
3455 | ||
3456 | gcc_assert (bounds); | |
3457 | ||
538dd0b7 | 3458 | if (gphi *def_phi = dyn_cast <gphi *> (def_stmt)) |
d5e254e1 IE |
3459 | { |
3460 | unsigned i; | |
3461 | ||
538dd0b7 | 3462 | for (i = 0; i < gimple_phi_num_args (def_phi); i++) |
d5e254e1 | 3463 | { |
538dd0b7 | 3464 | tree arg = gimple_phi_arg_def (def_phi, i); |
d5e254e1 | 3465 | tree arg_bnd; |
538dd0b7 | 3466 | gphi *phi_bnd; |
d5e254e1 IE |
3467 | |
3468 | arg_bnd = chkp_find_bounds (arg, NULL); | |
3469 | ||
3470 | /* chkp_get_bounds_by_definition created new phi | |
3471 | statement and phi_iter points to it. | |
3472 | ||
3473 | Previous call to chkp_find_bounds could create | |
3474 | new basic block and therefore change phi statement | |
3475 | phi_iter points to. */ | |
538dd0b7 | 3476 | phi_bnd = phi_iter.phi (); |
d5e254e1 IE |
3477 | |
3478 | add_phi_arg (phi_bnd, arg_bnd, | |
538dd0b7 | 3479 | gimple_phi_arg_edge (def_phi, i), |
d5e254e1 IE |
3480 | UNKNOWN_LOCATION); |
3481 | } | |
3482 | ||
3483 | /* If all bound phi nodes have their arg computed | |
3484 | then we may finish its computation. See | |
3485 | chkp_finish_incomplete_bounds for more details. */ | |
3486 | if (chkp_may_finish_incomplete_bounds ()) | |
3487 | chkp_finish_incomplete_bounds (); | |
3488 | } | |
3489 | ||
3490 | gcc_assert (bounds == chkp_get_registered_bounds (ptr_src) | |
3491 | || chkp_incomplete_bounds (bounds)); | |
3492 | } | |
3493 | break; | |
3494 | ||
3495 | case ADDR_EXPR: | |
3496 | bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter); | |
3497 | break; | |
3498 | ||
3499 | case INTEGER_CST: | |
3500 | if (integer_zerop (ptr_src)) | |
3501 | bounds = chkp_get_none_bounds (); | |
3502 | else | |
3503 | bounds = chkp_get_invalid_op_bounds (); | |
3504 | break; | |
3505 | ||
3506 | default: | |
3507 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3508 | { | |
3509 | fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n", | |
3510 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3511 | print_node (dump_file, "", ptr_src, 0); | |
3512 | } | |
3513 | internal_error ("chkp_find_bounds: Unexpected tree code %s", | |
3514 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3515 | } | |
3516 | ||
3517 | if (!bounds) | |
3518 | { | |
3519 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3520 | { | |
3521 | fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n"); | |
3522 | print_node (dump_file, "", ptr_src, 0); | |
3523 | } | |
3524 | internal_error ("chkp_find_bounds: Cannot find bounds for pointer"); | |
3525 | } | |
3526 | ||
3527 | return bounds; | |
3528 | } | |
3529 | ||
/* Normal case for bounds search without forced narrowing.
   PTR is both the pointer and its own bounds source;
   see chkp_find_bounds_1 for ITER semantics.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3536 | ||
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper over
   chkp_find_bounds_1 passing the explicit load source.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3544 | ||
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   Recurses through record/union fields and array elements; scalar
   pointer members bottom out in a HANDLER call.  ARG is passed
   through to HANDLER unchanged.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
                               assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A pointer itself — hand the pair directly to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT cnt;
          tree val;

          /* Initializer: walk only the fields present in the
             CONSTRUCTOR; VAL is the per-field initializer.  */
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
            {
              if (chkp_type_has_pointer (TREE_TYPE (field)))
                {
                  tree lhs_field = chkp_build_component_ref (lhs, field);
                  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
                }
            }
        }
      else
        /* Plain struct copy: pair up corresponding fields of LHS
           and RHS that contain pointers.  */
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
          if (TREE_CODE (field) == FIELD_DECL
              && chkp_type_has_pointer (TREE_TYPE (field)))
            {
              tree rhs_field = chkp_build_component_ref (rhs, field);
              tree lhs_field = chkp_build_component_ref (lhs, field);
              chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
            }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT cnt;
          tree purp, val, lhs_elem;

          /* PURP is the element index (INTEGER_CST) or a
             RANGE_EXPR covering several indices; when absent,
             elements are consecutive starting at CUR.  */
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
            {
              if (purp && TREE_CODE (purp) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (purp, 0);
                  tree hi_index = TREE_OPERAND (purp, 1);

                  /* One initializer VAL applied to every index in
                     [lo_index, hi_index].  */
                  for (cur = (unsigned)tree_to_uhwi (lo_index);
                       cur <= (unsigned)tree_to_uhwi (hi_index);
                       cur++)
                    {
                      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
                      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                    }
                }
              else
                {
                  if (purp)
                    {
                      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
                      cur = tree_to_uhwi (purp);
                    }

                  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

                  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                }
            }
        }
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
        for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
          {
            tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
            tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
            chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
          }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
                   get_tree_code_name (TREE_CODE (type)));
}
3643 | ||
3644 | /* Add code to copy bounds for assignment of RHS to LHS. | |
3645 | ARG is an iterator pointing ne code position. */ | |
3646 | static void | |
3647 | chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg) | |
3648 | { | |
3649 | gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg; | |
3650 | tree bounds = chkp_find_bounds (rhs, iter); | |
3651 | tree addr = chkp_build_addr_expr(lhs); | |
3652 | ||
3653 | chkp_build_bndstx (addr, rhs, bounds, iter); | |
3654 | } | |
3655 | ||
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Nothing to emit for an erroneous compilation.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
        && !POINTER_BOUNDS_P (node->decl)
        && DECL_RTL (node->decl)
        && MEM_P (DECL_RTL (node->decl))
        && TREE_ASM_WRITTEN (node->decl))
      {
        chkp_walk_pointer_assignments (node->decl,
                                       DECL_INITIAL (node->decl),
                                       &stmts,
                                       chkp_add_modification_to_stmt_list);

        /* Split accumulated statements into several constructors
           once the per-constructor statement budget is exhausted.  */
        if (stmts.avail <= 0)
          {
            cgraph_build_static_cdtor ('P', stmts.stmts,
                                       MAX_RESERVED_INIT_PRIORITY + 3);
            stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
            stmts.stmts = NULL;
          }
      }

  /* Flush statements remaining after the loop, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
        && POINTER_BOUNDS_P (node->decl)
        && TREE_ASM_WRITTEN (node->decl))
      {
        tree bnd = node->decl;
        tree var;

        /* A static bounds var is initialized with the address of
           the var it describes.  */
        gcc_assert (DECL_INITIAL (bnd)
                    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

        var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
        chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 2);

  /* These maps are no longer needed once all initializers are
     emitted.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3723 | ||
3724 | /* An instrumentation function which is called for each statement | |
3725 | having memory access we want to instrument. It inserts check | |
3726 | code and bounds copy code. | |
3727 | ||
3728 | ITER points to statement to instrument. | |
3729 | ||
3730 | NODE holds memory access in statement to check. | |
3731 | ||
3732 | LOC holds the location information for statement. | |
3733 | ||
3734 | DIRFLAGS determines whether access is read or write. | |
3735 | ||
3736 | ACCESS_OFFS should be added to address used in NODE | |
3737 | before check. | |
3738 | ||
3739 | ACCESS_SIZE holds size of checked access. | |
3740 | ||
3741 | SAFE indicates if NODE access is safe and should not be | |
3742 | checked. */ | |
3743 | static void | |
3744 | chkp_process_stmt (gimple_stmt_iterator *iter, tree node, | |
3745 | location_t loc, tree dirflag, | |
3746 | tree access_offs, tree access_size, | |
3747 | bool safe) | |
3748 | { | |
3749 | tree node_type = TREE_TYPE (node); | |
3750 | tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type); | |
3751 | tree addr_first = NULL_TREE; /* address of the first accessed byte */ | |
3752 | tree addr_last = NULL_TREE; /* address of the last accessed byte */ | |
3753 | tree ptr = NULL_TREE; /* a pointer used for dereference */ | |
3754 | tree bounds = NULL_TREE; | |
3755 | ||
3756 | /* We do not need instrumentation for clobbers. */ | |
3757 | if (dirflag == integer_one_node | |
3758 | && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN | |
3759 | && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter)))) | |
3760 | return; | |
3761 | ||
3762 | switch (TREE_CODE (node)) | |
3763 | { | |
3764 | case ARRAY_REF: | |
3765 | case COMPONENT_REF: | |
3766 | { | |
3767 | bool bitfield; | |
3768 | tree elt; | |
3769 | ||
3770 | if (safe) | |
3771 | { | |
3772 | /* We are not going to generate any checks, so do not | |
3773 | generate bounds as well. */ | |
3774 | addr_first = chkp_build_addr_expr (node); | |
3775 | break; | |
3776 | } | |
3777 | ||
3778 | chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe, | |
3779 | &bitfield, &bounds, iter, false); | |
3780 | ||
3781 | /* Break if there is no dereference and operation is safe. */ | |
3782 | ||
3783 | if (bitfield) | |
3784 | { | |
3785 | tree field = TREE_OPERAND (node, 1); | |
3786 | ||
3787 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) | |
3788 | size = DECL_SIZE_UNIT (field); | |
3789 | ||
3790 | if (elt) | |
3791 | elt = chkp_build_addr_expr (elt); | |
3792 | addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr); | |
3793 | addr_first = fold_build_pointer_plus_loc (loc, | |
3794 | addr_first, | |
3795 | byte_position (field)); | |
3796 | } | |
3797 | else | |
3798 | addr_first = chkp_build_addr_expr (node); | |
3799 | } | |
3800 | break; | |
3801 | ||
3802 | case INDIRECT_REF: | |
3803 | ptr = TREE_OPERAND (node, 0); | |
3804 | addr_first = ptr; | |
3805 | break; | |
3806 | ||
3807 | case MEM_REF: | |
3808 | ptr = TREE_OPERAND (node, 0); | |
3809 | addr_first = chkp_build_addr_expr (node); | |
3810 | break; | |
3811 | ||
3812 | case TARGET_MEM_REF: | |
3813 | ptr = TMR_BASE (node); | |
3814 | addr_first = chkp_build_addr_expr (node); | |
3815 | break; | |
3816 | ||
3817 | case ARRAY_RANGE_REF: | |
3818 | printf("ARRAY_RANGE_REF\n"); | |
3819 | debug_gimple_stmt(gsi_stmt(*iter)); | |
3820 | debug_tree(node); | |
3821 | gcc_unreachable (); | |
3822 | break; | |
3823 | ||
3824 | case BIT_FIELD_REF: | |
3825 | { | |
3826 | tree offs, rem, bpu; | |
3827 | ||
3828 | gcc_assert (!access_offs); | |
3829 | gcc_assert (!access_size); | |
3830 | ||
3831 | bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT)); | |
3832 | offs = fold_convert (size_type_node, TREE_OPERAND (node, 2)); | |
3833 | rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu); | |
3834 | offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu); | |
3835 | ||
3836 | size = fold_convert (size_type_node, TREE_OPERAND (node, 1)); | |
3837 | size = size_binop_loc (loc, PLUS_EXPR, size, rem); | |
3838 | size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu); | |
3839 | size = fold_convert (size_type_node, size); | |
3840 | ||
3841 | chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc, | |
3842 | dirflag, offs, size, safe); | |
3843 | return; | |
3844 | } | |
3845 | break; | |
3846 | ||
3847 | case VAR_DECL: | |
3848 | case RESULT_DECL: | |
3849 | case PARM_DECL: | |
3850 | if (dirflag != integer_one_node | |
3851 | || DECL_REGISTER (node)) | |
3852 | return; | |
3853 | ||
3854 | safe = true; | |
3855 | addr_first = chkp_build_addr_expr (node); | |
3856 | break; | |
3857 | ||
3858 | default: | |
3859 | return; | |
3860 | } | |
3861 | ||
3862 | /* If addr_last was not computed then use (addr_first + size - 1) | |
3863 | expression to compute it. */ | |
3864 | if (!addr_last) | |
3865 | { | |
3866 | addr_last = fold_build_pointer_plus_loc (loc, addr_first, size); | |
3867 | addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1); | |
3868 | } | |
3869 | ||
3870 | /* Shift both first_addr and last_addr by access_offs if specified. */ | |
3871 | if (access_offs) | |
3872 | { | |
3873 | addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs); | |
3874 | addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs); | |
3875 | } | |
3876 | ||
3877 | /* Generate bndcl/bndcu checks if memory access is not safe. */ | |
3878 | if (!safe) | |
3879 | { | |
3880 | gimple_stmt_iterator stmt_iter = *iter; | |
3881 | ||
3882 | if (!bounds) | |
3883 | bounds = chkp_find_bounds (ptr, iter); | |
3884 | ||
3885 | chkp_check_mem_access (addr_first, addr_last, bounds, | |
3886 | stmt_iter, loc, dirflag); | |
3887 | } | |
3888 | ||
3889 | /* We need to store bounds in case pointer is stored. */ | |
3890 | if (dirflag == integer_one_node | |
3891 | && chkp_type_has_pointer (node_type) | |
3892 | && flag_chkp_store_bounds) | |
3893 | { | |
3894 | gimple stmt = gsi_stmt (*iter); | |
3895 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
3896 | enum tree_code rhs_code = gimple_assign_rhs_code (stmt); | |
3897 | ||
3898 | if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS) | |
3899 | chkp_walk_pointer_assignments (node, rhs1, iter, | |
3900 | chkp_copy_bounds_for_elem); | |
3901 | else | |
3902 | { | |
3903 | bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt); | |
3904 | chkp_build_bndstx (addr_first, rhs1, bounds, iter); | |
3905 | } | |
3906 | } | |
3907 | } | |
3908 | ||
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* Walk backwards from ASSIGN over the statements the walk above
     inserted; every GIMPLE_CALL among them is one of ours.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          tree fndecl = gimple_call_fndecl (stmt);
          struct cgraph_node *callee = cgraph_node::get_create (fndecl);
          struct cgraph_edge *new_edge;

          gcc_assert (fndecl == chkp_bndstx_fndecl
                      || fndecl == chkp_bndldx_fndecl
                      || fndecl == chkp_ret_bnd_fndecl);

          /* New edge inherits count/frequency from the inlined EDGE,
             then frequency is recomputed from the statement's bb.  */
          new_edge = edge->caller->create_edge (callee,
                                                as_a <gcall *> (stmt),
                                                edge->count,
                                                edge->frequency);
          new_edge->frequency = compute_call_stmt_bb_frequency
            (edge->caller->decl, gimple_bb (stmt));
        }
      gsi_prev (&iter);
    }
}
3947 | ||
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple stmt = gsi_stmt (i);
        gimple_stmt_iterator next = i;

        gsi_next (&next);

        /* Statements found after a bb-ending statement must be
           moved onto the fallthru edge.  */
        if (stmt_ends_bb_p (stmt)
            && !gsi_end_p (next))
          {
            edge fall = find_fallthru_edge (bb->succs);
            basic_block dest = NULL;
            int flags = 0;

            gcc_assert (fall);

            /* We cannot split abnormal edge.  Therefore we
               store its params, make it regular and then
               rebuild abnormal edge after split.  */
            if (fall->flags & EDGE_ABNORMAL)
              {
                flags = fall->flags & ~EDGE_FALLTHRU;
                dest = fall->dest;

                fall->flags &= ~EDGE_COMPLEX;
              }

            /* Detach each trailing statement and queue it for
               insertion on the fallthru edge.  */
            while (!gsi_end_p (next))
              {
                gimple next_stmt = gsi_stmt (next);
                gsi_remove (&next, false);
                gsi_insert_on_edge (fall, next_stmt);
              }

            gsi_commit_edge_inserts ();

            /* Re-create abnormal edge.  */
            if (dest)
              make_edge (bb, dest, flags);
          }
      }
}
4005 | ||
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.  Always returns NULL (never
   terminates the walk early).  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
                               void *data ATTRIBUTE_UNUSED)
{
  /* Only decls subject to instrumentation are replaced: not marked
     bnd_legacy, and either not a builtin at all or a normal builtin
     we instrument or that has a body.  */
  if (TREE_CODE (*op) == FUNCTION_DECL
      && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
          /* For builtins we replace pointers only for selected
             function and functions having definitions.  */
          || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
              && (chkp_instrument_normal_builtin (*op)
                  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      /* Never clone a clone; otherwise get (or create) the
         instrumented version.  */
      if (!node->instrumentation_clone)
        clone = chkp_maybe_create_clone (*op);

      if (clone)
        *op = clone->decl;
      /* The decl itself has no subtrees worth revisiting.  */
      *walk_subtrees = 0;
    }

  return NULL;
}
4035 | ||
4036 | /* This function searches for function pointers in statement | |
4037 | pointed by GSI and replaces them with pointers to instrumented | |
4038 | function versions. */ | |
4039 | static void | |
4040 | chkp_replace_function_pointers (gimple_stmt_iterator *gsi) | |
4041 | { | |
4042 | gimple stmt = gsi_stmt (*gsi); | |
4043 | /* For calls we want to walk call args only. */ | |
4044 | if (gimple_code (stmt) == GIMPLE_CALL) | |
4045 | { | |
4046 | unsigned i; | |
4047 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
4048 | walk_tree (gimple_call_arg_ptr (stmt, i), | |
4049 | chkp_replace_function_pointer, NULL, NULL); | |
4050 | } | |
4051 | else | |
4052 | walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL); | |
4053 | } | |
4054 | ||
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Inside a checker constructor all accesses are known safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor up front: instrumentation may
         modify the bb chain as we go.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple s = gsi_stmt (i);

          /* Skip statement marked to not be instrumented.  */
          if (chkp_marked_stmt_p (s))
            {
              gsi_next (&i);
              continue;
            }

          chkp_replace_function_pointers (&i);

          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              /* Check the store into the LHS (dirflag one) and the
                 loads from RHS operands (dirflag zero).  */
              chkp_process_stmt (&i, gimple_assign_lhs (s),
                                 gimple_location (s), integer_one_node,
                                 NULL_TREE, NULL_TREE, safe);
              chkp_process_stmt (&i, gimple_assign_rhs1 (s),
                                 gimple_location (s), integer_zero_node,
                                 NULL_TREE, NULL_TREE, safe);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                chkp_process_stmt (&i, gimple_assign_rhs2 (s),
                                   gimple_location (s), integer_zero_node,
                                   NULL_TREE, NULL_TREE, safe);
              break;

            case GIMPLE_RETURN:
              {
                greturn *r = as_a <greturn *> (s);
                if (gimple_return_retval (r) != NULL_TREE)
                  {
                    chkp_process_stmt (&i, gimple_return_retval (r),
                                       gimple_location (r),
                                       integer_zero_node,
                                       NULL_TREE, NULL_TREE, safe);

                    /* Additionally we need to add bounds
                       to return statement.  */
                    chkp_add_bounds_to_ret_stmt (&i);
                  }
              }
              break;

            case GIMPLE_CALL:
              chkp_add_bounds_to_call_stmt (&i);
              break;

            default:
              ;
            }

          gsi_next (&i);

          /* We do not need any actual pointer stores in checker
             static initializer.  */
          if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
              && gimple_code (s) == GIMPLE_ASSIGN
              && gimple_store_p (s))
            {
              gimple_stmt_iterator del_iter = gsi_for_stmt (s);
              gsi_remove (&del_iter, true);
              unlink_stmt_vdef (s);
              release_defs(s);
            }
        }
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
        {
          if (BOUNDED_P (arg))
            {
              tree bounds = chkp_get_next_bounds_parm (arg);
              tree def_ptr = ssa_default_def (cfun, arg);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              chkp_build_bndstx (chkp_build_addr_expr (arg),
                                 def_ptr ? def_ptr : arg,
                                 bounds, &iter);

              /* Skip bounds arg.  */
              arg = TREE_CHAIN (arg);
            }
          else if (chkp_type_has_pointer (TREE_TYPE (arg)))
            {
              tree orig_arg = arg;
              bitmap slots = BITMAP_ALLOC (NULL);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              bitmap_iterator bi;
              unsigned bnd_no;

              /* Collect pointer slot positions of the aggregate and
                 store bounds for each of them.  */
              chkp_find_bound_slots (TREE_TYPE (arg), slots);

              EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
                {
                  tree bounds = chkp_get_next_bounds_parm (arg);
                  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
                  tree addr = chkp_build_addr_expr (orig_arg);
                  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
                                     build_int_cst (ptr_type_node, offs));
                  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
                                     bounds, &iter);

                  arg = DECL_CHAIN (arg);
                }
              BITMAP_FREE (slots);
            }
        }
}
4190 | ||
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */

static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree fndecl;
          enum built_in_function fcode;

          /* Find builtins returning first arg and replace
             them with assignments.  (The assignments inside the
             condition are deliberate: fndecl/fcode are set only
             when the earlier conjuncts hold.)  */
          if (gimple_code (stmt) == GIMPLE_CALL
              && (fndecl = gimple_call_fndecl (stmt))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && (fcode = DECL_FUNCTION_CODE (fndecl))
              && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
            {
              /* These builtins all return their first argument.  */
              tree res = gimple_call_arg (stmt, 0);
              update_call_from_tree (&gsi, res);
              stmt = gsi_stmt (gsi);
              update_stmt (stmt);
            }
        }
    }
}
4228 | ||
/* Initialize pass.  Resets all per-function pass state; some maps
   (reg_bounds, bound_vars, bounds_map) are deleted first because
   they may survive from a previous function.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left on statements.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4274 | ||
/* Finalize instrumentation pass.  Releases the per-function state
   created in chkp_init (maps that persist across functions are
   deleted in chkp_init or chkp_finish_file instead).  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);
}
4291 | ||
/* Main instrumentation pass function.  Runs the full sequence:
   state setup, statement instrumentation, builtin cleanup,
   marking, CFG fixup and teardown.  Always returns 0 (no extra
   TODO flags).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  /* Mark the function so it will not be instrumented twice.  */
  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4310 | ||
4311 | /* Instrumentation pass gate. */ | |
4312 | static bool | |
4313 | chkp_gate (void) | |
4314 | { | |
4315 | return cgraph_node::get (cfun->decl)->instrumentation_clone | |
4316 | || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)); | |
4317 | } | |
4318 | ||
namespace {

/* Pass descriptor for the Pointer Bounds Checker pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* GIMPLE pass wrapper; all real work is delegated to chkp_gate
   and chkp_execute above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4361 | ||
/* Create an instance of the Pointer Bounds Checker
   instrumentation pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4367 | ||
4368 | #include "gt-tree-chkp.h" |