]>
Commit | Line | Data |
---|---|---|
058a1b7a | 1 | /* Pointer Bounds Checker instrumentation pass. |
d353bf18 | 2 | Copyright (C) 2014-2015 Free Software Foundation, Inc. |
058a1b7a | 3 | Contributed by Ilya Enkovich (ilya.enkovich@intel.com) |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
b20a8bb4 | 24 | #include "alias.h" |
25 | #include "symtab.h" | |
26 | #include "options.h" | |
b20a8bb4 | 27 | #include "tree.h" |
28 | #include "fold-const.h" | |
058a1b7a | 29 | #include "stor-layout.h" |
30 | #include "varasm.h" | |
058a1b7a | 31 | #include "target.h" |
32 | #include "tree-iterator.h" | |
33 | #include "tree-cfg.h" | |
34 | #include "langhooks.h" | |
35 | #include "tree-pass.h" | |
36 | #include "diagnostic.h" | |
058a1b7a | 37 | #include "cfgloop.h" |
38 | #include "stringpool.h" | |
39 | #include "tree-ssa-alias.h" | |
40 | #include "tree-ssanames.h" | |
41 | #include "tree-ssa-operands.h" | |
42 | #include "tree-ssa-address.h" | |
43 | #include "tree-ssa.h" | |
44 | #include "predict.h" | |
45 | #include "dominance.h" | |
46 | #include "cfg.h" | |
47 | #include "basic-block.h" | |
48 | #include "tree-ssa-loop-niter.h" | |
49 | #include "gimple-expr.h" | |
50 | #include "gimple.h" | |
51 | #include "tree-phinodes.h" | |
52 | #include "gimple-ssa.h" | |
53 | #include "ssa-iterators.h" | |
54 | #include "gimple-pretty-print.h" | |
55 | #include "gimple-iterator.h" | |
56 | #include "gimplify.h" | |
57 | #include "gimplify-me.h" | |
58 | #include "print-tree.h" | |
d53441c8 | 59 | #include "tm.h" |
60 | #include "hard-reg-set.h" | |
61 | #include "function.h" | |
62 | #include "rtl.h" | |
63 | #include "flags.h" | |
d53441c8 | 64 | #include "insn-config.h" |
65 | #include "expmed.h" | |
66 | #include "dojump.h" | |
67 | #include "explow.h" | |
68 | #include "calls.h" | |
69 | #include "emit-rtl.h" | |
70 | #include "stmt.h" | |
058a1b7a | 71 | #include "expr.h" |
72 | #include "tree-ssa-propagate.h" | |
73 | #include "gimple-fold.h" | |
74 | #include "tree-chkp.h" | |
75 | #include "gimple-walk.h" | |
76 | #include "rtl.h" /* For MEM_P, assign_temp. */ | |
77 | #include "tree-dfa.h" | |
78 | #include "ipa-ref.h" | |
79 | #include "lto-streamer.h" | |
80 | #include "cgraph.h" | |
81 | #include "ipa-chkp.h" | |
82 | #include "params.h" | |
058a1b7a | 83 | |
84 | /* Pointer Bounds Checker instruments code with memory checks to find | |
85 | out-of-bounds memory accesses. Checks are performed by computing | |
86 | bounds for each pointer and then comparing address of accessed | |
87 | memory before pointer dereferencing. | |
88 | ||
89 | 1. Function clones. | |
90 | ||
91 | See ipa-chkp.c. | |
92 | ||
93 | 2. Instrumentation. | |
94 | ||
95 | There are few things to instrument: | |
96 | ||
97 | a) Memory accesses - add checker calls to check address of accessed memory | |
98 | against bounds of dereferenced pointer. Obviously safe memory | |
99 | accesses like static variable access does not have to be instrumented | |
100 | with checks. | |
101 | ||
102 | Example: | |
103 | ||
104 | val_2 = *p_1; | |
105 | ||
106 | with 4 bytes access is transformed into: | |
107 | ||
108 | __builtin___chkp_bndcl (__bound_tmp.1_3, p_1); | |
109 | D.1_4 = p_1 + 3; | |
110 | __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4); | |
111 | val_2 = *p_1; | |
112 | ||
113 | where __bound_tmp.1_3 are bounds computed for pointer p_1, | |
114 | __builtin___chkp_bndcl is a lower bound check and | |
115 | __builtin___chkp_bndcu is an upper bound check. | |
116 | ||
117 | b) Pointer stores. | |
118 | ||
119 | When pointer is stored in memory we need to store its bounds. To | |
120 | achieve compatibility of instrumented code with regular codes | |
121 | we have to keep data layout and store bounds in special bound tables | |
122 | via special checker call. Implementation of bounds table may vary for | |
123 | different platforms. It has to associate pointer value and its | |
124 | location (it is required because we may have two equal pointers | |
125 | with different bounds stored in different places) with bounds. | |
126 | Another checker builtin allows to get bounds for specified pointer | |
127 | loaded from specified location. | |
128 | ||
129 | Example: | |
130 | ||
131 | buf1[i_1] = &buf2; | |
132 | ||
133 | is transformed into: | |
134 | ||
135 | buf1[i_1] = &buf2; | |
136 | D.1_2 = &buf1[i_1]; | |
137 | __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2); | |
138 | ||
139 | where __bound_tmp.1_2 are bounds of &buf2. | |
140 | ||
141 | c) Static initialization. | |
142 | ||
143 | The special case of pointer store is static pointer initialization. | |
144 | Bounds initialization is performed in a few steps: | |
145 | - register all static initializations in front-end using | |
146 | chkp_register_var_initializer | |
147 | - when file compilation finishes we create functions with special | |
148 | attribute 'chkp ctor' and put explicit initialization code | |
149 | (assignments) for all statically initialized pointers. | |
150 | - when checker constructor is compiled checker pass adds required | |
151 | bounds initialization for all statically initialized pointers | |
152 | - since we do not actually need excess pointers initialization | |
153 | in checker constructor we remove such assignments from them | |
154 | ||
155 | d) Calls. | |
156 | ||
157 | For each call in the code we add additional arguments to pass | |
158 | bounds for pointer arguments. We determine type of call arguments | |
159 | using arguments list from function declaration; if function | |
160 | declaration is not available we use function type; otherwise | |
161 | (e.g. for unnamed arguments) we use type of passed value. Function | |
162 | declaration/type is replaced with the instrumented one. | |
163 | ||
164 | Example: | |
165 | ||
166 | val_1 = foo (&buf1, &buf2, &buf1, 0); | |
167 | ||
168 | is translated into: | |
169 | ||
170 | val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3, | |
171 | &buf1, __bound_tmp.1_2, 0); | |
172 | ||
173 | e) Returns. | |
174 | ||
175 | If function returns a pointer value we have to return bounds also. | |
176 | A new operand was added for return statement to hold returned bounds. | |
177 | ||
178 | Example: | |
179 | ||
180 | return &_buf1; | |
181 | ||
182 | is transformed into | |
183 | ||
184 | return &_buf1, __bound_tmp.1_1; | |
185 | ||
186 | 3. Bounds computation. | |
187 | ||
188 | Compiler is fully responsible for computing bounds to be used for each | |
189 | memory access. The first step for bounds computation is to find the | |
190 | origin of pointer dereferenced for memory access. Basing on pointer | |
191 | origin we define a way to compute its bounds. There are just few | |
192 | possible cases: | |
193 | ||
194 | a) Pointer is returned by call. | |
195 | ||
196 | In this case we use corresponding checker builtin method to obtain returned | |
197 | bounds. | |
198 | ||
199 | Example: | |
200 | ||
201 | buf_1 = malloc (size_2); | |
202 | foo (buf_1); | |
203 | ||
204 | is translated into: | |
205 | ||
206 | buf_1 = malloc (size_2); | |
207 | __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1); | |
208 | foo (buf_1, __bound_tmp.1_3); | |
209 | ||
210 | b) Pointer is an address of an object. | |
211 | ||
212 | In this case compiler tries to compute objects size and create corresponding | |
213 | bounds. If object has incomplete type then special checker builtin is used to | |
214 | obtain its size at runtime. | |
215 | ||
216 | Example: | |
217 | ||
218 | foo () | |
219 | { | |
220 | <unnamed type> __bound_tmp.3; | |
221 | static int buf[100]; | |
222 | ||
223 | <bb 3>: | |
224 | __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400); | |
225 | ||
226 | <bb 2>: | |
227 | return &buf, __bound_tmp.3_2; | |
228 | } | |
229 | ||
230 | Example: | |
231 | ||
232 | Address of an object 'extern int buf[]' with incomplete type is | |
233 | returned. | |
234 | ||
235 | foo () | |
236 | { | |
237 | <unnamed type> __bound_tmp.4; | |
238 | long unsigned int __size_tmp.3; | |
239 | ||
240 | <bb 3>: | |
241 | __size_tmp.3_4 = __builtin_ia32_sizeof (buf); | |
242 | __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4); | |
243 | ||
244 | <bb 2>: | |
245 | return &buf, __bound_tmp.4_3; | |
246 | } | |
247 | ||
248 | c) Pointer is the result of object narrowing. | |
249 | ||
250 | It happens when we use pointer to an object to compute pointer to a part | |
251 | of an object. E.g. we take pointer to a field of a structure. In this | |
252 | case we perform bounds intersection using bounds of original object and | |
253 | bounds of object's part (which are computed basing on its type). | |
254 | ||
255 | There may be some debatable questions about when narrowing should occur | |
256 | and when it should not. To avoid false bound violations in correct | |
257 | programs we do not perform narrowing when address of an array element is | |
258 | obtained (it has address of the whole array) and when address of the first | |
259 | structure field is obtained (because it is guaranteed to be equal to | |
260 | address of the whole structure and it is legal to cast it back to structure). | |
261 | ||
262 | Default narrowing behavior may be changed using compiler flags. | |
263 | ||
264 | Example: | |
265 | ||
266 | In this example address of the second structure field is returned. | |
267 | ||
268 | foo (struct A * p, __bounds_type __bounds_of_p) | |
269 | { | |
270 | <unnamed type> __bound_tmp.3; | |
271 | int * _2; | |
272 | int * _5; | |
273 | ||
274 | <bb 2>: | |
275 | _5 = &p_1(D)->second_field; | |
276 | __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4); | |
277 | __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6, | |
278 | __bounds_of_p_3(D)); | |
279 | _2 = &p_1(D)->second_field; | |
280 | return _2, __bound_tmp.3_8; | |
281 | } | |
282 | ||
283 | Example: | |
284 | ||
285 | In this example address of the first field of array element is returned. | |
286 | ||
287 | foo (struct A * p, __bounds_type __bounds_of_p, int i) | |
288 | { | |
289 | long unsigned int _3; | |
290 | long unsigned int _4; | |
291 | struct A * _6; | |
292 | int * _7; | |
293 | ||
294 | <bb 2>: | |
295 | _3 = (long unsigned int) i_1(D); | |
296 | _4 = _3 * 8; | |
297 | _6 = p_5(D) + _4; | |
298 | _7 = &_6->first_field; | |
299 | return _7, __bounds_of_p_2(D); | |
300 | } | |
301 | ||
302 | ||
303 | d) Pointer is the result of pointer arithmetic or type cast. | |
304 | ||
305 | In this case bounds of the base pointer are used. In case of binary | |
306 | operation producing a pointer we are analyzing data flow further | |
307 | looking for operand's bounds. One operand is considered as a base | |
308 | if it has some valid bounds. If we fall into a case when none of | |
309 | operands (or both of them) has valid bounds, a default bounds value | |
310 | is used. | |
311 | ||
312 | Trying to find out bounds for binary operations we may fall into | |
313 | cyclic dependencies for pointers. To avoid infinite recursion all | |
314 | walked phi nodes instantly obtain corresponding bounds but created | |
315 | bounds are marked as incomplete. It helps us to stop DF walk during | |
316 | bounds search. | |
317 | ||
318 | When we reach pointer source, some args of incomplete bounds phi obtain | |
319 | valid bounds and those values are propagated further through phi nodes. | |
320 | If no valid bounds were found for phi node then we mark its result as | |
321 | invalid bounds. Process stops when all incomplete bounds become either | |
322 | valid or invalid and we are able to choose a pointer base. | |
323 | ||
324 | e) Pointer is loaded from the memory. | |
325 | ||
326 | In this case we just need to load bounds from the bounds table. | |
327 | ||
328 | Example: | |
329 | ||
330 | foo () | |
331 | { | |
332 | <unnamed type> __bound_tmp.3; | |
333 | static int * buf; | |
334 | int * _2; | |
335 | ||
336 | <bb 2>: | |
337 | _2 = buf; | |
338 | __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2); | |
339 | return _2, __bound_tmp.3_4; | |
340 | } | |
341 | ||
342 | */ | |
343 | ||
/* Callback signature used when walking pointer/bounds assignments:
   receives the two assignment operands plus an arbitrary context
   pointer.  */
typedef void (*assign_handler)(tree, tree, void *);

/* Forward declarations for the mutually recursive bounds-search
   routines defined later in this file.  */
static tree chkp_get_zero_bounds ();
static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
				     gimple_stmt_iterator *iter);
static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
						tree *elt, bool *safe,
						bool *bitfield,
						tree *bounds,
						gimple_stmt_iterator *iter,
						bool innermost_bounds);
356 | ||
/* Convenience accessors for the checker builtins supplied by the
   target.  Each macro expands to the FUNCTION_DECL returned by the
   targetm.builtin_chkp_function hook for the given builtin code
   (evaluated on every use, not cached here).  */
#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
379 | ||
/* Unsigned integer type with pointer precision.  */
static GTY (()) tree chkp_uintptr_type;

/* Global variables holding statically known zero/none bounds
   (used when flag_chkp_use_static_const_bounds is set).  */
static GTY (()) tree chkp_zero_bounds_var;
static GTY (()) tree chkp_none_bounds_var;

/* Per-function state, reset for each instrumented function.  */
static GTY (()) basic_block entry_block;
static GTY (()) tree zero_bounds;
static GTY (()) tree none_bounds;
/* Sentinel bounds value marking bounds not yet computed.  */
static GTY (()) tree incomplete_bounds;
/* Shared temporaries for bounds and size values.  */
static GTY (()) tree tmp_var;
static GTY (()) tree size_tmp_var;
static GTY (()) bitmap chkp_abnormal_copies;

/* Bounds known to be invalid.  */
struct hash_set<tree> *chkp_invalid_bounds;
/* Bounds whose computation has been finished.  */
struct hash_set<tree> *chkp_completed_bounds_set;
/* Maps: pointer SSA name -> bounds, address -> bounds,
   incomplete bounds -> pointer, etc.  */
struct hash_map<tree, tree> *chkp_reg_bounds;
struct hash_map<tree, tree> *chkp_bound_vars;
struct hash_map<tree, tree> *chkp_reg_addr_bounds;
struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
struct hash_map<tree, tree> *chkp_static_var_bounds;

/* True while the instrumentation pass itself is running (affects
   how temporaries are created, see chkp_get_tmp_reg).  */
static bool in_chkp_pass;
403 | ||
/* Names and prefixes for compiler-generated checker entities.  */
#define CHKP_BOUND_TMP_NAME "__bound_tmp"
#define CHKP_SIZE_TMP_NAME "__size_tmp"
#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"

/* Static checker constructors may become very large and their
   compilation with optimization may take too much time.
   Therefore we put a limit to number of statements in one
   constructor.  Tests with 100 000 statically initialized
   pointers showed following compilation times on Sandy Bridge
   server (used -O2):
   limit 100 => ~18 sec.
   limit 300 => ~22 sec.
   limit 1000 => ~30 sec.
   limit 3000 => ~49 sec.
   limit 5000 => ~55 sec.
   limit 10000 => ~76 sec.
   limit 100000 => ~532 sec.  */
#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))

/* Statement list accumulated for a checker constructor being built.
   AVAIL appears to be the remaining statement budget before the
   MAX_STMTS_IN_STATIC_CHKP_CTOR limit is reached — confirm at the
   use sites outside this chunk.  */
struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};
432 | ||
433 | /* Return 1 if function FNDECL is instrumented by Pointer | |
434 | Bounds Checker. */ | |
435 | bool | |
436 | chkp_function_instrumented_p (tree fndecl) | |
437 | { | |
438 | return fndecl | |
439 | && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl)); | |
440 | } | |
441 | ||
442 | /* Mark function FNDECL as instrumented. */ | |
443 | void | |
444 | chkp_function_mark_instrumented (tree fndecl) | |
445 | { | |
446 | if (chkp_function_instrumented_p (fndecl)) | |
447 | return; | |
448 | ||
449 | DECL_ATTRIBUTES (fndecl) | |
450 | = tree_cons (get_identifier ("chkp instrumented"), NULL, | |
451 | DECL_ATTRIBUTES (fndecl)); | |
452 | } | |
453 | ||
454 | /* Return true when STMT is builtin call to instrumentation function | |
455 | corresponding to CODE. */ | |
456 | ||
457 | bool | |
458 | chkp_gimple_call_builtin_p (gimple call, | |
459 | enum built_in_function code) | |
460 | { | |
461 | tree fndecl; | |
462 | if (is_gimple_call (call) | |
463 | && (fndecl = targetm.builtin_chkp_function (code)) | |
464 | && gimple_call_fndecl (call) == fndecl) | |
465 | return true; | |
466 | return false; | |
467 | } | |
468 | ||
/* Emit code to store zero bounds for PTR located at MEM.
   Runs at expansion time: builds a bndstx call storing zero
   bounds for pointer PTR at location MEM and expands it to RTL.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Reuse the preallocated zero-bounds variable when static const
     bounds are enabled; otherwise build a bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary of bounds type to hold the zero-bounds value.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* Address of the memory slot holding the pointer.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* First materialize the bounds value, then expand the store.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
489 | ||
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then result is stored
   in it.  Otherwise a temporary is created to
   hold returned value.

   GSI points to a position for a retbnd call;
   the new call is inserted after it.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple call;

  /* No destination supplied — create a fresh bounds temporary.  */
  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  /* __builtin___chkp_bndret (retval) yields the bounds returned
     alongside RETVAL.  */
  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
518 | ||
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  Returns CALL itself when no bounds args are present;
   in either case the returned call is marked as not carrying
   bounds.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  /* Temporary bitmap of argument positions holding bounds values;
     obstack is initialized/released locally around its use.  */
  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* Only copy the call if there is actually something to strip.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
544 | ||
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but the call carries no bounds: point the
     edge back at the original (non-instrumented) decl.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Non-instrumented callee but the call passes bounds (and is not
     one of the checker builtins that legitimately take bounds).  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
583 | ||
/* Mark statement S to not be instrumented.  Uses the pass-local
   flag GF_PLF_1 on the statement.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
590 | ||
/* Mark statement S to be instrumented (clears the pass-local
   GF_PLF_1 flag set by chkp_mark_stmt).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
597 | ||
/* Return 1 if statement S should not be instrumented
   (its pass-local GF_PLF_1 flag is set).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
604 | ||
605 | /* Get var to be used for bound temps. */ | |
606 | static tree | |
607 | chkp_get_tmp_var (void) | |
608 | { | |
609 | if (!tmp_var) | |
610 | tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME); | |
611 | ||
612 | return tmp_var; | |
613 | } | |
614 | ||
615 | /* Get SSA_NAME to be used as temp. */ | |
616 | static tree | |
617 | chkp_get_tmp_reg (gimple stmt) | |
618 | { | |
619 | if (in_chkp_pass) | |
620 | return make_ssa_name (chkp_get_tmp_var (), stmt); | |
621 | ||
622 | return make_temp_ssa_name (pointer_bounds_type_node, stmt, | |
623 | CHKP_BOUND_TMP_NAME); | |
624 | } | |
625 | ||
626 | /* Get var to be used for size temps. */ | |
627 | static tree | |
628 | chkp_get_size_tmp_var (void) | |
629 | { | |
630 | if (!size_tmp_var) | |
631 | size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME); | |
632 | ||
633 | return size_tmp_var; | |
634 | } | |
635 | ||
636 | /* Register bounds BND for address of OBJ. */ | |
637 | static void | |
638 | chkp_register_addr_bounds (tree obj, tree bnd) | |
639 | { | |
640 | if (bnd == incomplete_bounds) | |
641 | return; | |
642 | ||
643 | chkp_reg_addr_bounds->put (obj, bnd); | |
644 | ||
645 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
646 | { | |
647 | fprintf (dump_file, "Regsitered bound "); | |
648 | print_generic_expr (dump_file, bnd, 0); | |
649 | fprintf (dump_file, " for address of "); | |
650 | print_generic_expr (dump_file, obj, 0); | |
651 | fprintf (dump_file, "\n"); | |
652 | } | |
653 | } | |
654 | ||
655 | /* Return bounds registered for address of OBJ. */ | |
656 | static tree | |
657 | chkp_get_registered_addr_bounds (tree obj) | |
658 | { | |
659 | tree *slot = chkp_reg_addr_bounds->get (obj); | |
660 | return slot ? *slot : NULL_TREE; | |
661 | } | |
662 | ||
/* Mark BOUNDS as completed (their computation is finished).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
676 | ||
/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
683 | ||
/* Clear completed bound marks by discarding the whole set.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
691 | ||
692 | /* Mark BOUNDS associated with PTR as incomplete. */ | |
693 | static void | |
694 | chkp_register_incomplete_bounds (tree bounds, tree ptr) | |
695 | { | |
696 | chkp_incomplete_bounds_map->put (bounds, ptr); | |
697 | ||
698 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
699 | { | |
700 | fprintf (dump_file, "Regsitered incomplete bounds "); | |
701 | print_generic_expr (dump_file, bounds, 0); | |
702 | fprintf (dump_file, " for "); | |
703 | print_generic_expr (dump_file, ptr, 0); | |
704 | fprintf (dump_file, "\n"); | |
705 | } | |
706 | } | |
707 | ||
708 | /* Return 1 if BOUNDS are incomplete and 0 otherwise. */ | |
709 | static bool | |
710 | chkp_incomplete_bounds (tree bounds) | |
711 | { | |
712 | if (bounds == incomplete_bounds) | |
713 | return true; | |
714 | ||
715 | if (chkp_completed_bounds (bounds)) | |
716 | return false; | |
717 | ||
718 | return chkp_incomplete_bounds_map->get (bounds) != NULL; | |
719 | } | |
720 | ||
/* Clear incomplete bound marks by discarding the whole map.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
728 | ||
729 | /* Build and return bndmk call which creates bounds for structure | |
730 | pointed by PTR. Structure should have complete type. */ | |
731 | tree | |
732 | chkp_make_bounds_for_struct_addr (tree ptr) | |
733 | { | |
734 | tree type = TREE_TYPE (ptr); | |
735 | tree size; | |
736 | ||
737 | gcc_assert (POINTER_TYPE_P (type)); | |
738 | ||
739 | size = TYPE_SIZE (TREE_TYPE (type)); | |
740 | ||
741 | gcc_assert (size); | |
742 | ||
743 | return build_call_nary (pointer_bounds_type_node, | |
744 | build_fold_addr_expr (chkp_bndmk_fndecl), | |
745 | 2, ptr, size); | |
746 | } | |
747 | ||
748 | /* Traversal function for chkp_may_finish_incomplete_bounds. | |
749 | Set RES to 0 if at least one argument of phi statement | |
750 | defining bounds (passed in KEY arg) is unknown. | |
751 | Traversal stops when first unknown phi argument is found. */ | |
752 | bool | |
753 | chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED, | |
754 | bool *res) | |
755 | { | |
756 | gimple phi; | |
757 | unsigned i; | |
758 | ||
759 | gcc_assert (TREE_CODE (bounds) == SSA_NAME); | |
760 | ||
761 | phi = SSA_NAME_DEF_STMT (bounds); | |
762 | ||
763 | gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI); | |
764 | ||
765 | for (i = 0; i < gimple_phi_num_args (phi); i++) | |
766 | { | |
767 | tree phi_arg = gimple_phi_arg_def (phi, i); | |
768 | if (!phi_arg) | |
769 | { | |
770 | *res = false; | |
771 | /* Do not need to traverse further. */ | |
772 | return false; | |
773 | } | |
774 | } | |
775 | ||
776 | return true; | |
777 | } | |
778 | ||
/* Return 1 if all phi nodes created for bounds have their
   arguments computed (i.e. no phi arg is still unknown).  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* chkp_may_complete_phi_bounds clears RES and aborts the walk
     on the first phi with an unknown argument.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
791 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node: for each argument of the
   pointer phi (stored in *SLOT) find its bounds and append them
   as the corresponding argument of the bounds phi (BOUNDS).  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      /* Bounds phi args mirror the pointer phi's incoming edges.  */
      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
821 | ||
/* Mark BOUNDS as invalid (no valid source of bounds exists).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
835 | ||
836 | /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */ | |
837 | static bool | |
838 | chkp_valid_bounds (tree bounds) | |
839 | { | |
840 | if (bounds == zero_bounds || bounds == none_bounds) | |
841 | return false; | |
842 | ||
843 | return !chkp_invalid_bounds->contains (bounds); | |
844 | } | |
845 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on a previous iteration — nothing to do.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* Found one valid completed arg: the phi result becomes
	     valid; mark it completed (before recomputation, so the
	     recursion terminates) and rebuild its args.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
884 | ||
885 | /* Helper function for chkp_finish_incomplete_bounds. | |
886 | Marks all incompleted bounds as invalid. */ | |
887 | bool | |
888 | chkp_mark_invalid_bounds_walker (tree const &bounds, | |
889 | tree *slot ATTRIBUTE_UNUSED, | |
890 | void *res ATTRIBUTE_UNUSED) | |
891 | { | |
892 | if (!chkp_completed_bounds (bounds)) | |
893 | { | |
894 | chkp_mark_invalid_bounds (bounds); | |
895 | chkp_mark_completed_bounds (bounds); | |
896 | } | |
897 | return true; | |
898 | } | |
899 | ||
900 | /* When all bound phi nodes have all their args computed | |
901 | we have enough info to find valid bounds. We iterate | |
902 | through all incompleted bounds searching for valid | |
903 | bounds. Found valid bounds are marked as completed | |
904 | and all remaining incompleted bounds are recomputed. | |
905 | Process continues until no new valid bounds may be | |
906 | found. All remained incompleted bounds are marked as | |
907 | invalid (i.e. have no valid source of bounds). */ | |
908 | static void | |
909 | chkp_finish_incomplete_bounds (void) | |
910 | { | |
911 | bool found_valid; | |
912 | ||
913 | while (found_valid) | |
914 | { | |
915 | found_valid = false; | |
916 | ||
917 | chkp_incomplete_bounds_map-> | |
918 | traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid); | |
919 | ||
920 | if (found_valid) | |
921 | chkp_incomplete_bounds_map-> | |
922 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
923 | } | |
924 | ||
925 | chkp_incomplete_bounds_map-> | |
926 | traverse<void *, chkp_mark_invalid_bounds_walker> (NULL); | |
927 | chkp_incomplete_bounds_map-> | |
928 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
929 | ||
930 | chkp_erase_completed_bounds (); | |
931 | chkp_erase_incomplete_bounds (); | |
932 | } | |
933 | ||
934 | /* Return 1 if type TYPE is a pointer type or a | |
935 | structure having a pointer type as one of its fields. | |
936 | Otherwise return 0. */ | |
937 | bool | |
938 | chkp_type_has_pointer (const_tree type) | |
939 | { | |
940 | bool res = false; | |
941 | ||
942 | if (BOUNDED_TYPE_P (type)) | |
943 | res = true; | |
944 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
945 | { | |
946 | tree field; | |
947 | ||
948 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
949 | if (TREE_CODE (field) == FIELD_DECL) | |
950 | res = res || chkp_type_has_pointer (TREE_TYPE (field)); | |
951 | } | |
952 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
953 | res = chkp_type_has_pointer (TREE_TYPE (type)); | |
954 | ||
955 | return res; | |
956 | } | |
957 | ||
958 | unsigned | |
959 | chkp_type_bounds_count (const_tree type) | |
960 | { | |
961 | unsigned res = 0; | |
962 | ||
963 | if (!type) | |
964 | res = 0; | |
965 | else if (BOUNDED_TYPE_P (type)) | |
966 | res = 1; | |
967 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
968 | { | |
969 | bitmap have_bound; | |
970 | ||
971 | bitmap_obstack_initialize (NULL); | |
972 | have_bound = BITMAP_ALLOC (NULL); | |
973 | chkp_find_bound_slots (type, have_bound); | |
974 | res = bitmap_count_bits (have_bound); | |
975 | BITMAP_FREE (have_bound); | |
976 | bitmap_obstack_release (NULL); | |
977 | } | |
978 | ||
979 | return res; | |
980 | } | |
981 | ||
982 | /* Get bounds associated with NODE via | |
983 | chkp_set_bounds call. */ | |
984 | tree | |
985 | chkp_get_bounds (tree node) | |
986 | { | |
987 | tree *slot; | |
988 | ||
989 | if (!chkp_bounds_map) | |
990 | return NULL_TREE; | |
991 | ||
992 | slot = chkp_bounds_map->get (node); | |
993 | return slot ? *slot : NULL_TREE; | |
994 | } | |
995 | ||
996 | /* Associate bounds VAL with NODE. */ | |
997 | void | |
998 | chkp_set_bounds (tree node, tree val) | |
999 | { | |
1000 | if (!chkp_bounds_map) | |
1001 | chkp_bounds_map = new hash_map<tree, tree>; | |
1002 | ||
1003 | chkp_bounds_map->put (node, val); | |
1004 | } | |
1005 | ||
/* Check if statically initialized variable VAR require
   static bounds initialization.  If VAR is added into
   bounds initialization list then true is returned.
   Otherwise return false.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when instrumentation is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only statically allocated variables holding a pointer
     (directly or inside an aggregate) need their bounds
     initialized at startup.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1029 | ||
1030 | /* Helper function for chkp_finish_file. | |
1031 | ||
1032 | Add new modification statement (RHS is assigned to LHS) | |
1033 | into list of static initializer statementes (passed in ARG). | |
1034 | If statements list becomes too big, emit checker constructor | |
1035 | and start the new one. */ | |
1036 | static void | |
1037 | chkp_add_modification_to_stmt_list (tree lhs, | |
1038 | tree rhs, | |
1039 | void *arg) | |
1040 | { | |
1041 | struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg; | |
1042 | tree modify; | |
1043 | ||
1044 | if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) | |
1045 | rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs); | |
1046 | ||
1047 | modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs); | |
1048 | append_to_statement_list (modify, &stmts->stmts); | |
1049 | ||
1050 | stmts->avail--; | |
1051 | } | |
1052 | ||
1053 | /* Build and return ADDR_EXPR for specified object OBJ. */ | |
1054 | static tree | |
1055 | chkp_build_addr_expr (tree obj) | |
1056 | { | |
1057 | return TREE_CODE (obj) == TARGET_MEM_REF | |
1058 | ? tree_mem_ref_addr (ptr_type_node, obj) | |
1059 | : build_fold_addr_expr (obj); | |
1060 | } | |
1061 | ||
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* For string constants the size is known at compile time;
	 TREE_STRING_LENGTH includes the trailing zero, so the
	 last valid byte is at length - 1.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a call to the
	 runtime __chkp_sizeof routine for VAR.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* Treat a zero dynamic size as "infinite": use
	     (0 - lb) so that lb + size wraps to the maximum
	     address.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush accumulated initializers into a static constructor
     once the statement budget is exhausted.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1119 | ||
1120 | /* Return entry block to be used for checker initilization code. | |
1121 | Create new block if required. */ | |
1122 | static basic_block | |
1123 | chkp_get_entry_block (void) | |
1124 | { | |
1125 | if (!entry_block) | |
4302d619 | 1126 | entry_block |
1127 | = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest; | |
058a1b7a | 1128 | |
1129 | return entry_block; | |
1130 | } | |
1131 | ||
1132 | /* Return a bounds var to be used for pointer var PTR_VAR. */ | |
1133 | static tree | |
1134 | chkp_get_bounds_var (tree ptr_var) | |
1135 | { | |
1136 | tree bnd_var; | |
1137 | tree *slot; | |
1138 | ||
1139 | slot = chkp_bound_vars->get (ptr_var); | |
1140 | if (slot) | |
1141 | bnd_var = *slot; | |
1142 | else | |
1143 | { | |
1144 | bnd_var = create_tmp_reg (pointer_bounds_type_node, | |
1145 | CHKP_BOUND_TMP_NAME); | |
1146 | chkp_bound_vars->put (ptr_var, bnd_var); | |
1147 | } | |
1148 | ||
1149 | return bnd_var; | |
1150 | } | |
1151 | ||
/* If BND is an abnormal bounds copy, return a copied value.
   Otherwise return BND.

   NOTE(review): the name keeps its historical "orginal"
   misspelling since callers reference it by this name.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      /* An abnormal copy is a plain assignment whose RHS holds
	 the original bounds value.  */
      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}
058a1b7a | 1166 | |
/* Register bounds BND for object PTR in global bounds table.
   A copy of bounds may be created for abnormal ssa names.
   Returns bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  /* PTR is "abnormal" when it is a non-phi SSA name appearing
     in an abnormal phi.  */
  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid it we create a bounds
     copy in case it is computed for abnormal SSA name.

     We also cannot reuse such created copies for other pointers  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      /* Pick the variable to base the copy's SSA name on.  */
      if (abnormal_ptr)
	{
	  if (SSA_NAME_VAR (ptr))
	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
	}
      else
	bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find original
	 bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
      /* For undefined values we usually use none bounds
	 value but in case of abnormal edge it may cause
	 coalescing failures.  Use default definition of
	 bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
	{
	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using default def bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal default def SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	{
	  /* General case: materialize an explicit copy
	     assignment COPY = BND and insert it after the
	     defining statements.  */
	  tree copy;
	  gimple def = SSA_NAME_DEF_STMT (ptr);
	  gimple assign;
	  gimple_stmt_iterator gsi;

	  if (bnd_var)
	    copy = make_ssa_name (bnd_var);
	  else
	    copy = make_temp_ssa_name (pointer_bounds_type_node,
				       NULL,
				       CHKP_BOUND_TMP_NAME);
	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
	  assign = gimple_build_assign (copy, bnd);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Creating a copy of bounds ");
	      print_generic_expr (dump_file, bnd, 0);
	      fprintf (dump_file, " for abnormal SSA name ");
	      print_generic_expr (dump_file, ptr, 0);
	      fprintf (dump_file, "\n");
	    }

	  if (gimple_code (def) == GIMPLE_NOP)
	    {
	      /* PTR has no real defining statement; place the
		 copy in the checker entry block, before any
		 control statement ending it.  */
	      gsi = gsi_last_bb (chkp_get_entry_block ());
	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
	      else
		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }
	  else
	    {
	      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
	      /* Sometimes (e.g. when we load a pointer from a
		 memory) bounds are produced later than a pointer.
		 We need to insert bounds copy appropriately.  */
	      if (gimple_code (bnd_def) != GIMPLE_NOP
		  && stmt_dominates_stmt_p (def, bnd_def))
		gsi = gsi_for_stmt (bnd_def);
	      else
		gsi = gsi_for_stmt (def);
	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
	    }

	  bnd = copy;
	}

      /* Remember the copy so it is never reused for another
	 pointer (see comment above).  */
      if (abnormal_ptr)
	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Regsitered bound ");
      print_generic_expr (dump_file, bnd, 0);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, "\n");
    }

  return bnd;
}
1295 | ||
1296 | /* Get bounds registered for object PTR in global bounds table. */ | |
1297 | static tree | |
1298 | chkp_get_registered_bounds (tree ptr) | |
1299 | { | |
1300 | tree *slot; | |
1301 | ||
1302 | if (!chkp_reg_bounds) | |
1303 | return NULL_TREE; | |
1304 | ||
1305 | slot = chkp_reg_bounds->get (ptr); | |
1306 | return slot ? *slot : NULL_TREE; | |
1307 | } | |
1308 | ||
/* Add bound retvals to return statement pointed by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a return with no value.  */
  if (!retval)
    return;

  /* Attach bounds only when the function result is a bounded
     (pointer) value.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1331 | ||
1332 | /* Force OP to be suitable for using as an argument for call. | |
1333 | New statements (if any) go to SEQ. */ | |
1334 | static tree | |
1335 | chkp_force_gimple_call_op (tree op, gimple_seq *seq) | |
1336 | { | |
1337 | gimple_seq stmts; | |
1338 | gimple_stmt_iterator si; | |
1339 | ||
1340 | op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE); | |
1341 | ||
1342 | for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si)) | |
1343 | chkp_mark_stmt (gsi_stmt (si)); | |
1344 | ||
1345 | gimple_seq_add_seq (seq, stmts); | |
1346 | ||
1347 | return op; | |
1348 | } | |
1349 | ||
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Checks against zero bounds never fail, so they may be
     skipped unless this function is itself instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero means a read access; honor the flag
     disabling read checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG of one means a write access; honor the flag
     disabling write checks.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Gimplify ADDR so it can be used as a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1396 | ||
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Checks against zero bounds never fail, so they may be
     skipped unless this function is itself instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero means a read access; honor the flag
     disabling read checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG of one means a write access; honor the flag
     disabling write checks.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Gimplify ADDR so it can be used as a call argument.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1443 | ||
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower check on the first accessed byte, upper check on
     the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1457 | ||
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  /* Remember the call's position: inserting checks moves GSI,
     while CALL_ITER keeps pointing at the call to remove.  */
  gimple_stmt_iterator call_iter = *gsi;
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      /* For the range variant the upper check is done on the
	 last accessed byte: ADDR + SIZE - 1.  */
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  gsi_remove (&call_iter, true);
}
1489 | ||
1490 | /* Replace call to _bnd_get_ptr_* pointed by GSI with | |
1491 | corresponding bounds extract call. */ | |
1492 | ||
1493 | void | |
1494 | chkp_replace_extract_builtin (gimple_stmt_iterator *gsi) | |
1495 | { | |
1496 | gimple call = gsi_stmt (*gsi); | |
1497 | tree fndecl = gimple_call_fndecl (call); | |
1498 | tree addr = gimple_call_arg (call, 0); | |
1499 | tree bounds = chkp_find_bounds (addr, gsi); | |
1500 | gimple extract; | |
1501 | ||
1502 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND) | |
1503 | fndecl = chkp_extract_lower_fndecl; | |
1504 | else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND) | |
1505 | fndecl = chkp_extract_upper_fndecl; | |
1506 | else | |
1507 | gcc_unreachable (); | |
1508 | ||
1509 | extract = gimple_build_call (fndecl, 1, bounds); | |
1510 | gimple_call_set_lhs (extract, gimple_call_lhs (call)); | |
1511 | chkp_mark_stmt (extract); | |
1512 | ||
1513 | gsi_replace (gsi, extract, false); | |
1514 | } | |
1515 | ||
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      /* Folding two constants cannot fail.  */
      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1542 | ||
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Element offset is IDX * ESIZE; both are constants
	 here, so folding cannot fail.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
1576 | ||
1577 | /* Helper function for chkp_add_bounds_to_call_stmt. | |
1578 | Fill ALL_BOUNDS output array with created bounds. | |
1579 | ||
1580 | OFFS is used for recursive calls and holds basic | |
1581 | offset of TYPE in outer structure in bits. | |
1582 | ||
1583 | ITER points a position where bounds are searched. | |
1584 | ||
1585 | ALL_BOUNDS[i] is filled with elem bounds if there | |
1586 | is a field in TYPE which has pointer type and offset | |
1587 | equal to i * POINTER_SIZE in bits. */ | |
1588 | static void | |
1589 | chkp_find_bounds_for_elem (tree elem, tree *all_bounds, | |
1590 | HOST_WIDE_INT offs, | |
1591 | gimple_stmt_iterator *iter) | |
1592 | { | |
1593 | tree type = TREE_TYPE (elem); | |
1594 | ||
1595 | if (BOUNDED_TYPE_P (type)) | |
1596 | { | |
1597 | if (!all_bounds[offs / POINTER_SIZE]) | |
1598 | { | |
3ec9c447 | 1599 | tree temp = make_temp_ssa_name (type, NULL, ""); |
058a1b7a | 1600 | gimple assign = gimple_build_assign (temp, elem); |
1601 | gimple_stmt_iterator gsi; | |
1602 | ||
1603 | gsi_insert_before (iter, assign, GSI_SAME_STMT); | |
1604 | gsi = gsi_for_stmt (assign); | |
1605 | ||
1606 | all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi); | |
1607 | } | |
1608 | } | |
1609 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
1610 | { | |
1611 | tree field; | |
1612 | ||
1613 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
1614 | if (TREE_CODE (field) == FIELD_DECL) | |
1615 | { | |
1616 | tree base = unshare_expr (elem); | |
1617 | tree field_ref = chkp_build_component_ref (base, field); | |
1618 | HOST_WIDE_INT field_offs | |
1619 | = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field)); | |
1620 | if (DECL_FIELD_OFFSET (field)) | |
1621 | field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8; | |
1622 | ||
1623 | chkp_find_bounds_for_elem (field_ref, all_bounds, | |
1624 | offs + field_offs, iter); | |
1625 | } | |
1626 | } | |
1627 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
1628 | { | |
1629 | tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); | |
1630 | tree etype = TREE_TYPE (type); | |
1631 | HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype)); | |
1632 | unsigned HOST_WIDE_INT cur; | |
1633 | ||
1634 | if (!maxval || integer_minus_onep (maxval)) | |
1635 | return; | |
1636 | ||
1637 | for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++) | |
1638 | { | |
1639 | tree base = unshare_expr (elem); | |
1640 | tree arr_elem = chkp_build_array_ref (base, etype, | |
1641 | TYPE_SIZE (etype), | |
1642 | cur); | |
1643 | chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize, | |
1644 | iter); | |
1645 | } | |
1646 | } | |
1647 | } | |
1648 | ||
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* TYPE itself is a pointer: one slot at OFFS.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field position = byte offset * 8 + bit offset.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip empty and variable-length arrays.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1695 | ||
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from an empty bitmap; slots are computed relative
     to offset zero.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1705 | ||
f21337ef | 1706 | /* Return 1 if call to FNDECL should be instrumented |
1707 | and 0 otherwise. */ | |
1708 | ||
1709 | static bool | |
1710 | chkp_instrument_normal_builtin (tree fndecl) | |
1711 | { | |
1712 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1713 | { | |
1714 | case BUILT_IN_STRLEN: | |
1715 | case BUILT_IN_STRCPY: | |
1716 | case BUILT_IN_STRNCPY: | |
1717 | case BUILT_IN_STPCPY: | |
1718 | case BUILT_IN_STPNCPY: | |
1719 | case BUILT_IN_STRCAT: | |
1720 | case BUILT_IN_STRNCAT: | |
1721 | case BUILT_IN_MEMCPY: | |
1722 | case BUILT_IN_MEMPCPY: | |
1723 | case BUILT_IN_MEMSET: | |
1724 | case BUILT_IN_MEMMOVE: | |
1725 | case BUILT_IN_BZERO: | |
1726 | case BUILT_IN_STRCMP: | |
1727 | case BUILT_IN_STRNCMP: | |
1728 | case BUILT_IN_BCMP: | |
1729 | case BUILT_IN_MEMCMP: | |
1730 | case BUILT_IN_MEMCPY_CHK: | |
1731 | case BUILT_IN_MEMPCPY_CHK: | |
1732 | case BUILT_IN_MEMMOVE_CHK: | |
1733 | case BUILT_IN_MEMSET_CHK: | |
1734 | case BUILT_IN_STRCPY_CHK: | |
1735 | case BUILT_IN_STRNCPY_CHK: | |
1736 | case BUILT_IN_STPCPY_CHK: | |
1737 | case BUILT_IN_STPNCPY_CHK: | |
1738 | case BUILT_IN_STRCAT_CHK: | |
1739 | case BUILT_IN_STRNCAT_CHK: | |
1740 | case BUILT_IN_MALLOC: | |
1741 | case BUILT_IN_CALLOC: | |
1742 | case BUILT_IN_REALLOC: | |
1743 | return 1; | |
1744 | ||
1745 | default: | |
1746 | return 0; | |
1747 | } | |
1748 | } | |
1749 | ||
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  These user builtins are handled
     elsewhere (when bounds for their operands are computed), so the
     calls themselves need no extra arguments.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  The user builtin takes (ptr, lb, size);
     here the second argument is replaced by the bounds of PTR.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  The original call statement is removed after
     the bndstx sequence is emitted.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      /* always_inline builtins are instrumented only when an
	 instrumented clone with a body exists to be inlined.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each original argument is kept and,
     where the argument's type carries pointers, its bounds are appended
     right after it.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  For varargs past the formal list
	 (ARG exhausted) the actual argument type is used.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Aggregate containing pointers: collect bounds for every
	     pointer-sized slot and append the ones actually found.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, reuse the original statement; otherwise
     build a fresh call with the extended argument vector.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  SSA defs of the old
     statement are re-pointed at the new call before the swap.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1967 | ||
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build a public static VAR_DECL of bounds type; public + one-only
     (set below) lets identical bounds vars merge across units.  */
  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer encodes [LB, UB] in the target's bounds
     representation.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2016 | ||
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Force LB and SIZE into forms valid as call operands; any
     statements needed for that are accumulated in SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the bounds value and mark it as a
     checker-generated statement.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an insertion point was supplied; entry
     block insertion is always "before".  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2069 | ||
2070 | /* Return var holding zero bounds. */ | |
2071 | tree | |
2072 | chkp_get_zero_bounds_var (void) | |
2073 | { | |
058a1b7a | 2074 | if (!chkp_zero_bounds_var) |
2075 | chkp_zero_bounds_var | |
2076 | = chkp_make_static_const_bounds (0, -1, | |
2077 | CHKP_ZERO_BOUNDS_VAR_NAME); | |
2078 | return chkp_zero_bounds_var; | |
2079 | } | |
2080 | ||
2081 | /* Return var holding none bounds. */ | |
2082 | tree | |
2083 | chkp_get_none_bounds_var (void) | |
2084 | { | |
058a1b7a | 2085 | if (!chkp_none_bounds_var) |
2086 | chkp_none_bounds_var | |
2087 | = chkp_make_static_const_bounds (-1, 0, | |
2088 | CHKP_NONE_BOUNDS_VAR_NAME); | |
2089 | return chkp_none_bounds_var; | |
2090 | } | |
2091 | ||
/* Return SSA_NAME used to represent zero bounds.  The result is cached
   in the file-scope ZERO_BOUNDS variable.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* Either load the value from the static const bounds var or emit a
     bndmk at function entry, depending on the chkp flags.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2120 | ||
/* Return SSA_NAME used to represent none bounds.  The result is cached
   in the file-scope NONE_BOUNDS variable.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  /* Either load the value from the static const bounds var or emit a
     bndmk at function entry, depending on the chkp flags.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* lb = -1, size = 2 yields an "inverted" (always failing) range;
       presumably matches the [-1, 0] static var — TODO confirm against
       the target's bounds encoding.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2150 | ||
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Currently zero
   bounds are used for such results.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2158 | ||
/* Return bounds to be used for loads of non-pointer values.
   Currently zero bounds are used for such loads.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2165 | ||
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions never return bounds.  */
  if (gimple_call_internal_p (call))
    return false;

  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* A call already carrying bounds returns them too.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Non-instrumented builtins return bounds only when an
	 always_inline instrumented clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
2203 | ||
/* Build bounds returned by CALL.  Bounds are either derived from
   known builtin semantics, taken from an argument the call returns,
   or obtained via a generated bndret call.  The result is registered
   for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  Bounds are [lhs, lhs + size).  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  NOTE: the condition intentionally assigns
     RETFLAGS as a side effect.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; skip over
	     interleaved bounds args to find the actual position.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2310 | ||
/* Return bounds used as returned by call
   which produced SSA name VAL.  I.e. find the bndret call among
   immediate uses of VAL; return NULL if there is none.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL must be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2330 | ||
/* Check the next parameter for the given PARM is bounds
   and return it's default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  tree bounds = TREE_CHAIN (parm);
  /* The instrumented decl must interleave each bounded parameter
     with its bounds parameter.  */
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      /* No default def yet: create it with an empty def stmt.  */
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
2346 | ||
/* Return bounds to be used for input argument PARM.  PARM is the
   default-def SSA_NAME of a PARM_DECL.  Previously registered bounds
   are reused; otherwise bounds are chosen by parameter kind and
   registered for later lookups.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounded parameter: bounds come in via the companion
	     bounds parameter of the instrumented decl.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2410 | ||
2411 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2412 | arguments. */ | |
2413 | tree | |
2414 | chkp_build_bndldx_call (tree addr, tree ptr) | |
2415 | { | |
2416 | tree fn = build1 (ADDR_EXPR, | |
2417 | build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)), | |
2418 | chkp_bndldx_fndecl); | |
2419 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)), | |
2420 | fn, 2, addr, ptr); | |
2421 | CALL_WITH_BOUNDS_P (call) = true; | |
2422 | return call; | |
2423 | } | |
2424 | ||
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Force operands into forms valid for a call; helper statements
     accumulate in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2459 | ||
2460 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2461 | arguments. */ | |
2462 | tree | |
2463 | chkp_build_bndstx_call (tree addr, tree ptr, tree bounds) | |
2464 | { | |
2465 | tree fn = build1 (ADDR_EXPR, | |
2466 | build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)), | |
2467 | chkp_bndstx_fndecl); | |
2468 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)), | |
2469 | fn, 3, ptr, bounds, addr); | |
2470 | CALL_WITH_BOUNDS_P (call) = true; | |
2471 | return call; | |
2472 | } | |
2473 | ||
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Force operands into forms valid for a call; helper statements
     accumulate in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Builtin argument order is (ptr, bounds, addr).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2504 | ||
2505 | /* Compute bounds for pointer NODE which was assigned in | |
2506 | assignment statement ASSIGN. Return computed bounds. */ | |
2507 | static tree | |
2508 | chkp_compute_bounds_for_assignment (tree node, gimple assign) | |
2509 | { | |
2510 | enum tree_code rhs_code = gimple_assign_rhs_code (assign); | |
2511 | tree rhs1 = gimple_assign_rhs1 (assign); | |
2512 | tree bounds = NULL_TREE; | |
2513 | gimple_stmt_iterator iter = gsi_for_stmt (assign); | |
2514 | ||
2515 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2516 | { | |
2517 | fprintf (dump_file, "Computing bounds for assignment: "); | |
2518 | print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS); | |
2519 | } | |
2520 | ||
2521 | switch (rhs_code) | |
2522 | { | |
2523 | case MEM_REF: | |
2524 | case TARGET_MEM_REF: | |
2525 | case COMPONENT_REF: | |
2526 | case ARRAY_REF: | |
2527 | /* We need to load bounds from the bounds table. */ | |
2528 | bounds = chkp_find_bounds_loaded (node, rhs1, &iter); | |
2529 | break; | |
2530 | ||
2531 | case VAR_DECL: | |
2532 | case SSA_NAME: | |
2533 | case ADDR_EXPR: | |
2534 | case POINTER_PLUS_EXPR: | |
2535 | case NOP_EXPR: | |
2536 | case CONVERT_EXPR: | |
2537 | case INTEGER_CST: | |
2538 | /* Bounds are just propagated from RHS. */ | |
2539 | bounds = chkp_find_bounds (rhs1, &iter); | |
2540 | break; | |
2541 | ||
2542 | case VIEW_CONVERT_EXPR: | |
2543 | /* Bounds are just propagated from RHS. */ | |
2544 | bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter); | |
2545 | break; | |
2546 | ||
2547 | case PARM_DECL: | |
2548 | if (BOUNDED_P (rhs1)) | |
2549 | { | |
2550 | /* We need to load bounds from the bounds table. */ | |
2551 | bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1), | |
2552 | node, &iter); | |
2553 | TREE_ADDRESSABLE (rhs1) = 1; | |
2554 | } | |
2555 | else | |
2556 | bounds = chkp_get_nonpointer_load_bounds (); | |
2557 | break; | |
2558 | ||
2559 | case MINUS_EXPR: | |
2560 | case PLUS_EXPR: | |
2561 | case BIT_AND_EXPR: | |
2562 | case BIT_IOR_EXPR: | |
2563 | case BIT_XOR_EXPR: | |
2564 | { | |
2565 | tree rhs2 = gimple_assign_rhs2 (assign); | |
2566 | tree bnd1 = chkp_find_bounds (rhs1, &iter); | |
2567 | tree bnd2 = chkp_find_bounds (rhs2, &iter); | |
2568 | ||
2569 | /* First we try to check types of operands. If it | |
2570 | does not help then look at bound values. | |
2571 | ||
2572 | If some bounds are incomplete and other are | |
2573 | not proven to be valid (i.e. also incomplete | |
2574 | or invalid because value is not pointer) then | |
2575 | resulting value is incomplete and will be | |
2576 | recomputed later in chkp_finish_incomplete_bounds. */ | |
2577 | if (BOUNDED_P (rhs1) | |
2578 | && !BOUNDED_P (rhs2)) | |
2579 | bounds = bnd1; | |
2580 | else if (BOUNDED_P (rhs2) | |
2581 | && !BOUNDED_P (rhs1) | |
2582 | && rhs_code != MINUS_EXPR) | |
2583 | bounds = bnd2; | |
2584 | else if (chkp_incomplete_bounds (bnd1)) | |
2585 | if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR | |
2586 | && !chkp_incomplete_bounds (bnd2)) | |
2587 | bounds = bnd2; | |
2588 | else | |
2589 | bounds = incomplete_bounds; | |
2590 | else if (chkp_incomplete_bounds (bnd2)) | |
2591 | if (chkp_valid_bounds (bnd1) | |
2592 | && !chkp_incomplete_bounds (bnd1)) | |
2593 | bounds = bnd1; | |
2594 | else | |
2595 | bounds = incomplete_bounds; | |
2596 | else if (!chkp_valid_bounds (bnd1)) | |
2597 | if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR) | |
2598 | bounds = bnd2; | |
2599 | else if (bnd2 == chkp_get_zero_bounds ()) | |
2600 | bounds = bnd2; | |
2601 | else | |
2602 | bounds = bnd1; | |
2603 | else if (!chkp_valid_bounds (bnd2)) | |
2604 | bounds = bnd1; | |
2605 | else | |
2606 | /* Seems both operands may have valid bounds | |
2607 | (e.g. pointer minus pointer). In such case | |
2608 | use default invalid op bounds. */ | |
2609 | bounds = chkp_get_invalid_op_bounds (); | |
2610 | } | |
2611 | break; | |
2612 | ||
2613 | case BIT_NOT_EXPR: | |
2614 | case NEGATE_EXPR: | |
2615 | case LSHIFT_EXPR: | |
2616 | case RSHIFT_EXPR: | |
2617 | case LROTATE_EXPR: | |
2618 | case RROTATE_EXPR: | |
2619 | case EQ_EXPR: | |
2620 | case NE_EXPR: | |
2621 | case LT_EXPR: | |
2622 | case LE_EXPR: | |
2623 | case GT_EXPR: | |
2624 | case GE_EXPR: | |
2625 | case MULT_EXPR: | |
2626 | case RDIV_EXPR: | |
2627 | case TRUNC_DIV_EXPR: | |
2628 | case FLOOR_DIV_EXPR: | |
2629 | case CEIL_DIV_EXPR: | |
2630 | case ROUND_DIV_EXPR: | |
2631 | case TRUNC_MOD_EXPR: | |
2632 | case FLOOR_MOD_EXPR: | |
2633 | case CEIL_MOD_EXPR: | |
2634 | case ROUND_MOD_EXPR: | |
2635 | case EXACT_DIV_EXPR: | |
2636 | case FIX_TRUNC_EXPR: | |
2637 | case FLOAT_EXPR: | |
2638 | case REALPART_EXPR: | |
2639 | case IMAGPART_EXPR: | |
2640 | /* No valid bounds may be produced by these exprs. */ | |
2641 | bounds = chkp_get_invalid_op_bounds (); | |
2642 | break; | |
2643 | ||
2644 | case COND_EXPR: | |
2645 | { | |
2646 | tree val1 = gimple_assign_rhs2 (assign); | |
2647 | tree val2 = gimple_assign_rhs3 (assign); | |
2648 | tree bnd1 = chkp_find_bounds (val1, &iter); | |
2649 | tree bnd2 = chkp_find_bounds (val2, &iter); | |
2650 | gimple stmt; | |
2651 | ||
2652 | if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2)) | |
2653 | bounds = incomplete_bounds; | |
2654 | else if (bnd1 == bnd2) | |
2655 | bounds = bnd1; | |
2656 | else | |
2657 | { | |
2658 | rhs1 = unshare_expr (rhs1); | |
2659 | ||
2660 | bounds = chkp_get_tmp_reg (assign); | |
e9cf809e | 2661 | stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2); |
058a1b7a | 2662 | gsi_insert_after (&iter, stmt, GSI_SAME_STMT); |
2663 | ||
2664 | if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2)) | |
2665 | chkp_mark_invalid_bounds (bounds); | |
2666 | } | |
2667 | } | |
2668 | break; | |
2669 | ||
2670 | case MAX_EXPR: | |
2671 | case MIN_EXPR: | |
2672 | { | |
2673 | tree rhs2 = gimple_assign_rhs2 (assign); | |
2674 | tree bnd1 = chkp_find_bounds (rhs1, &iter); | |
2675 | tree bnd2 = chkp_find_bounds (rhs2, &iter); | |
2676 | ||
2677 | if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2)) | |
2678 | bounds = incomplete_bounds; | |
2679 | else if (bnd1 == bnd2) | |
2680 | bounds = bnd1; | |
2681 | else | |
2682 | { | |
2683 | gimple stmt; | |
2684 | tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR, | |
2685 | boolean_type_node, rhs1, rhs2); | |
2686 | bounds = chkp_get_tmp_reg (assign); | |
e9cf809e | 2687 | stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2); |
058a1b7a | 2688 | |
2689 | gsi_insert_after (&iter, stmt, GSI_SAME_STMT); | |
2690 | ||
2691 | if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2)) | |
2692 | chkp_mark_invalid_bounds (bounds); | |
2693 | } | |
2694 | } | |
2695 | break; | |
2696 | ||
2697 | default: | |
2698 | bounds = chkp_get_zero_bounds (); | |
2699 | warning (0, "pointer bounds were lost due to unexpected expression %s", | |
2700 | get_tree_code_name (rhs_code)); | |
2701 | } | |
2702 | ||
2703 | gcc_assert (bounds); | |
2704 | ||
2705 | if (node) | |
2706 | bounds = chkp_maybe_copy_and_register_bounds (node, bounds); | |
2707 | ||
2708 | return bounds; | |
2709 | } | |
2710 | ||
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI definition a new bounds PHI node is created and ITER is set
   to point to it; its arguments are filled in later by the caller.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition (no defining statement): bounds are chosen
	 from the kind of declaration the SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero constant
	       size for the bounds below to make sense.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Pointers merged by an abnormal PHI need a bounds variable that
	 may live across the abnormal edge; otherwise a plain temp is
	 enough.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Bounds cannot be tracked through inline asm; fall back to
	 zero (infinite) bounds so no false checks are generated.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2823 | ||
2824 | /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */ | |
2825 | tree | |
2826 | chkp_build_make_bounds_call (tree lower_bound, tree size) | |
2827 | { | |
2828 | tree call = build1 (ADDR_EXPR, | |
2829 | build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)), | |
2830 | chkp_bndmk_fndecl); | |
2831 | return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)), | |
2832 | call, 2, lower_bound, size); | |
2833 | } | |
2834 | ||
/* Create static bounds var of specified OBJ which is
   either VAR_DECL or string constant.

   The created var is registered in chkp_static_var_bounds so that
   later references to the same OBJ (or to decls sharing its
   assembler name) reuse it.  */
static tree
chkp_make_static_bounds (tree obj)
{
  static int string_id = 1;
  static int var_id = 1;
  tree *slot;
  const char *var_name;
  char *bnd_var_name;
  tree bnd_var;

  /* First check if we already have required var.  */
  if (chkp_static_var_bounds)
    {
      /* For vars we use assembler name as a key in
	 chkp_static_var_bounds map.  It allows to
	 avoid duplicating bound vars for decls
	 sharing assembler name.  */
      if (TREE_CODE (obj) == VAR_DECL)
	{
	  tree name = DECL_ASSEMBLER_NAME (obj);
	  slot = chkp_static_var_bounds->get (name);
	  if (slot)
	    return *slot;
	}
      else
	{
	  slot = chkp_static_var_bounds->get (obj);
	  if (slot)
	    return *slot;
	}
    }

  /* Build decl for bounds var.  */
  if (TREE_CODE (obj) == VAR_DECL)
    {
      if (DECL_IGNORED_P (obj))
	{
	  /* +11: up to 10 digits for a positive int plus the
	     terminating null (the original +10 could overflow by
	     one byte for 10-digit ids).  */
	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 11);
	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
	}
      else
	{
	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));

	  /* For hidden symbols we want to skip first '*' char.  */
	  if (*var_name == '*')
	    var_name++;

	  bnd_var_name = (char *) xmalloc (strlen (var_name)
					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
	  strcat (bnd_var_name, var_name);
	}

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);

      /* Address of the obj will be used as lower bound.  */
      TREE_ADDRESSABLE (obj) = 1;
    }
  else
    {
      /* +11: see comment above.  */
      bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 11);
      sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);

      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (bnd_var_name),
			    pointer_bounds_type_node);
    }

  /* get_identifier copied the string; release our buffer to avoid
     leaking it (it was never freed before).  */
  free (bnd_var_name);

  TREE_PUBLIC (bnd_var) = 0;
  TREE_USED (bnd_var) = 1;
  TREE_READONLY (bnd_var) = 0;
  TREE_STATIC (bnd_var) = 1;
  TREE_ADDRESSABLE (bnd_var) = 0;
  DECL_ARTIFICIAL (bnd_var) = 1;
  DECL_COMMON (bnd_var) = 1;
  DECL_COMDAT (bnd_var) = 1;
  DECL_READ_P (bnd_var) = 1;
  DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
  /* Force output similar to constant bounds.
     See chkp_make_static_const_bounds.  */
  varpool_node::get_create (bnd_var)->force_output = 1;
  /* Mark symbol as requiring bounds initialization.  */
  varpool_node::get_create (bnd_var)->need_bounds_init = 1;
  varpool_node::finalize_decl (bnd_var);

  /* Add created var to the map to use it for other references
     to obj.  */
  if (!chkp_static_var_bounds)
    chkp_static_var_bounds = new hash_map<tree, tree>;

  if (TREE_CODE (obj) == VAR_DECL)
    {
      tree name = DECL_ASSEMBLER_NAME (obj);
      chkp_static_var_bounds->put (name, bnd_var);
    }
  else
    chkp_static_var_bounds->put (obj, bnd_var);

  return bnd_var;
}
2940 | ||
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Call the __chkp_sizeof builtin; its result is resolved to the
     object size via a size relocation at link time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 Max size = (0 - lb), i.e. everything from lb up to the top
	 of the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      /* Trust the relocated size as-is.  */
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* All generated code goes to the start of the entry block so the
     size is available everywhere in the function.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3001 | ||
3002 | /* Return 1 if TYPE has fields with zero size or fields | |
3003 | marked with chkp_variable_size attribute. */ | |
3004 | bool | |
3005 | chkp_variable_size_type (tree type) | |
3006 | { | |
3007 | bool res = false; | |
3008 | tree field; | |
3009 | ||
3010 | if (RECORD_OR_UNION_TYPE_P (type)) | |
3011 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
3012 | { | |
3013 | if (TREE_CODE (field) == FIELD_DECL) | |
3014 | res = res | |
3015 | || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3016 | || chkp_variable_size_type (TREE_TYPE (field)); | |
3017 | } | |
3018 | else | |
3019 | res = !TYPE_SIZE (type) | |
3020 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST | |
3021 | || tree_to_uhwi (TYPE_SIZE (type)) == 0; | |
3022 | ||
3023 | return res; | |
3024 | } | |
3025 | ||
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse bounds previously computed for this decl's address.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Statically allocated decl: load bounds from a statically
	 initialized bounds var in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time: compute it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Common case: bounds are [&decl, &decl + size).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3091 | ||
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse previously computed bounds for this constant.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): the second clause suggests the flag is tri-state
     (> 0 forces static const bounds regardless of
     flag_chkp_use_static_bounds) — confirm against the option's
     default value.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds var
	 in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Bounds are [&cst, &cst + string length).  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3129 | ||
3130 | /* Generate code to instersect bounds BOUNDS1 and BOUNDS2 and | |
3131 | return the result. if ITER is not NULL then Code is inserted | |
3132 | before position pointed by ITER. Otherwise code is added to | |
3133 | entry block. */ | |
3134 | static tree | |
3135 | chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter) | |
3136 | { | |
3137 | if (!bounds1 || bounds1 == chkp_get_zero_bounds ()) | |
3138 | return bounds2 ? bounds2 : bounds1; | |
3139 | else if (!bounds2 || bounds2 == chkp_get_zero_bounds ()) | |
3140 | return bounds1; | |
3141 | else | |
3142 | { | |
3143 | gimple_seq seq; | |
3144 | gimple stmt; | |
3145 | tree bounds; | |
3146 | ||
3147 | seq = NULL; | |
3148 | ||
3149 | stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2); | |
3150 | chkp_mark_stmt (stmt); | |
3151 | ||
3152 | bounds = chkp_get_tmp_reg (stmt); | |
3153 | gimple_call_set_lhs (stmt, bounds); | |
3154 | ||
3155 | gimple_seq_add_stmt (&seq, stmt); | |
3156 | ||
3157 | /* We are probably doing narrowing for constant expression. | |
3158 | In such case iter may be undefined. */ | |
3159 | if (!iter) | |
3160 | { | |
3161 | gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ()); | |
3162 | iter = &gsi; | |
3163 | gsi_insert_seq_after (iter, seq, GSI_SAME_STMT); | |
3164 | } | |
3165 | else | |
3166 | gsi_insert_seq_before (iter, seq, GSI_SAME_STMT); | |
3167 | ||
3168 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3169 | { | |
3170 | fprintf (dump_file, "Bounds intersection: "); | |
3171 | print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS); | |
3172 | fprintf (dump_file, " inserted before statement: "); | |
3173 | print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, | |
3174 | TDF_VOPS|TDF_MEMSYMS); | |
3175 | } | |
3176 | ||
3177 | return bounds; | |
3178 | } | |
3179 | } | |
3180 | ||
3181 | /* Return 1 if we are allowed to narrow bounds for addressed FIELD | |
3182 | and 0 othersize. */ | |
3183 | static bool | |
3184 | chkp_may_narrow_to_field (tree field) | |
3185 | { | |
3186 | return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST | |
3187 | && tree_to_uhwi (DECL_SIZE (field)) != 0 | |
3188 | && (!DECL_FIELD_OFFSET (field) | |
3189 | || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST) | |
3190 | && (!DECL_FIELD_BIT_OFFSET (field) | |
3191 | || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST) | |
3192 | && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3193 | && !chkp_variable_size_type (TREE_TYPE (field)); | |
3194 | } | |
3195 | ||
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow unless this is the very first field and the option to
     give the first field its own bounds is off (taking the address
     of the first field is commonly used to address the whole
     object).  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3220 | ||
3221 | /* Perform narrowing for BOUNDS using bounds computed for field | |
3222 | access COMPONENT. ITER meaning is the same as for | |
3223 | chkp_intersect_bounds. */ | |
3224 | static tree | |
3225 | chkp_narrow_bounds_to_field (tree bounds, tree component, | |
3226 | gimple_stmt_iterator *iter) | |
3227 | { | |
3228 | tree field = TREE_OPERAND (component, 1); | |
3229 | tree size = DECL_SIZE_UNIT (field); | |
3230 | tree field_ptr = chkp_build_addr_expr (component); | |
3231 | tree field_bounds; | |
3232 | ||
3233 | field_bounds = chkp_make_bounds (field_ptr, size, iter, false); | |
3234 | ||
3235 | return chkp_intersect_bounds (field_bounds, bounds, iter); | |
3236 | } | |
3237 | ||
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE outer parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS outer parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base must be an addressable object.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing may go out of the element — not safe.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Narrow to the component holding the leftmost array
		 (if any) and stop searching.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* When narrowing to the innermost array is requested,
	     narrow eagerly at each array-typed field and drop any
	     pending candidate.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* If no bounds were computed above, fall back to the bounds of
     the outermost pointer.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3379 | ||
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse bounds already registered for this object's address.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the ref parser compute (possibly narrowed) bounds;
	   the other outputs are not needed here.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not checked; use zero (infinite) bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3447 | ||
3448 | /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements | |
3449 | to compute bounds if required. Computed bounds should be available at | |
3450 | position pointed by ITER. | |
3451 | ||
3452 | If PTR_SRC is NULL_TREE then pointer definition is identified. | |
3453 | ||
3454 | If PTR_SRC is not NULL_TREE then ITER points to statements which loads | |
3455 | PTR. If PTR is a any memory reference then ITER points to a statement | |
3456 | after which bndldx will be inserterd. In both cases ITER will be updated | |
3457 | to point to the inserted bndldx statement. */ | |
3458 | ||
3459 | static tree | |
3460 | chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter) | |
3461 | { | |
3462 | tree addr = NULL_TREE; | |
3463 | tree bounds = NULL_TREE; | |
3464 | ||
3465 | if (!ptr_src) | |
3466 | ptr_src = ptr; | |
3467 | ||
3468 | bounds = chkp_get_registered_bounds (ptr_src); | |
3469 | ||
3470 | if (bounds) | |
3471 | return bounds; | |
3472 | ||
3473 | switch (TREE_CODE (ptr_src)) | |
3474 | { | |
3475 | case MEM_REF: | |
3476 | case VAR_DECL: | |
3477 | if (BOUNDED_P (ptr_src)) | |
3478 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3479 | bounds = chkp_get_zero_bounds (); | |
3480 | else | |
3481 | { | |
3482 | addr = chkp_build_addr_expr (ptr_src); | |
3483 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3484 | } | |
3485 | else | |
3486 | bounds = chkp_get_nonpointer_load_bounds (); | |
3487 | break; | |
3488 | ||
3489 | case ARRAY_REF: | |
3490 | case COMPONENT_REF: | |
3491 | addr = get_base_address (ptr_src); | |
3492 | if (DECL_P (addr) | |
3493 | || TREE_CODE (addr) == MEM_REF | |
3494 | || TREE_CODE (addr) == TARGET_MEM_REF) | |
3495 | { | |
3496 | if (BOUNDED_P (ptr_src)) | |
3497 | if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr)) | |
3498 | bounds = chkp_get_zero_bounds (); | |
3499 | else | |
3500 | { | |
3501 | addr = chkp_build_addr_expr (ptr_src); | |
3502 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3503 | } | |
3504 | else | |
3505 | bounds = chkp_get_nonpointer_load_bounds (); | |
3506 | } | |
3507 | else | |
3508 | { | |
3509 | gcc_assert (TREE_CODE (addr) == SSA_NAME); | |
3510 | bounds = chkp_find_bounds (addr, iter); | |
3511 | } | |
3512 | break; | |
3513 | ||
3514 | case PARM_DECL: | |
3515 | gcc_unreachable (); | |
3516 | bounds = chkp_get_bound_for_parm (ptr_src); | |
3517 | break; | |
3518 | ||
3519 | case TARGET_MEM_REF: | |
3520 | addr = chkp_build_addr_expr (ptr_src); | |
3521 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3522 | break; | |
3523 | ||
3524 | case SSA_NAME: | |
3525 | bounds = chkp_get_registered_bounds (ptr_src); | |
3526 | if (!bounds) | |
3527 | { | |
3528 | gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src); | |
1a91d914 | 3529 | gphi_iterator phi_iter; |
058a1b7a | 3530 | |
3531 | bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter); | |
3532 | ||
3533 | gcc_assert (bounds); | |
3534 | ||
1a91d914 | 3535 | if (gphi *def_phi = dyn_cast <gphi *> (def_stmt)) |
058a1b7a | 3536 | { |
3537 | unsigned i; | |
3538 | ||
1a91d914 | 3539 | for (i = 0; i < gimple_phi_num_args (def_phi); i++) |
058a1b7a | 3540 | { |
1a91d914 | 3541 | tree arg = gimple_phi_arg_def (def_phi, i); |
058a1b7a | 3542 | tree arg_bnd; |
1a91d914 | 3543 | gphi *phi_bnd; |
058a1b7a | 3544 | |
3545 | arg_bnd = chkp_find_bounds (arg, NULL); | |
3546 | ||
3547 | /* chkp_get_bounds_by_definition created new phi | |
3548 | statement and phi_iter points to it. | |
3549 | ||
3550 | Previous call to chkp_find_bounds could create | |
3551 | new basic block and therefore change phi statement | |
3552 | phi_iter points to. */ | |
1a91d914 | 3553 | phi_bnd = phi_iter.phi (); |
058a1b7a | 3554 | |
3555 | add_phi_arg (phi_bnd, arg_bnd, | |
1a91d914 | 3556 | gimple_phi_arg_edge (def_phi, i), |
058a1b7a | 3557 | UNKNOWN_LOCATION); |
3558 | } | |
3559 | ||
3560 | /* If all bound phi nodes have their arg computed | |
3561 | then we may finish its computation. See | |
3562 | chkp_finish_incomplete_bounds for more details. */ | |
3563 | if (chkp_may_finish_incomplete_bounds ()) | |
3564 | chkp_finish_incomplete_bounds (); | |
3565 | } | |
3566 | ||
3567 | gcc_assert (bounds == chkp_get_registered_bounds (ptr_src) | |
3568 | || chkp_incomplete_bounds (bounds)); | |
3569 | } | |
3570 | break; | |
3571 | ||
3572 | case ADDR_EXPR: | |
3573 | bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter); | |
3574 | break; | |
3575 | ||
3576 | case INTEGER_CST: | |
3577 | if (integer_zerop (ptr_src)) | |
3578 | bounds = chkp_get_none_bounds (); | |
3579 | else | |
3580 | bounds = chkp_get_invalid_op_bounds (); | |
3581 | break; | |
3582 | ||
3583 | default: | |
3584 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3585 | { | |
3586 | fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n", | |
3587 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3588 | print_node (dump_file, "", ptr_src, 0); | |
3589 | } | |
3590 | internal_error ("chkp_find_bounds: Unexpected tree code %s", | |
3591 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3592 | } | |
3593 | ||
3594 | if (!bounds) | |
3595 | { | |
3596 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3597 | { | |
3598 | fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n"); | |
3599 | print_node (dump_file, "", ptr_src, 0); | |
3600 | } | |
3601 | internal_error ("chkp_find_bounds: Cannot find bounds for pointer"); | |
3602 | } | |
3603 | ||
3604 | return bounds; | |
3605 | } | |
3606 | ||
/* Normal case for bounds search without forced narrowing.
   PTR is the pointer whose bounds are wanted; ITER points to the
   statement before which any bounds-producing code is inserted.
   Delegates to chkp_find_bounds_1 with no load source (NULL_TREE).  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3613 | ||
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Unlike chkp_find_bounds this
   passes the memory location PTR was loaded from, so bounds can
   presumably be fetched via bndldx from the bounds table —
   see chkp_find_bounds_1 for the actual dispatch.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3621 | ||
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   LHS/RHS are the destination/source of the assignment being walked;
   ARG is passed through to HANDLER unchanged.  Recurses through
   structs, unions and fixed-size arrays; scalar pointer types hit
   HANDLER directly.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* For a constructor RHS the constructor element itself is the
	     source value; only the LHS needs a COMPONENT_REF built.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain aggregate copy: build matching field accesses on both
	   sides, skipping fields that contain no pointers.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      /* A RANGE_EXPR purpose initializes a run of elements
		 with the same value.  */
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An INTEGER_CST purpose repositions CUR; elements
		     without a purpose continue from the previous index.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3720 | ||
3721 | /* Add code to copy bounds for assignment of RHS to LHS. | |
3722 | ARG is an iterator pointing ne code position. */ | |
3723 | static void | |
3724 | chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg) | |
3725 | { | |
3726 | gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg; | |
3727 | tree bounds = chkp_find_bounds (rhs, iter); | |
3728 | tree addr = chkp_build_addr_expr(lhs); | |
3729 | ||
3730 | chkp_build_bndstx (addr, rhs, bounds, iter); | |
3731 | } | |
3732 | ||
/* Emit static bound initializers and size vars.

   Runs at end of compilation.  Builds two families of static
   constructors: 'P' constructors storing bounds for statically
   initialized pointers, and 'B' constructors initializing static
   bounds variables.  Statements are batched so each constructor
   holds at most MAX_STMTS_IN_STATIC_CHKP_CTOR statements.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Batch full — flush accumulated statements into a static
	   constructor and start a new batch.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush the last (partial) batch.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is initialized with the address of the
	   variable it describes.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release per-compilation maps; they are not needed past this
     point.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3800 | ||
3801 | /* An instrumentation function which is called for each statement | |
3802 | having memory access we want to instrument. It inserts check | |
3803 | code and bounds copy code. | |
3804 | ||
3805 | ITER points to statement to instrument. | |
3806 | ||
3807 | NODE holds memory access in statement to check. | |
3808 | ||
3809 | LOC holds the location information for statement. | |
3810 | ||
3811 | DIRFLAGS determines whether access is read or write. | |
3812 | ||
3813 | ACCESS_OFFS should be added to address used in NODE | |
3814 | before check. | |
3815 | ||
3816 | ACCESS_SIZE holds size of checked access. | |
3817 | ||
3818 | SAFE indicates if NODE access is safe and should not be | |
3819 | checked. */ | |
3820 | static void | |
3821 | chkp_process_stmt (gimple_stmt_iterator *iter, tree node, | |
3822 | location_t loc, tree dirflag, | |
3823 | tree access_offs, tree access_size, | |
3824 | bool safe) | |
3825 | { | |
3826 | tree node_type = TREE_TYPE (node); | |
3827 | tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type); | |
3828 | tree addr_first = NULL_TREE; /* address of the first accessed byte */ | |
3829 | tree addr_last = NULL_TREE; /* address of the last accessed byte */ | |
3830 | tree ptr = NULL_TREE; /* a pointer used for dereference */ | |
3831 | tree bounds = NULL_TREE; | |
3832 | ||
3833 | /* We do not need instrumentation for clobbers. */ | |
3834 | if (dirflag == integer_one_node | |
3835 | && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN | |
3836 | && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter)))) | |
3837 | return; | |
3838 | ||
3839 | switch (TREE_CODE (node)) | |
3840 | { | |
3841 | case ARRAY_REF: | |
3842 | case COMPONENT_REF: | |
3843 | { | |
3844 | bool bitfield; | |
3845 | tree elt; | |
3846 | ||
3847 | if (safe) | |
3848 | { | |
3849 | /* We are not going to generate any checks, so do not | |
3850 | generate bounds as well. */ | |
3851 | addr_first = chkp_build_addr_expr (node); | |
3852 | break; | |
3853 | } | |
3854 | ||
3855 | chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe, | |
3856 | &bitfield, &bounds, iter, false); | |
3857 | ||
3858 | /* Break if there is no dereference and operation is safe. */ | |
3859 | ||
3860 | if (bitfield) | |
3861 | { | |
3862 | tree field = TREE_OPERAND (node, 1); | |
3863 | ||
3864 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) | |
3865 | size = DECL_SIZE_UNIT (field); | |
3866 | ||
3867 | if (elt) | |
3868 | elt = chkp_build_addr_expr (elt); | |
3869 | addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr); | |
3870 | addr_first = fold_build_pointer_plus_loc (loc, | |
3871 | addr_first, | |
3872 | byte_position (field)); | |
3873 | } | |
3874 | else | |
3875 | addr_first = chkp_build_addr_expr (node); | |
3876 | } | |
3877 | break; | |
3878 | ||
3879 | case INDIRECT_REF: | |
3880 | ptr = TREE_OPERAND (node, 0); | |
3881 | addr_first = ptr; | |
3882 | break; | |
3883 | ||
3884 | case MEM_REF: | |
3885 | ptr = TREE_OPERAND (node, 0); | |
3886 | addr_first = chkp_build_addr_expr (node); | |
3887 | break; | |
3888 | ||
3889 | case TARGET_MEM_REF: | |
3890 | ptr = TMR_BASE (node); | |
3891 | addr_first = chkp_build_addr_expr (node); | |
3892 | break; | |
3893 | ||
3894 | case ARRAY_RANGE_REF: | |
3895 | printf("ARRAY_RANGE_REF\n"); | |
3896 | debug_gimple_stmt(gsi_stmt(*iter)); | |
3897 | debug_tree(node); | |
3898 | gcc_unreachable (); | |
3899 | break; | |
3900 | ||
3901 | case BIT_FIELD_REF: | |
3902 | { | |
3903 | tree offs, rem, bpu; | |
3904 | ||
3905 | gcc_assert (!access_offs); | |
3906 | gcc_assert (!access_size); | |
3907 | ||
3908 | bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT)); | |
3909 | offs = fold_convert (size_type_node, TREE_OPERAND (node, 2)); | |
3910 | rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu); | |
3911 | offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu); | |
3912 | ||
3913 | size = fold_convert (size_type_node, TREE_OPERAND (node, 1)); | |
3914 | size = size_binop_loc (loc, PLUS_EXPR, size, rem); | |
3915 | size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu); | |
3916 | size = fold_convert (size_type_node, size); | |
3917 | ||
3918 | chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc, | |
3919 | dirflag, offs, size, safe); | |
3920 | return; | |
3921 | } | |
3922 | break; | |
3923 | ||
3924 | case VAR_DECL: | |
3925 | case RESULT_DECL: | |
3926 | case PARM_DECL: | |
3927 | if (dirflag != integer_one_node | |
3928 | || DECL_REGISTER (node)) | |
3929 | return; | |
3930 | ||
3931 | safe = true; | |
3932 | addr_first = chkp_build_addr_expr (node); | |
3933 | break; | |
3934 | ||
3935 | default: | |
3936 | return; | |
3937 | } | |
3938 | ||
3939 | /* If addr_last was not computed then use (addr_first + size - 1) | |
3940 | expression to compute it. */ | |
3941 | if (!addr_last) | |
3942 | { | |
3943 | addr_last = fold_build_pointer_plus_loc (loc, addr_first, size); | |
3944 | addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1); | |
3945 | } | |
3946 | ||
3947 | /* Shift both first_addr and last_addr by access_offs if specified. */ | |
3948 | if (access_offs) | |
3949 | { | |
3950 | addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs); | |
3951 | addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs); | |
3952 | } | |
3953 | ||
3954 | /* Generate bndcl/bndcu checks if memory access is not safe. */ | |
3955 | if (!safe) | |
3956 | { | |
3957 | gimple_stmt_iterator stmt_iter = *iter; | |
3958 | ||
3959 | if (!bounds) | |
3960 | bounds = chkp_find_bounds (ptr, iter); | |
3961 | ||
3962 | chkp_check_mem_access (addr_first, addr_last, bounds, | |
3963 | stmt_iter, loc, dirflag); | |
3964 | } | |
3965 | ||
3966 | /* We need to store bounds in case pointer is stored. */ | |
3967 | if (dirflag == integer_one_node | |
3968 | && chkp_type_has_pointer (node_type) | |
3969 | && flag_chkp_store_bounds) | |
3970 | { | |
3971 | gimple stmt = gsi_stmt (*iter); | |
3972 | tree rhs1 = gimple_assign_rhs1 (stmt); | |
3973 | enum tree_code rhs_code = gimple_assign_rhs_code (stmt); | |
3974 | ||
3975 | if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS) | |
3976 | chkp_walk_pointer_assignments (node, rhs1, iter, | |
3977 | chkp_copy_bounds_for_elem); | |
3978 | else | |
3979 | { | |
3980 | bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt); | |
3981 | chkp_build_bndstx (addr_first, rhs1, bounds, iter); | |
3982 | } | |
3983 | } | |
3984 | } | |
3985 | ||
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   After generating the bounds-copy code, walks the inserted
   statements (backwards from ASSIGN) and creates cgraph edges for
   every generated bndldx/bndstx/ret_bnd call so the call graph
   stays consistent.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* ITER still points at ASSIGN; new statements were inserted before
     it, so walk backwards until we get back to ASSIGN.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp runtime helpers are expected among the
	     generated calls.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4024 | ||
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.

   Specifically: statements inserted after a block-ending statement
   are moved onto the fallthru edge.  Abnormal fallthru edges cannot
   be split, so they are temporarily made regular and re-created
   after the inserts are committed.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Anything following a block-ending statement in the same bb
	   is misplaced and must move to the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* gsi_remove advances NEXT, so this drains every trailing
	       statement onto the edge in original order.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4082 | ||
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.

   OP points to the operand being walked; WALK_SUBTREES is cleared
   once a FUNCTION_DECL is handled so its subtree is skipped.
   Functions marked "bnd_legacy" are never replaced.  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
			       void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*op) == FUNCTION_DECL
      && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
	  /* For builtins we replace pointers only for selected
	     function and functions having definitions.  */
	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
	      && (chkp_instrument_normal_builtin (*op)
		  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      /* Avoid re-cloning a decl that already is a clone; the clone
	 may legitimately not exist, in which case *OP is kept.  */
      if (!node->instrumentation_clone)
	clone = chkp_maybe_create_clone (*op);

      if (clone)
	*op = clone->decl;
      *walk_subtrees = 0;
    }

  return NULL;
}
4112 | ||
4113 | /* This function searches for function pointers in statement | |
4114 | pointed by GSI and replaces them with pointers to instrumented | |
4115 | function versions. */ | |
4116 | static void | |
4117 | chkp_replace_function_pointers (gimple_stmt_iterator *gsi) | |
4118 | { | |
4119 | gimple stmt = gsi_stmt (*gsi); | |
4120 | /* For calls we want to walk call args only. */ | |
4121 | if (gimple_code (stmt) == GIMPLE_CALL) | |
4122 | { | |
4123 | unsigned i; | |
4124 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
4125 | walk_tree (gimple_call_arg_ptr (stmt, i), | |
4126 | chkp_replace_function_pointer, NULL, NULL); | |
4127 | } | |
4128 | else | |
4129 | walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL); | |
4130 | } | |
4131 | ||
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.

   Walks every basic block statement by statement, replacing
   function pointers, instrumenting assignments, returns and calls.
   Afterwards, incoming bounds for addressable parameters are stored
   into the bounds table at function entry.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Inside checker-generated static initializers all accesses are
     known safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Instrumentation may split blocks; remember the successor
	 before processing.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is a store (integer_one_node), RHS operands are
		 loads (integer_zero_node).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  /* Safe to remove S here: I has already advanced past it.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate param: store bounds for each pointer slot
		 found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4267 | ||
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.

   These builtins return their first argument, so each call is
   rewritten into a plain copy of that argument.  */

static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  */
	  /* Note the assignments inside the condition: FNDECL and
	     FCODE are set as side effects of the checks.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	    }
	}
    }
}
4305 | ||
/* Initialize pass.

   Resets per-function pass state: clears statement marks, recreates
   the bounds maps, resets cached bounds/vars, and computes dominance
   info used during instrumentation.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear the "do not instrument" mark from every statement.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Some maps survive between functions (deleted then recreated
     here); others are created fresh and released in chkp_fini.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4351 | ||
/* Finalize instrumentation pass.

   Releases the per-function sets/maps created in chkp_init and the
   dominance info.  Maps that must survive across functions
   (e.g. chkp_bounds_map, used later by chkp_finish_file) are
   deliberately not deleted here.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached per-function trees so stale values are not reused
     for the next function.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4372 | ||
/* Main instrumentation pass function.  Runs the full pipeline:
   init, instrument, clean up useless builtins, mark the function
   as instrumented, repair the CFG, and finalize.  Always returns 0
   (no additional TODO flags).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4391 | ||
4392 | /* Instrumentation pass gate. */ | |
4393 | static bool | |
4394 | chkp_gate (void) | |
4395 | { | |
20dc3373 | 4396 | cgraph_node *node = cgraph_node::get (cfun->decl); |
4397 | return ((node != NULL | |
4398 | && node->instrumentation_clone) | |
4399 | || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))); | |
058a1b7a | 4400 | } |
4401 | ||
namespace {

/* Pass descriptor for the chkp instrumentation pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Gimple pass wrapper delegating to chkp_gate/chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so the pass may appear multiple times in a pipeline.  */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4444 | ||
/* Factory used by the pass manager to instantiate the chkp pass
   within context CTXT.  Caller takes ownership of the result.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4450 | ||
4451 | #include "gt-tree-chkp.h" |