]>
Commit | Line | Data |
---|---|---|
d5e254e1 | 1 | /* Pointer Bounds Checker instrumentation pass. |
cbe34bb5 | 2 | Copyright (C) 2014-2017 Free Software Foundation, Inc. |
d5e254e1 IE |
3 | Contributed by Ilya Enkovich (ilya.enkovich@intel.com) |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it under | |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 3, or (at your option) any later | |
10 | version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
c7131fb2 | 24 | #include "backend.h" |
957060b5 AM |
25 | #include "target.h" |
26 | #include "rtl.h" | |
40e23961 | 27 | #include "tree.h" |
c7131fb2 | 28 | #include "gimple.h" |
957060b5 AM |
29 | #include "cfghooks.h" |
30 | #include "tree-pass.h" | |
c7131fb2 | 31 | #include "ssa.h" |
957060b5 AM |
32 | #include "cgraph.h" |
33 | #include "diagnostic.h" | |
40e23961 | 34 | #include "fold-const.h" |
d5e254e1 IE |
35 | #include "stor-layout.h" |
36 | #include "varasm.h" | |
d5e254e1 IE |
37 | #include "tree-iterator.h" |
38 | #include "tree-cfg.h" | |
39 | #include "langhooks.h" | |
d5e254e1 | 40 | #include "tree-ssa-address.h" |
d5e254e1 | 41 | #include "tree-ssa-loop-niter.h" |
d5e254e1 IE |
42 | #include "gimple-pretty-print.h" |
43 | #include "gimple-iterator.h" | |
44 | #include "gimplify.h" | |
45 | #include "gimplify-me.h" | |
46 | #include "print-tree.h" | |
36566b39 | 47 | #include "calls.h" |
d5e254e1 IE |
48 | #include "expr.h" |
49 | #include "tree-ssa-propagate.h" | |
d5e254e1 IE |
50 | #include "tree-chkp.h" |
51 | #include "gimple-walk.h" | |
d5e254e1 | 52 | #include "tree-dfa.h" |
d5e254e1 IE |
53 | #include "ipa-chkp.h" |
54 | #include "params.h" | |
d5e254e1 IE |
55 | |
56 | /* Pointer Bounds Checker instruments code with memory checks to find | |
57 | out-of-bounds memory accesses. Checks are performed by computing | |
58 | bounds for each pointer and then comparing address of accessed | |
59 | memory before pointer dereferencing. | |
60 | ||
61 | 1. Function clones. | |
62 | ||
63 | See ipa-chkp.c. | |
64 | ||
65 | 2. Instrumentation. | |
66 | ||
67 | There are few things to instrument: | |
68 | ||
69 | a) Memory accesses - add checker calls to check address of accessed memory | |
70 | against bounds of dereferenced pointer. Obviously safe memory | |
71 | accesses like static variable access does not have to be instrumented | |
72 | with checks. | |
73 | ||
74 | Example: | |
75 | ||
76 | val_2 = *p_1; | |
77 | ||
78 | with 4 bytes access is transformed into: | |
79 | ||
80 | __builtin___chkp_bndcl (__bound_tmp.1_3, p_1); | |
81 | D.1_4 = p_1 + 3; | |
82 | __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4); | |
83 | val_2 = *p_1; | |
84 | ||
85 | where __bound_tmp.1_3 are bounds computed for pointer p_1, | |
86 | __builtin___chkp_bndcl is a lower bound check and | |
87 | __builtin___chkp_bndcu is an upper bound check. | |
88 | ||
89 | b) Pointer stores. | |
90 | ||
91 | When pointer is stored in memory we need to store its bounds. To | |
92 | achieve compatibility of instrumented code with regular codes | |
93 | we have to keep data layout and store bounds in special bound tables | |
94 | via special checker call. Implementation of bounds table may vary for | |
95 | different platforms. It has to associate pointer value and its | |
96 | location (it is required because we may have two equal pointers | |
97 | with different bounds stored in different places) with bounds. | |
98 | Another checker builtin allows to get bounds for specified pointer | |
99 | loaded from specified location. | |
100 | ||
101 | Example: | |
102 | ||
103 | buf1[i_1] = &buf2; | |
104 | ||
105 | is transformed into: | |
106 | ||
107 | buf1[i_1] = &buf2; | |
108 | D.1_2 = &buf1[i_1]; | |
109 | __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2); | |
110 | ||
111 | where __bound_tmp.1_2 are bounds of &buf2. | |
112 | ||
113 | c) Static initialization. | |
114 | ||
115 | The special case of pointer store is static pointer initialization. | |
116 | Bounds initialization is performed in a few steps: | |
117 | - register all static initializations in front-end using | |
118 | chkp_register_var_initializer | |
119 | - when file compilation finishes we create functions with special | |
120 | attribute 'chkp ctor' and put explicit initialization code | |
121 | (assignments) for all statically initialized pointers. | |
122 | - when checker constructor is compiled checker pass adds required | |
123 | bounds initialization for all statically initialized pointers | |
124 | - since we do not actually need excess pointers initialization | |
125 | in checker constructor we remove such assignments from them | |
126 | ||
127 | d) Calls. | |
128 | ||
129 | For each call in the code we add additional arguments to pass | |
130 | bounds for pointer arguments. We determine type of call arguments | |
131 | using arguments list from function declaration; if function | |
132 | declaration is not available we use function type; otherwise | |
133 | (e.g. for unnamed arguments) we use type of passed value. Function | |
134 | declaration/type is replaced with the instrumented one. | |
135 | ||
136 | Example: | |
137 | ||
138 | val_1 = foo (&buf1, &buf2, &buf1, 0); | |
139 | ||
140 | is translated into: | |
141 | ||
142 | val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3, | |
143 | &buf1, __bound_tmp.1_2, 0); | |
144 | ||
145 | e) Returns. | |
146 | ||
147 | If function returns a pointer value we have to return bounds also. | |
148 | A new operand was added for return statement to hold returned bounds. | |
149 | ||
150 | Example: | |
151 | ||
152 | return &_buf1; | |
153 | ||
154 | is transformed into | |
155 | ||
156 | return &_buf1, __bound_tmp.1_1; | |
157 | ||
158 | 3. Bounds computation. | |
159 | ||
160 | Compiler is fully responsible for computing bounds to be used for each | |
161 | memory access. The first step for bounds computation is to find the | |
162 | origin of pointer dereferenced for memory access. Basing on pointer | |
163 | origin we define a way to compute its bounds. There are just few | |
164 | possible cases: | |
165 | ||
166 | a) Pointer is returned by call. | |
167 | ||
168 | In this case we use corresponding checker builtin method to obtain returned | |
169 | bounds. | |
170 | ||
171 | Example: | |
172 | ||
173 | buf_1 = malloc (size_2); | |
174 | foo (buf_1); | |
175 | ||
176 | is translated into: | |
177 | ||
178 | buf_1 = malloc (size_2); | |
179 | __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1); | |
180 | foo (buf_1, __bound_tmp.1_3); | |
181 | ||
182 | b) Pointer is an address of an object. | |
183 | ||
184 | In this case the compiler tries to compute the object's size and create corresponding |
185 | bounds. If object has incomplete type then special checker builtin is used to | |
186 | obtain its size at runtime. | |
187 | ||
188 | Example: | |
189 | ||
190 | foo () | |
191 | { | |
192 | <unnamed type> __bound_tmp.3; | |
193 | static int buf[100]; | |
194 | ||
195 | <bb 3>: | |
196 | __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400); | |
197 | ||
198 | <bb 2>: | |
199 | return &buf, __bound_tmp.3_2; | |
200 | } | |
201 | ||
202 | Example: | |
203 | ||
204 | Address of an object 'extern int buf[]' with incomplete type is | |
205 | returned. | |
206 | ||
207 | foo () | |
208 | { | |
209 | <unnamed type> __bound_tmp.4; | |
210 | long unsigned int __size_tmp.3; | |
211 | ||
212 | <bb 3>: | |
213 | __size_tmp.3_4 = __builtin_ia32_sizeof (buf); | |
214 | __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4); | |
215 | ||
216 | <bb 2>: | |
217 | return &buf, __bound_tmp.4_3; | |
218 | } | |
219 | ||
220 | c) Pointer is the result of object narrowing. | |
221 | ||
222 | It happens when we use pointer to an object to compute pointer to a part | |
223 | of an object. E.g. we take pointer to a field of a structure. In this | |
224 | case we perform bounds intersection using bounds of original object and | |
225 | bounds of object's part (which are computed basing on its type). | |
226 | ||
227 | There may be some debatable questions about when narrowing should occur | |
228 | and when it should not. To avoid false bound violations in correct | |
229 | programs we do not perform narrowing when address of an array element is | |
230 | obtained (it has address of the whole array) and when address of the first | |
231 | structure field is obtained (because it is guaranteed to be equal to | |
232 | address of the whole structure and it is legal to cast it back to structure). | |
233 | ||
234 | Default narrowing behavior may be changed using compiler flags. | |
235 | ||
236 | Example: | |
237 | ||
238 | In this example address of the second structure field is returned. | |
239 | ||
240 | foo (struct A * p, __bounds_type __bounds_of_p) | |
241 | { | |
242 | <unnamed type> __bound_tmp.3; | |
243 | int * _2; | |
244 | int * _5; | |
245 | ||
246 | <bb 2>: | |
247 | _5 = &p_1(D)->second_field; | |
248 | __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4); | |
249 | __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6, | |
250 | __bounds_of_p_3(D)); | |
251 | _2 = &p_1(D)->second_field; | |
252 | return _2, __bound_tmp.3_8; | |
253 | } | |
254 | ||
255 | Example: | |
256 | ||
257 | In this example address of the first field of array element is returned. | |
258 | ||
259 | foo (struct A * p, __bounds_type __bounds_of_p, int i) | |
260 | { | |
261 | long unsigned int _3; | |
262 | long unsigned int _4; | |
263 | struct A * _6; | |
264 | int * _7; | |
265 | ||
266 | <bb 2>: | |
267 | _3 = (long unsigned int) i_1(D); | |
268 | _4 = _3 * 8; | |
269 | _6 = p_5(D) + _4; | |
270 | _7 = &_6->first_field; | |
271 | return _7, __bounds_of_p_2(D); | |
272 | } | |
273 | ||
274 | ||
275 | d) Pointer is the result of pointer arithmetic or type cast. | |
276 | ||
277 | In this case bounds of the base pointer are used. In case of binary | |
278 | operation producing a pointer we are analyzing data flow further | |
279 | looking for operand's bounds. One operand is considered as a base | |
280 | if it has some valid bounds. If we fall into a case when none of | |
281 | operands (or both of them) has valid bounds, a default bounds value | |
282 | is used. | |
283 | ||
284 | Trying to find out bounds for binary operations we may fall into | |
285 | cyclic dependencies for pointers. To avoid infinite recursion all | |
286 | walked phi nodes instantly obtain corresponding bounds but created | |
287 | bounds are marked as incomplete. It helps us to stop DF walk during | |
288 | bounds search. | |
289 | ||
290 | When we reach pointer source, some args of incomplete bounds phi obtain | |
291 | valid bounds and those values are propagated further through phi nodes. | |
292 | If no valid bounds were found for phi node then we mark its result as | |
293 | invalid bounds. Process stops when all incomplete bounds become either | |
294 | valid or invalid and we are able to choose a pointer base. | |
295 | ||
296 | e) Pointer is loaded from the memory. | |
297 | ||
298 | In this case we just need to load bounds from the bounds table. | |
299 | ||
300 | Example: | |
301 | ||
302 | foo () | |
303 | { | |
304 | <unnamed type> __bound_tmp.3; | |
305 | static int * buf; | |
306 | int * _2; | |
307 | ||
308 | <bb 2>: | |
309 | _2 = buf; | |
310 | __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2); | |
311 | return _2, __bound_tmp.3_4; | |
312 | } | |
313 | ||
314 | */ | |
315 | ||
/* Type of a callback invoked when walking assignments: receives the
   assignment's LHS, its RHS and a client-supplied cookie.  */
typedef void (*assign_handler)(tree, tree, void *);

/* Forward declarations for mutually-recursive helpers defined later
   in this file.  */
static tree chkp_get_zero_bounds ();
static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
				     gimple_stmt_iterator *iter);
static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
						tree *elt, bool *safe,
						bool *bitfield,
						tree *bounds,
						gimple_stmt_iterator *iter,
						bool innermost_bounds);
static void chkp_parse_bit_field_ref (tree node, location_t loc,
				      tree *offset, tree *size);
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter);
332 | |
/* Convenience accessors for the Pointer Bounds Checker builtins.
   Each macro expands to the FUNCTION_DECL the target hook
   targetm.builtin_chkp_function returns for the given built-in
   code.  */
#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
355 | ||
/* Unsigned integer type with pointer precision.  */
static GTY (()) tree chkp_uintptr_type;

/* Statically allocated variables holding the zero and "none" bounds
   values (see CHKP_ZERO_BOUNDS_VAR_NAME below).  */
static GTY (()) tree chkp_zero_bounds_var;
static GTY (()) tree chkp_none_bounds_var;

/* Entry block of the function currently being instrumented.  */
static GTY (()) basic_block entry_block;
/* Cached trees for the zero, none and incomplete bounds values.  */
static GTY (()) tree zero_bounds;
static GTY (()) tree none_bounds;
static GTY (()) tree incomplete_bounds;
/* Shared temporaries for bound values (see chkp_get_tmp_var) and
   size values (see chkp_get_size_tmp_var).  */
static GTY (()) tree tmp_var;
static GTY (()) tree size_tmp_var;
/* NOTE(review): presumably marks bound copies made for abnormal
   edges — use sites are outside this chunk; confirm there.  */
static GTY (()) bitmap chkp_abnormal_copies;

/* Sets of bounds marked invalid / completed during incomplete-bounds
   resolution (see chkp_mark_invalid_bounds and
   chkp_mark_completed_bounds).  */
struct hash_set<tree> *chkp_invalid_bounds;
struct hash_set<tree> *chkp_completed_bounds_set;
/* Maps from pointers/declarations to associated bounds values.  */
struct hash_map<tree, tree> *chkp_reg_bounds;
struct hash_map<tree, tree> *chkp_bound_vars;
struct hash_map<tree, tree> *chkp_reg_addr_bounds;
struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
struct hash_map<tree, tree> *chkp_static_var_bounds;

/* True while the checker instrumentation pass itself is running
   (affects temporary creation in chkp_get_tmp_reg).  */
static bool in_chkp_pass;

/* Names and prefixes used for checker-generated symbols.  */
#define CHKP_BOUND_TMP_NAME "__bound_tmp"
#define CHKP_SIZE_TMP_NAME "__size_tmp"
#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
387 | ||
/* Static checker constructors may become very large and their
   compilation with optimization may take too much time.
   Therefore we put a limit to number of statements in one
   constructor.  Tests with 100 000 statically initialized
   pointers showed following compilation times on Sandy Bridge
   server (used -O2):
   limit 100 => ~18 sec.
   limit 300 => ~22 sec.
   limit 1000 => ~30 sec.
   limit 3000 => ~49 sec.
   limit 5000 => ~55 sec.
   limit 10000 => ~76 sec.
   limit 100000 => ~532 sec.  */
#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))

/* A chunk of statements destined for one checker constructor:
   STMTS is the statement chain built so far, AVAIL is how many
   more statements may be added before the limit above is hit.  */
struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};
408 | ||
409 | /* Return 1 if function FNDECL is instrumented by Pointer | |
410 | Bounds Checker. */ | |
411 | bool | |
412 | chkp_function_instrumented_p (tree fndecl) | |
413 | { | |
414 | return fndecl | |
415 | && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl)); | |
416 | } | |
417 | ||
418 | /* Mark function FNDECL as instrumented. */ | |
419 | void | |
420 | chkp_function_mark_instrumented (tree fndecl) | |
421 | { | |
422 | if (chkp_function_instrumented_p (fndecl)) | |
423 | return; | |
424 | ||
425 | DECL_ATTRIBUTES (fndecl) | |
426 | = tree_cons (get_identifier ("chkp instrumented"), NULL, | |
427 | DECL_ATTRIBUTES (fndecl)); | |
428 | } | |
429 | ||
430 | /* Return true when STMT is builtin call to instrumentation function | |
431 | corresponding to CODE. */ | |
432 | ||
433 | bool | |
355fe088 | 434 | chkp_gimple_call_builtin_p (gimple *call, |
d5e254e1 IE |
435 | enum built_in_function code) |
436 | { | |
437 | tree fndecl; | |
77db6c15 AI |
438 | /* We are skipping the check for address-spaces, that's |
439 | why we don't use gimple_call_builtin_p directly here. */ | |
440 | if (is_gimple_call (call) | |
441 | && (fndecl = gimple_call_fndecl (call)) != NULL | |
442 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD | |
d5e254e1 | 443 | && (fndecl = targetm.builtin_chkp_function (code)) |
ea6e17d5 RB |
444 | && (DECL_FUNCTION_CODE (gimple_call_fndecl (call)) |
445 | == DECL_FUNCTION_CODE (fndecl))) | |
d5e254e1 IE |
446 | return true; |
447 | return false; | |
448 | } | |
449 | ||
855f036d IE |
450 | /* Emit code to build zero bounds and return RTL holding |
451 | the result. */ | |
452 | rtx | |
453 | chkp_expand_zero_bounds () | |
454 | { | |
455 | tree zero_bnd; | |
456 | ||
457 | if (flag_chkp_use_static_const_bounds) | |
458 | zero_bnd = chkp_get_zero_bounds_var (); | |
459 | else | |
460 | zero_bnd = chkp_build_make_bounds_call (integer_zero_node, | |
461 | integer_zero_node); | |
462 | return expand_normal (zero_bnd); | |
463 | } | |
464 | ||
/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero bounds either come from the preallocated static variable
     or are built by an explicit bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary holding the bounds value to be stored.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* bndstx keys the bounds table by the address of the memory slot
     holding PTR.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* First materialize the zero bounds in BND, then emit the store.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
485 | ||
4f5a53cc IE |
486 | /* Build retbnd call for returned value RETVAL. |
487 | ||
488 | If BNDVAL is not NULL then result is stored | |
489 | in it. Otherwise a temporary is created to | |
490 | hold returned value. | |
491 | ||
492 | GSI points to a position for a retbnd call | |
493 | and is set to created stmt. | |
494 | ||
495 | Cgraph edge is created for a new call if | |
496 | UPDATE_EDGE is 1. | |
497 | ||
498 | Obtained bounds are returned. */ | |
499 | tree | |
500 | chkp_insert_retbnd_call (tree bndval, tree retval, | |
501 | gimple_stmt_iterator *gsi) | |
502 | { | |
355fe088 | 503 | gimple *call; |
4f5a53cc IE |
504 | |
505 | if (!bndval) | |
506 | bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd"); | |
507 | ||
508 | call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval); | |
509 | gimple_call_set_lhs (call, bndval); | |
510 | gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING); | |
511 | ||
512 | return bndval; | |
513 | } | |
514 | ||
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  /* The obstack must outlive the bitmap allocated from it.  */
  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  /* Collect positions of all bound arguments.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* Copy the call only when there is something to strip; either way
     the resulting call is marked as carrying no bounds.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
540 | ||
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but a call carrying no bounds: point the
     edge back at the original (uninstrumented) function.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Uninstrumented callee but a call carrying bounds (checker
     builtins excluded): prefer the instrumented version.  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
579 | ||
d5e254e1 IE |
580 | /* Mark statement S to not be instrumented. */ |
581 | static void | |
355fe088 | 582 | chkp_mark_stmt (gimple *s) |
d5e254e1 IE |
583 | { |
584 | gimple_set_plf (s, GF_PLF_1, true); | |
585 | } | |
586 | ||
587 | /* Mark statement S to be instrumented. */ | |
588 | static void | |
355fe088 | 589 | chkp_unmark_stmt (gimple *s) |
d5e254e1 IE |
590 | { |
591 | gimple_set_plf (s, GF_PLF_1, false); | |
592 | } | |
593 | ||
594 | /* Return 1 if statement S should not be instrumented. */ | |
595 | static bool | |
355fe088 | 596 | chkp_marked_stmt_p (gimple *s) |
d5e254e1 IE |
597 | { |
598 | return gimple_plf (s, GF_PLF_1); | |
599 | } | |
600 | ||
601 | /* Get var to be used for bound temps. */ | |
602 | static tree | |
603 | chkp_get_tmp_var (void) | |
604 | { | |
605 | if (!tmp_var) | |
606 | tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME); | |
607 | ||
608 | return tmp_var; | |
609 | } | |
610 | ||
611 | /* Get SSA_NAME to be used as temp. */ | |
612 | static tree | |
355fe088 | 613 | chkp_get_tmp_reg (gimple *stmt) |
d5e254e1 IE |
614 | { |
615 | if (in_chkp_pass) | |
616 | return make_ssa_name (chkp_get_tmp_var (), stmt); | |
617 | ||
618 | return make_temp_ssa_name (pointer_bounds_type_node, stmt, | |
619 | CHKP_BOUND_TMP_NAME); | |
620 | } | |
621 | ||
622 | /* Get var to be used for size temps. */ | |
623 | static tree | |
624 | chkp_get_size_tmp_var (void) | |
625 | { | |
626 | if (!size_tmp_var) | |
627 | size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME); | |
628 | ||
629 | return size_tmp_var; | |
630 | } | |
631 | ||
632 | /* Register bounds BND for address of OBJ. */ | |
633 | static void | |
634 | chkp_register_addr_bounds (tree obj, tree bnd) | |
635 | { | |
636 | if (bnd == incomplete_bounds) | |
637 | return; | |
638 | ||
639 | chkp_reg_addr_bounds->put (obj, bnd); | |
640 | ||
641 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
642 | { | |
643 | fprintf (dump_file, "Regsitered bound "); | |
ef6cb4c7 | 644 | print_generic_expr (dump_file, bnd); |
d5e254e1 | 645 | fprintf (dump_file, " for address of "); |
ef6cb4c7 | 646 | print_generic_expr (dump_file, obj); |
d5e254e1 IE |
647 | fprintf (dump_file, "\n"); |
648 | } | |
649 | } | |
650 | ||
651 | /* Return bounds registered for address of OBJ. */ | |
652 | static tree | |
653 | chkp_get_registered_addr_bounds (tree obj) | |
654 | { | |
655 | tree *slot = chkp_reg_addr_bounds->get (obj); | |
656 | return slot ? *slot : NULL_TREE; | |
657 | } | |
658 | ||
659 | /* Mark BOUNDS as completed. */ | |
660 | static void | |
661 | chkp_mark_completed_bounds (tree bounds) | |
662 | { | |
663 | chkp_completed_bounds_set->add (bounds); | |
664 | ||
665 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
666 | { | |
667 | fprintf (dump_file, "Marked bounds "); | |
ef6cb4c7 | 668 | print_generic_expr (dump_file, bounds); |
d5e254e1 IE |
669 | fprintf (dump_file, " as completed\n"); |
670 | } | |
671 | } | |
672 | ||
673 | /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */ | |
674 | static bool | |
675 | chkp_completed_bounds (tree bounds) | |
676 | { | |
677 | return chkp_completed_bounds_set->contains (bounds); | |
678 | } | |
679 | ||
680 | /* Clear comleted bound marks. */ | |
681 | static void | |
682 | chkp_erase_completed_bounds (void) | |
683 | { | |
684 | delete chkp_completed_bounds_set; | |
685 | chkp_completed_bounds_set = new hash_set<tree>; | |
686 | } | |
687 | ||
0036534f AI |
688 | /* This function is used to provide a base address for |
689 | chkp_get_hard_register_fake_addr_expr. */ | |
690 | static tree | |
691 | chkp_get_hard_register_var_fake_base_address () | |
692 | { | |
693 | tree base = fold_convert (ptr_type_node, integer_zero_node); | |
694 | unsigned HOST_WIDE_INT offset = 1 << (TYPE_PRECISION (ptr_type_node) - 1); | |
695 | return fold_build_pointer_plus_hwi (base, offset); | |
696 | } | |
697 | ||
/* If we check bounds for a hard register variable, we cannot
   use its address - it is illegal, so instead of that we use
   this fake value.  */
static tree
chkp_get_hard_register_fake_addr_expr (tree obj)
{
  tree addr = chkp_get_hard_register_var_fake_base_address ();
  tree outer = obj;
  /* Walk from the outermost reference down towards the register
     variable, accumulating each component/element offset on top of
     the fake base address.  */
  while (TREE_CODE (outer) == COMPONENT_REF || TREE_CODE (outer) == ARRAY_REF)
    {
      if (TREE_CODE (outer) == COMPONENT_REF)
	{
	  addr = fold_build_pointer_plus (addr,
					  component_ref_field_offset (outer));
	  outer = TREE_OPERAND (outer, 0);
	}
      else if (TREE_CODE (outer) == ARRAY_REF)
	{
	  /* offset = element_size * index.  */
	  tree indx = fold_convert(size_type_node, TREE_OPERAND(outer, 1));
	  tree offset = size_binop (MULT_EXPR,
				    array_ref_element_size (outer), indx);
	  addr = fold_build_pointer_plus (addr, offset);
	  outer = TREE_OPERAND (outer, 0);
	}
    }

  return addr;
}
726 | ||
d5e254e1 IE |
727 | /* Mark BOUNDS associated with PTR as incomplete. */ |
728 | static void | |
729 | chkp_register_incomplete_bounds (tree bounds, tree ptr) | |
730 | { | |
731 | chkp_incomplete_bounds_map->put (bounds, ptr); | |
732 | ||
733 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
734 | { | |
735 | fprintf (dump_file, "Regsitered incomplete bounds "); | |
ef6cb4c7 | 736 | print_generic_expr (dump_file, bounds); |
d5e254e1 | 737 | fprintf (dump_file, " for "); |
ef6cb4c7 | 738 | print_generic_expr (dump_file, ptr); |
d5e254e1 IE |
739 | fprintf (dump_file, "\n"); |
740 | } | |
741 | } | |
742 | ||
743 | /* Return 1 if BOUNDS are incomplete and 0 otherwise. */ | |
744 | static bool | |
745 | chkp_incomplete_bounds (tree bounds) | |
746 | { | |
747 | if (bounds == incomplete_bounds) | |
748 | return true; | |
749 | ||
750 | if (chkp_completed_bounds (bounds)) | |
751 | return false; | |
752 | ||
753 | return chkp_incomplete_bounds_map->get (bounds) != NULL; | |
754 | } | |
755 | ||
756 | /* Clear incomleted bound marks. */ | |
757 | static void | |
758 | chkp_erase_incomplete_bounds (void) | |
759 | { | |
760 | delete chkp_incomplete_bounds_map; | |
761 | chkp_incomplete_bounds_map = new hash_map<tree, tree>; | |
762 | } | |
763 | ||
764 | /* Build and return bndmk call which creates bounds for structure | |
765 | pointed by PTR. Structure should have complete type. */ | |
766 | tree | |
767 | chkp_make_bounds_for_struct_addr (tree ptr) | |
768 | { | |
769 | tree type = TREE_TYPE (ptr); | |
770 | tree size; | |
771 | ||
772 | gcc_assert (POINTER_TYPE_P (type)); | |
773 | ||
774 | size = TYPE_SIZE (TREE_TYPE (type)); | |
775 | ||
776 | gcc_assert (size); | |
777 | ||
778 | return build_call_nary (pointer_bounds_type_node, | |
779 | build_fold_addr_expr (chkp_bndmk_fndecl), | |
780 | 2, ptr, size); | |
781 | } | |
782 | ||
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  /* Incomplete bounds are always defined by phi nodes.  */
  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  /* All args of this phi are known; continue the traversal.  */
  return true;
}
813 | ||
814 | /* Return 1 if all phi nodes created for bounds have their | |
815 | arguments computed. */ | |
816 | static bool | |
817 | chkp_may_finish_incomplete_bounds (void) | |
818 | { | |
819 | bool res = true; | |
820 | ||
821 | chkp_incomplete_bounds_map | |
822 | ->traverse<bool *, chkp_may_complete_phi_bounds> (&res); | |
823 | ||
824 | return res; | |
825 | } | |
826 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming edge of the pointer's phi, find bounds of the
     incoming pointer value and add them as the corresponding arg of
     the bounds phi.  NOTE(review): args appear to be appended in
     the pointer phi's edge order — confirm the bounds phi starts
     with no args at this point.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Continue the traversal.  */
  return true;
}
856 | ||
/* Mark BOUNDS as invalid by adding it to the global set of
   known-invalid bounds values (queried by chkp_valid_bounds).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " as invalid\n");
    }
}
870 | ||
871 | /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */ | |
872 | static bool | |
873 | chkp_valid_bounds (tree bounds) | |
874 | { | |
875 | if (bounds == zero_bounds || bounds == none_bounds) | |
876 | return false; | |
877 | ||
878 | return !chkp_invalid_bounds->contains (bounds); | |
879 | } | |
880 | ||
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.

   BOUNDS is an incomplete bounds SSA name defined by a PHI;
   SLOT is the map slot holding its pointer counterpart.
   *RES is set to true when progress was made (so the caller's
   fixed-point loop iterates again).  Always returns true to
   keep the hash_map traversal going.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on a previous pass — nothing to do.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* One valid, fully-computed argument suffices: mark the
	     PHI result completed and (re)compute all its args.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
919 | ||
920 | /* Helper function for chkp_finish_incomplete_bounds. | |
921 | Marks all incompleted bounds as invalid. */ | |
922 | bool | |
923 | chkp_mark_invalid_bounds_walker (tree const &bounds, | |
924 | tree *slot ATTRIBUTE_UNUSED, | |
925 | void *res ATTRIBUTE_UNUSED) | |
926 | { | |
927 | if (!chkp_completed_bounds (bounds)) | |
928 | { | |
929 | chkp_mark_invalid_bounds (bounds); | |
930 | chkp_mark_completed_bounds (bounds); | |
931 | } | |
932 | return true; | |
933 | } | |
934 | ||
935 | /* When all bound phi nodes have all their args computed | |
936 | we have enough info to find valid bounds. We iterate | |
937 | through all incompleted bounds searching for valid | |
938 | bounds. Found valid bounds are marked as completed | |
939 | and all remaining incompleted bounds are recomputed. | |
940 | Process continues until no new valid bounds may be | |
941 | found. All remained incompleted bounds are marked as | |
942 | invalid (i.e. have no valid source of bounds). */ | |
943 | static void | |
944 | chkp_finish_incomplete_bounds (void) | |
945 | { | |
30b3b0f8 | 946 | bool found_valid = true; |
d5e254e1 IE |
947 | |
948 | while (found_valid) | |
949 | { | |
950 | found_valid = false; | |
951 | ||
952 | chkp_incomplete_bounds_map-> | |
953 | traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid); | |
954 | ||
955 | if (found_valid) | |
956 | chkp_incomplete_bounds_map-> | |
957 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
958 | } | |
959 | ||
960 | chkp_incomplete_bounds_map-> | |
961 | traverse<void *, chkp_mark_invalid_bounds_walker> (NULL); | |
962 | chkp_incomplete_bounds_map-> | |
963 | traverse<void *, chkp_recompute_phi_bounds> (NULL); | |
964 | ||
965 | chkp_erase_completed_bounds (); | |
966 | chkp_erase_incomplete_bounds (); | |
967 | } | |
968 | ||
969 | /* Return 1 if type TYPE is a pointer type or a | |
970 | structure having a pointer type as one of its fields. | |
971 | Otherwise return 0. */ | |
972 | bool | |
973 | chkp_type_has_pointer (const_tree type) | |
974 | { | |
975 | bool res = false; | |
976 | ||
977 | if (BOUNDED_TYPE_P (type)) | |
978 | res = true; | |
979 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
980 | { | |
981 | tree field; | |
982 | ||
983 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
984 | if (TREE_CODE (field) == FIELD_DECL) | |
985 | res = res || chkp_type_has_pointer (TREE_TYPE (field)); | |
986 | } | |
987 | else if (TREE_CODE (type) == ARRAY_TYPE) | |
988 | res = chkp_type_has_pointer (TREE_TYPE (type)); | |
989 | ||
990 | return res; | |
991 | } | |
992 | ||
993 | unsigned | |
994 | chkp_type_bounds_count (const_tree type) | |
995 | { | |
996 | unsigned res = 0; | |
997 | ||
998 | if (!type) | |
999 | res = 0; | |
1000 | else if (BOUNDED_TYPE_P (type)) | |
1001 | res = 1; | |
1002 | else if (RECORD_OR_UNION_TYPE_P (type)) | |
1003 | { | |
1004 | bitmap have_bound; | |
1005 | ||
1006 | bitmap_obstack_initialize (NULL); | |
1007 | have_bound = BITMAP_ALLOC (NULL); | |
1008 | chkp_find_bound_slots (type, have_bound); | |
1009 | res = bitmap_count_bits (have_bound); | |
1010 | BITMAP_FREE (have_bound); | |
1011 | bitmap_obstack_release (NULL); | |
1012 | } | |
1013 | ||
1014 | return res; | |
1015 | } | |
1016 | ||
1017 | /* Get bounds associated with NODE via | |
1018 | chkp_set_bounds call. */ | |
1019 | tree | |
1020 | chkp_get_bounds (tree node) | |
1021 | { | |
1022 | tree *slot; | |
1023 | ||
1024 | if (!chkp_bounds_map) | |
1025 | return NULL_TREE; | |
1026 | ||
1027 | slot = chkp_bounds_map->get (node); | |
1028 | return slot ? *slot : NULL_TREE; | |
1029 | } | |
1030 | ||
1031 | /* Associate bounds VAL with NODE. */ | |
1032 | void | |
1033 | chkp_set_bounds (tree node, tree val) | |
1034 | { | |
1035 | if (!chkp_bounds_map) | |
1036 | chkp_bounds_map = new hash_map<tree, tree>; | |
1037 | ||
1038 | chkp_bounds_map->put (node, val); | |
1039 | } | |
1040 | ||
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into
   bounds initialization list then true is returned.
   Otherwise return false.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when instrumentation is disabled or the
     initializer failed to parse.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (VAR_P (var));
  gcc_assert (DECL_INITIAL (var));

  /* Only statically allocated variables whose type contains
     pointers need their bounds written by the static ctor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1064 | ||
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statements (passed in ARG)
   and decrement the list's budget of available statements.
   Note: this function only decrements the budget; the CALLER
   is responsible for flushing the list into a checker
   constructor when the budget is exhausted.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion when the types do not already match.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
1087 | ||
1088 | /* Build and return ADDR_EXPR for specified object OBJ. */ | |
1089 | static tree | |
1090 | chkp_build_addr_expr (tree obj) | |
1091 | { | |
0036534f AI |
1092 | /* We first check whether it is a "hard reg case". */ |
1093 | tree base = get_base_address (obj); | |
1094 | if (VAR_P (base) && DECL_HARD_REGISTER (base)) | |
1095 | return chkp_get_hard_register_fake_addr_expr (obj); | |
1096 | ||
1097 | /* If not - return regular ADDR_EXPR. */ | |
d5e254e1 IE |
1098 | return TREE_CODE (obj) == TARGET_MEM_REF |
1099 | ? tree_mem_ref_addr (ptr_type_node, obj) | |
1100 | : build_fold_addr_expr (obj); | |
1101 | } | |
1102 | ||
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: size is known at compile time.  The -1
	 makes SIZE the offset of the last byte covered by the
	 bounds (TREE_STRING_LENGTH presumably includes the
	 trailing NUL — confirm against tree.h).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a call to the
	 runtime's sizeof function.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* A zero dynamic size is treated as "infinite": the
	     size becomes the maximal value reachable from LB
	     (0 - LB in unsigned arithmetic).  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush accumulated initializers into a 'B' static constructor
     once the per-constructor statement budget is exhausted, then
     start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1160 | ||
1161 | /* Return entry block to be used for checker initilization code. | |
1162 | Create new block if required. */ | |
1163 | static basic_block | |
1164 | chkp_get_entry_block (void) | |
1165 | { | |
1166 | if (!entry_block) | |
c4d281b2 RB |
1167 | entry_block |
1168 | = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest; | |
d5e254e1 IE |
1169 | |
1170 | return entry_block; | |
1171 | } | |
1172 | ||
1173 | /* Return a bounds var to be used for pointer var PTR_VAR. */ | |
1174 | static tree | |
1175 | chkp_get_bounds_var (tree ptr_var) | |
1176 | { | |
1177 | tree bnd_var; | |
1178 | tree *slot; | |
1179 | ||
1180 | slot = chkp_bound_vars->get (ptr_var); | |
1181 | if (slot) | |
1182 | bnd_var = *slot; | |
1183 | else | |
1184 | { | |
1185 | bnd_var = create_tmp_reg (pointer_bounds_type_node, | |
1186 | CHKP_BOUND_TMP_NAME); | |
1187 | chkp_bound_vars->put (ptr_var, bnd_var); | |
1188 | } | |
1189 | ||
1190 | return bnd_var; | |
1191 | } | |
1192 | ||
2c1f37b5 IE |
/* If BND is an abnormal bounds copy, return a copied value.
   Otherwise return BND.

   NOTE(review): "orginal" in the name is a typo for "original";
   renaming would touch all callers, so it is kept as-is.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      /* Abnormal copies are created as plain assignments of the
	 original bounds value (see
	 chkp_maybe_copy_and_register_bounds), so the RHS of the
	 defining statement is the original.  */
      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}
d5e254e1 IE |
1207 | |
1208 | /* Register bounds BND for object PTR in global bounds table. | |
1209 | A copy of bounds may be created for abnormal ssa names. | |
1210 | Returns bounds to use for PTR. */ | |
1211 | static tree | |
1212 | chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd) | |
1213 | { | |
1214 | bool abnormal_ptr; | |
1215 | ||
1216 | if (!chkp_reg_bounds) | |
1217 | return bnd; | |
1218 | ||
1219 | /* Do nothing if bounds are incomplete_bounds | |
1220 | because it means bounds will be recomputed. */ | |
1221 | if (bnd == incomplete_bounds) | |
1222 | return bnd; | |
1223 | ||
1224 | abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME | |
1225 | && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr) | |
1226 | && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI); | |
1227 | ||
1228 | /* A single bounds value may be reused multiple times for | |
1229 | different pointer values. It may cause coalescing issues | |
1230 | for abnormal SSA names. To avoid it we create a bounds | |
1231 | copy in case it is computed for abnormal SSA name. | |
1232 | ||
1233 | We also cannot reuse such created copies for other pointers */ | |
1234 | if (abnormal_ptr | |
1235 | || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd))) | |
1236 | { | |
1237 | tree bnd_var = NULL_TREE; | |
1238 | ||
1239 | if (abnormal_ptr) | |
1240 | { | |
1241 | if (SSA_NAME_VAR (ptr)) | |
1242 | bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr)); | |
1243 | } | |
1244 | else | |
1245 | bnd_var = chkp_get_tmp_var (); | |
1246 | ||
1247 | /* For abnormal copies we may just find original | |
1248 | bounds and use them. */ | |
1249 | if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd)) | |
2c1f37b5 | 1250 | bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd); |
d5e254e1 IE |
1251 | /* For undefined values we usually use none bounds |
1252 | value but in case of abnormal edge it may cause | |
1253 | coalescing failures. Use default definition of | |
1254 | bounds variable instead to avoid it. */ | |
1255 | else if (SSA_NAME_IS_DEFAULT_DEF (ptr) | |
1256 | && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL) | |
1257 | { | |
1258 | bnd = get_or_create_ssa_default_def (cfun, bnd_var); | |
1259 | ||
1260 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1261 | { | |
1262 | fprintf (dump_file, "Using default def bounds "); | |
ef6cb4c7 | 1263 | print_generic_expr (dump_file, bnd); |
d5e254e1 | 1264 | fprintf (dump_file, " for abnormal default def SSA name "); |
ef6cb4c7 | 1265 | print_generic_expr (dump_file, ptr); |
d5e254e1 IE |
1266 | fprintf (dump_file, "\n"); |
1267 | } | |
1268 | } | |
1269 | else | |
1270 | { | |
1271 | tree copy; | |
355fe088 TS |
1272 | gimple *def = SSA_NAME_DEF_STMT (ptr); |
1273 | gimple *assign; | |
d5e254e1 IE |
1274 | gimple_stmt_iterator gsi; |
1275 | ||
1276 | if (bnd_var) | |
0ff4ddeb | 1277 | copy = make_ssa_name (bnd_var); |
d5e254e1 IE |
1278 | else |
1279 | copy = make_temp_ssa_name (pointer_bounds_type_node, | |
0ff4ddeb | 1280 | NULL, |
d5e254e1 | 1281 | CHKP_BOUND_TMP_NAME); |
2c1f37b5 | 1282 | bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd); |
d5e254e1 IE |
1283 | assign = gimple_build_assign (copy, bnd); |
1284 | ||
1285 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1286 | { | |
1287 | fprintf (dump_file, "Creating a copy of bounds "); | |
ef6cb4c7 | 1288 | print_generic_expr (dump_file, bnd); |
d5e254e1 | 1289 | fprintf (dump_file, " for abnormal SSA name "); |
ef6cb4c7 | 1290 | print_generic_expr (dump_file, ptr); |
d5e254e1 IE |
1291 | fprintf (dump_file, "\n"); |
1292 | } | |
1293 | ||
1294 | if (gimple_code (def) == GIMPLE_NOP) | |
1295 | { | |
1296 | gsi = gsi_last_bb (chkp_get_entry_block ()); | |
1297 | if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi))) | |
1298 | gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING); | |
1299 | else | |
1300 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1301 | } | |
1302 | else | |
1303 | { | |
355fe088 | 1304 | gimple *bnd_def = SSA_NAME_DEF_STMT (bnd); |
d5e254e1 IE |
1305 | /* Sometimes (e.g. when we load a pointer from a |
1306 | memory) bounds are produced later than a pointer. | |
1307 | We need to insert bounds copy appropriately. */ | |
1308 | if (gimple_code (bnd_def) != GIMPLE_NOP | |
1309 | && stmt_dominates_stmt_p (def, bnd_def)) | |
1310 | gsi = gsi_for_stmt (bnd_def); | |
1311 | else | |
1312 | gsi = gsi_for_stmt (def); | |
1313 | gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING); | |
1314 | } | |
1315 | ||
1316 | bnd = copy; | |
1317 | } | |
1318 | ||
1319 | if (abnormal_ptr) | |
1320 | bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)); | |
1321 | } | |
1322 | ||
1323 | chkp_reg_bounds->put (ptr, bnd); | |
1324 | ||
1325 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1326 | { | |
1327 | fprintf (dump_file, "Regsitered bound "); | |
ef6cb4c7 | 1328 | print_generic_expr (dump_file, bnd); |
d5e254e1 | 1329 | fprintf (dump_file, " for pointer "); |
ef6cb4c7 | 1330 | print_generic_expr (dump_file, ptr); |
d5e254e1 IE |
1331 | fprintf (dump_file, "\n"); |
1332 | } | |
1333 | ||
1334 | return bnd; | |
1335 | } | |
1336 | ||
1337 | /* Get bounds registered for object PTR in global bounds table. */ | |
1338 | static tree | |
1339 | chkp_get_registered_bounds (tree ptr) | |
1340 | { | |
1341 | tree *slot; | |
1342 | ||
1343 | if (!chkp_reg_bounds) | |
1344 | return NULL_TREE; | |
1345 | ||
1346 | slot = chkp_reg_bounds->get (ptr); | |
1347 | return slot ? *slot : NULL_TREE; | |
1348 | } | |
1349 | ||
/* Add bound retvals to return statement pointed by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a bare "return;".  */
  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      /* Find (or compute) bounds for the returned value;
	 registering against RET_DECL may create an
	 abnormal-safe copy.  Attach the result to the
	 return statement.  */
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1372 | ||
1373 | /* Force OP to be suitable for using as an argument for call. | |
1374 | New statements (if any) go to SEQ. */ | |
1375 | static tree | |
1376 | chkp_force_gimple_call_op (tree op, gimple_seq *seq) | |
1377 | { | |
1378 | gimple_seq stmts; | |
1379 | gimple_stmt_iterator si; | |
1380 | ||
1381 | op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE); | |
1382 | ||
1383 | for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si)) | |
1384 | chkp_mark_stmt (gsi_stmt (si)); | |
1385 | ||
1386 | gimple_seq_add_seq (seq, stmts); | |
1387 | ||
1388 | return op; | |
1389 | } | |
1390 | ||
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node is paired with flag_chkp_check_read,
   integer_one_node with flag_chkp_check_write).  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Checks against statically-known (zero) bounds are redundant
     unless the current function itself is instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the flags disabling read or write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first, with
     helper statements collected in SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1437 | ||
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node is paired with flag_chkp_check_read,
   integer_one_node with flag_chkp_check_write).  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Checks against statically-known (zero) bounds are redundant
     unless the current function itself is instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the flags disabling read or write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first, with
     helper statements collected in SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1484 | ||
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1498 | ||
1499 | /* Replace call to _bnd_chk_* pointed by GSI with | |
1500 | bndcu and bndcl calls. DIRFLAG determines whether | |
1501 | check is for read or write. */ | |
1502 | ||
1503 | void | |
1504 | chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi, | |
1505 | tree dirflag) | |
1506 | { | |
1507 | gimple_stmt_iterator call_iter = *gsi; | |
355fe088 | 1508 | gimple *call = gsi_stmt (*gsi); |
d5e254e1 IE |
1509 | tree fndecl = gimple_call_fndecl (call); |
1510 | tree addr = gimple_call_arg (call, 0); | |
1511 | tree bounds = chkp_find_bounds (addr, gsi); | |
1512 | ||
1513 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS | |
1514 | || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS) | |
1515 | chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1516 | ||
1517 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS) | |
1518 | chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1519 | ||
1520 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS) | |
1521 | { | |
1522 | tree size = gimple_call_arg (call, 1); | |
1523 | addr = fold_build_pointer_plus (addr, size); | |
1524 | addr = fold_build_pointer_plus_hwi (addr, -1); | |
1525 | chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag); | |
1526 | } | |
1527 | ||
1528 | gsi_remove (&call_iter, true); | |
1529 | } | |
1530 | ||
1531 | /* Replace call to _bnd_get_ptr_* pointed by GSI with | |
1532 | corresponding bounds extract call. */ | |
1533 | ||
1534 | void | |
1535 | chkp_replace_extract_builtin (gimple_stmt_iterator *gsi) | |
1536 | { | |
355fe088 | 1537 | gimple *call = gsi_stmt (*gsi); |
d5e254e1 IE |
1538 | tree fndecl = gimple_call_fndecl (call); |
1539 | tree addr = gimple_call_arg (call, 0); | |
1540 | tree bounds = chkp_find_bounds (addr, gsi); | |
355fe088 | 1541 | gimple *extract; |
d5e254e1 IE |
1542 | |
1543 | if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND) | |
1544 | fndecl = chkp_extract_lower_fndecl; | |
1545 | else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND) | |
1546 | fndecl = chkp_extract_upper_fndecl; | |
1547 | else | |
1548 | gcc_unreachable (); | |
1549 | ||
1550 | extract = gimple_build_call (fndecl, 1, bounds); | |
1551 | gimple_call_set_lhs (extract, gimple_call_lhs (call)); | |
1552 | chkp_mark_stmt (extract); | |
1553 | ||
1554 | gsi_replace (gsi, extract, false); | |
1555 | } | |
1556 | ||
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold the field's byte offset into the TMR's offset and
	 retype the reference to the field's type.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1583 | ||
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      /* Fold ESIZE * INDEX into the TMR's offset and retype the
	 reference to the element type.  */
      tree offs = TMR_OFFSET (arr);

      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
1617 | ||
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer into a fresh SSA temporary so bounds
	     can be looked up for a proper SSA name; the lookup is
	     positioned right after the generated assignment.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its accumulated bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* NOTE(review): the literal 8 converts DECL_FIELD_OFFSET
	       (bytes) to bits; BITS_PER_UNIT would be the portable
	       spelling — confirm.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with unknown length or zero elements
	 (max index of -1).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into each element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1689 | ||
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies one bound slot at its own offset.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    /* Field offset in bits = bit offset within the byte
	       position plus the byte position scaled to bits.
	       NOTE(review): the literal 8 assumes 8-bit units;
	       BITS_PER_UNIT would be the portable spelling —
	       confirm.  */
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays with non-constant or unknown length and
	 zero-length arrays (max index of -1).  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1739 | ||
1740 | /* Fill bitmap RES with information about bounds for | |
1741 | type TYPE. See chkp_find_bound_slots_1 for more | |
1742 | details. */ | |
1743 | void | |
1744 | chkp_find_bound_slots (const_tree type, bitmap res) | |
1745 | { | |
1746 | bitmap_clear (res); | |
1747 | chkp_find_bound_slots_1 (type, res, 0); | |
1748 | } | |
1749 | ||
edcf72f3 IE |
1750 | /* Return 1 if call to FNDECL should be instrumented |
1751 | and 0 otherwise. */ | |
1752 | ||
1753 | static bool | |
1754 | chkp_instrument_normal_builtin (tree fndecl) | |
1755 | { | |
1756 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1757 | { | |
1758 | case BUILT_IN_STRLEN: | |
1759 | case BUILT_IN_STRCPY: | |
1760 | case BUILT_IN_STRNCPY: | |
1761 | case BUILT_IN_STPCPY: | |
1762 | case BUILT_IN_STPNCPY: | |
1763 | case BUILT_IN_STRCAT: | |
1764 | case BUILT_IN_STRNCAT: | |
1765 | case BUILT_IN_MEMCPY: | |
1766 | case BUILT_IN_MEMPCPY: | |
1767 | case BUILT_IN_MEMSET: | |
1768 | case BUILT_IN_MEMMOVE: | |
1769 | case BUILT_IN_BZERO: | |
1770 | case BUILT_IN_STRCMP: | |
1771 | case BUILT_IN_STRNCMP: | |
1772 | case BUILT_IN_BCMP: | |
1773 | case BUILT_IN_MEMCMP: | |
1774 | case BUILT_IN_MEMCPY_CHK: | |
1775 | case BUILT_IN_MEMPCPY_CHK: | |
1776 | case BUILT_IN_MEMMOVE_CHK: | |
1777 | case BUILT_IN_MEMSET_CHK: | |
1778 | case BUILT_IN_STRCPY_CHK: | |
1779 | case BUILT_IN_STRNCPY_CHK: | |
1780 | case BUILT_IN_STPCPY_CHK: | |
1781 | case BUILT_IN_STPNCPY_CHK: | |
1782 | case BUILT_IN_STRCAT_CHK: | |
1783 | case BUILT_IN_STRNCAT_CHK: | |
1784 | case BUILT_IN_MALLOC: | |
1785 | case BUILT_IN_CALLOC: | |
1786 | case BUILT_IN_REALLOC: | |
1787 | return 1; | |
1788 | ||
1789 | default: | |
1790 | return 0; | |
1791 | } | |
1792 | } | |
1793 | ||
d5e254e1 IE |
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      /* An always_inline builtin is instrumented only when an
	 instrumented clone with a body actually exists.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  ARG walks either a PARM_DECL
	 chain (TREE_TYPE) or a TYPE_ARG_TYPES list (TREE_VALUE);
	 fall back to the actual argument for varargs.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer (or pass-by-reference) argument gets a single
	 bounds argument appended right after it; an aggregate
	 containing pointers gets one bounds argument per pointer
	 slot actually found.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* Upper bound on the number of pointer slots in TYPE.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call at the replacement before
	 swapping the statements in the IL.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
2022 | ||
d5e254e1 IE |
/* Return constant static bounds var with specified bounds LB and UB.
   If such a var does not exist then a new var is created with the
   specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build the decl first so its assembler name can be used for the
     symtab lookup below.  */
  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires %qs "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer encodes [LB, UB] in the target's bounds format.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  /* One-only so the same named bounds from multiple units merge.  */
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2071 | ||
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is true then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Operands must be valid gimple call operands; this may emit
     extra statements into SEQ to materialize them.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the new bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* "After" insertion only makes sense for an explicit position.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, " at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2124 | ||
2125 | /* Return var holding zero bounds. */ | |
2126 | tree | |
2127 | chkp_get_zero_bounds_var (void) | |
2128 | { | |
d5e254e1 IE |
2129 | if (!chkp_zero_bounds_var) |
2130 | chkp_zero_bounds_var | |
2131 | = chkp_make_static_const_bounds (0, -1, | |
2132 | CHKP_ZERO_BOUNDS_VAR_NAME); | |
2133 | return chkp_zero_bounds_var; | |
2134 | } | |
2135 | ||
2136 | /* Return var holding none bounds. */ | |
2137 | tree | |
2138 | chkp_get_none_bounds_var (void) | |
2139 | { | |
d5e254e1 IE |
2140 | if (!chkp_none_bounds_var) |
2141 | chkp_none_bounds_var | |
2142 | = chkp_make_static_const_bounds (-1, 0, | |
2143 | CHKP_NONE_BOUNDS_VAR_NAME); | |
2144 | return chkp_none_bounds_var; | |
2145 | } | |
2146 | ||
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  /* Cached per function; created at most once.  */
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static zero-bounds variable once in the entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise materialize [0, 0-sized] bounds with a bndmk at
       function entry (ITER == NULL).  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2175 | ||
/* Return SSA_NAME used to represent none bounds.  */
static tree
chkp_get_none_bounds (void)
{
  /* Cached per function; created at most once.  */
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static none-bounds variable once in the entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* lb = -1 with size 2 — presumably matching the static [-1, 0]
       "none" bounds created by chkp_get_none_bounds_var; confirm
       against the target's bndmk semantics.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2205 | ||
2206 | /* Return bounds to be used as a result of operation which | |
2207 | should not create poiunter (e.g. MULT_EXPR). */ | |
2208 | static tree | |
2209 | chkp_get_invalid_op_bounds (void) | |
2210 | { | |
2211 | return chkp_get_zero_bounds (); | |
2212 | } | |
2213 | ||
2214 | /* Return bounds to be used for loads of non-pointer values. */ | |
2215 | static tree | |
2216 | chkp_get_nonpointer_load_bounds (void) | |
2217 | { | |
2218 | return chkp_get_zero_bounds (); | |
2219 | } | |
2220 | ||
985f48f7 IE |
/* Return true if we may use a bndret call to get bounds for the
   pointer returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Among internal functions only IFN_VA_ARG returns bounds.  */
  if (gimple_call_internal_p (call))
    {
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Narrowing builtins produce bounds by definition.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* An already-instrumented call returns bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Other normal builtins return bounds only when an instrumented
	 always_inline clone with a body exists.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
2262 | ||
d5e254e1 IE |
/* Build bounds returned by CALL.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;
  tree lhs = gimple_call_lhs (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Bounds are [returned pointer, returned pointer + size).  */
      tree size = gimple_call_arg (call, 0);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lhs, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG indexes the original (uninstrumented) argument
	     list; skip over the interleaved bound arguments to map
	     it onto the instrumented call's argument number.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call)
	   && BOUNDED_P (lhs))
    {
      gcc_assert (TREE_CODE (lhs) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* No way to obtain bounds; be conservative with zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  /* Associate the computed bounds with the call's LHS.  */
  bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);

  return bounds;
}
2370 | ||
/* Return the bndret call consuming the bounds returned by the
   call which produced SSA name VAL, or NULL if there is none.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL must be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a BUILT_IN_CHKP_BNDRET call.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2389 | ||
/* Check the next parameter after the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  /* The bounds parameter is expected to immediately follow PARM
     in the instrumented argument chain.  */
  tree bounds = TREE_CHAIN (parm);
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      /* No default def yet — create one anchored to a nop.  */
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
2405 | ||
/* Return bounds to be used for input argument PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Check whether bounds were already registered for the SSA name
     or its underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && id_equal (DECL_ASSEMBLER_NAME (orig_decl), "main"))
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* A pointer parameter gets its bounds from the implicit
	     bounds parameter that follows it.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure bounds are registered for the SSA name itself.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm));
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2468 | ||
2469 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2470 | arguments. */ | |
2471 | tree | |
2472 | chkp_build_bndldx_call (tree addr, tree ptr) | |
2473 | { | |
2474 | tree fn = build1 (ADDR_EXPR, | |
2475 | build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)), | |
2476 | chkp_bndldx_fndecl); | |
2477 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)), | |
2478 | fn, 2, addr, ptr); | |
2479 | CALL_WITH_BOUNDS_P (call) = true; | |
2480 | return call; | |
2481 | } | |
2482 | ||
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;
  tree bounds;

  seq = NULL;

  /* Both operands must be valid gimple call operands; this may
     emit extra statements into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  return bounds;
}
2517 | ||
2518 | /* Build and return CALL_EXPR for bndstx builtin with specified | |
2519 | arguments. */ | |
2520 | tree | |
2521 | chkp_build_bndstx_call (tree addr, tree ptr, tree bounds) | |
2522 | { | |
2523 | tree fn = build1 (ADDR_EXPR, | |
2524 | build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)), | |
2525 | chkp_bndstx_fndecl); | |
2526 | tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)), | |
2527 | fn, 3, ptr, bounds, addr); | |
2528 | CALL_WITH_BOUNDS_P (call) = true; | |
2529 | return call; | |
2530 | } | |
2531 | ||
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;

  seq = NULL;

  /* Both operands must be valid gimple call operands; this may
     emit extra statements into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2562 | ||
64a948e9 IE |
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Register variables have no memory address to load from.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle refs whose base is addressable memory.  */
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx inserts after GSI, so GSI now points at
     the freshly built bndldx statement.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
2625 | ||
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* RHS value whose bounds were propagated, if any; inspected at
     the end to detect bounds coming via an abnormal SSA name.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the parameter's address, so it must be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   NOTE: the chains below are braceless; each inner 'else'
	   pairs with the nearest preceding 'if', as usual.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	/* Remember which operand's bounds were chosen (if either).  */
	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition
	       as the original COND_EXPR.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    gimple *stmt;
	    /* Pick the bounds of whichever operand MAX/MIN would
	       select at runtime.  */
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2848 | ||
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on what kind of decl
	 the SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a non-zero constant size
	       for the bounds below to make sense.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* NOTE: braceless nesting — the first 'else' below pairs with
	 the inner 'if (SSA_NAME_VAR (node))'.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      /* Mirror the pointer PHI with a PHI for its bounds.  */
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Values produced by inline asm get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2961 | ||
2962 | /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */ | |
2963 | tree | |
2964 | chkp_build_make_bounds_call (tree lower_bound, tree size) | |
2965 | { | |
2966 | tree call = build1 (ADDR_EXPR, | |
2967 | build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)), | |
2968 | chkp_bndmk_fndecl); | |
2969 | return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)), | |
2970 | call, 2, lower_bound, size); | |
2971 | } | |
2972 | ||
2973 | /* Create static bounds var of specfified OBJ which is | |
2974 | is either VAR_DECL or string constant. */ | |
2975 | static tree | |
2976 | chkp_make_static_bounds (tree obj) | |
2977 | { | |
2978 | static int string_id = 1; | |
2979 | static int var_id = 1; | |
2980 | tree *slot; | |
2981 | const char *var_name; | |
2982 | char *bnd_var_name; | |
2983 | tree bnd_var; | |
2984 | ||
2985 | /* First check if we already have required var. */ | |
2986 | if (chkp_static_var_bounds) | |
2987 | { | |
227eabce IE |
2988 | /* For vars we use assembler name as a key in |
2989 | chkp_static_var_bounds map. It allows to | |
2990 | avoid duplicating bound vars for decls | |
2991 | sharing assembler name. */ | |
8813a647 | 2992 | if (VAR_P (obj)) |
227eabce IE |
2993 | { |
2994 | tree name = DECL_ASSEMBLER_NAME (obj); | |
2995 | slot = chkp_static_var_bounds->get (name); | |
2996 | if (slot) | |
2997 | return *slot; | |
2998 | } | |
2999 | else | |
3000 | { | |
3001 | slot = chkp_static_var_bounds->get (obj); | |
3002 | if (slot) | |
3003 | return *slot; | |
3004 | } | |
d5e254e1 IE |
3005 | } |
3006 | ||
3007 | /* Build decl for bounds var. */ | |
8813a647 | 3008 | if (VAR_P (obj)) |
d5e254e1 IE |
3009 | { |
3010 | if (DECL_IGNORED_P (obj)) | |
3011 | { | |
3012 | bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10); | |
3013 | sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++); | |
3014 | } | |
3015 | else | |
3016 | { | |
3017 | var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj)); | |
3018 | ||
3019 | /* For hidden symbols we want to skip first '*' char. */ | |
3020 | if (*var_name == '*') | |
3021 | var_name++; | |
3022 | ||
3023 | bnd_var_name = (char *) xmalloc (strlen (var_name) | |
3024 | + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1); | |
3025 | strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX); | |
3026 | strcat (bnd_var_name, var_name); | |
3027 | } | |
3028 | ||
3029 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
3030 | get_identifier (bnd_var_name), | |
3031 | pointer_bounds_type_node); | |
3032 | ||
3033 | /* Address of the obj will be used as lower bound. */ | |
3034 | TREE_ADDRESSABLE (obj) = 1; | |
3035 | } | |
3036 | else | |
3037 | { | |
3038 | bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10); | |
3039 | sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++); | |
3040 | ||
3041 | bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |
3042 | get_identifier (bnd_var_name), | |
3043 | pointer_bounds_type_node); | |
3044 | } | |
3045 | ||
2f0fc505 ML |
3046 | free (bnd_var_name); |
3047 | ||
d5e254e1 IE |
3048 | TREE_PUBLIC (bnd_var) = 0; |
3049 | TREE_USED (bnd_var) = 1; | |
3050 | TREE_READONLY (bnd_var) = 0; | |
3051 | TREE_STATIC (bnd_var) = 1; | |
3052 | TREE_ADDRESSABLE (bnd_var) = 0; | |
3053 | DECL_ARTIFICIAL (bnd_var) = 1; | |
3054 | DECL_COMMON (bnd_var) = 1; | |
3055 | DECL_COMDAT (bnd_var) = 1; | |
3056 | DECL_READ_P (bnd_var) = 1; | |
3057 | DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj); | |
3058 | /* Force output similar to constant bounds. | |
3059 | See chkp_make_static_const_bounds. */ | |
3060 | varpool_node::get_create (bnd_var)->force_output = 1; | |
3061 | /* Mark symbol as requiring bounds initialization. */ | |
3062 | varpool_node::get_create (bnd_var)->need_bounds_init = 1; | |
3063 | varpool_node::finalize_decl (bnd_var); | |
3064 | ||
3065 | /* Add created var to the map to use it for other references | |
3066 | to obj. */ | |
3067 | if (!chkp_static_var_bounds) | |
3068 | chkp_static_var_bounds = new hash_map<tree, tree>; | |
3069 | ||
8813a647 | 3070 | if (VAR_P (obj)) |
227eabce IE |
3071 | { |
3072 | tree name = DECL_ASSEMBLER_NAME (obj); | |
3073 | chkp_static_var_bounds->put (name, bnd_var); | |
3074 | } | |
3075 | else | |
3076 | chkp_static_var_bounds->put (obj, bnd_var); | |
d5e254e1 IE |
3077 | |
3078 | return bnd_var; | |
3079 | } | |
3080 | ||
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, "'\n");
    }

  /* size_reloc = chkp_sizeof (var) — the size determined at runtime.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 max_size = 0 - (uintptr) &var, i.e. everything from LB up
	 to the top of the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = size_reloc != 0 ? size_reloc : max_size  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* The whole sequence is emitted once, at the function's entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3141 | ||
3142 | /* Return 1 if TYPE has fields with zero size or fields | |
3143 | marked with chkp_variable_size attribute. */ | |
3144 | bool | |
3145 | chkp_variable_size_type (tree type) | |
3146 | { | |
3147 | bool res = false; | |
3148 | tree field; | |
3149 | ||
3150 | if (RECORD_OR_UNION_TYPE_P (type)) | |
3151 | for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) | |
3152 | { | |
3153 | if (TREE_CODE (field) == FIELD_DECL) | |
3154 | res = res | |
3155 | || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3156 | || chkp_variable_size_type (TREE_TYPE (field)); | |
3157 | } | |
3158 | else | |
3159 | res = !TYPE_SIZE (type) | |
3160 | || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST | |
3161 | || tree_to_uhwi (TYPE_SIZE (type)) == 0; | |
3162 | ||
3163 | return res; | |
3164 | } | |
3165 | ||
3166 | /* Compute and return bounds for address of DECL which is | |
3167 | one of VAR_DECL, PARM_DECL, RESULT_DECL. */ | |
3168 | static tree | |
3169 | chkp_get_bounds_for_decl_addr (tree decl) | |
3170 | { | |
3171 | tree bounds; | |
3172 | ||
8813a647 | 3173 | gcc_assert (VAR_P (decl) |
d5e254e1 IE |
3174 | || TREE_CODE (decl) == PARM_DECL |
3175 | || TREE_CODE (decl) == RESULT_DECL); | |
3176 | ||
3177 | bounds = chkp_get_registered_addr_bounds (decl); | |
3178 | ||
3179 | if (bounds) | |
3180 | return bounds; | |
3181 | ||
3182 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3183 | { | |
3184 | fprintf (dump_file, "Building bounds for address of decl "); | |
ef6cb4c7 | 3185 | print_generic_expr (dump_file, decl); |
d5e254e1 IE |
3186 | fprintf (dump_file, "\n"); |
3187 | } | |
3188 | ||
3189 | /* Use zero bounds if size is unknown and checks for | |
3190 | unknown sizes are restricted. */ | |
3191 | if ((!DECL_SIZE (decl) | |
3192 | || (chkp_variable_size_type (TREE_TYPE (decl)) | |
3193 | && (TREE_STATIC (decl) | |
3194 | || DECL_EXTERNAL (decl) | |
3195 | || TREE_PUBLIC (decl)))) | |
3196 | && !flag_chkp_incomplete_type) | |
3197 | return chkp_get_zero_bounds (); | |
3198 | ||
3199 | if (flag_chkp_use_static_bounds | |
8813a647 | 3200 | && VAR_P (decl) |
d5e254e1 IE |
3201 | && (TREE_STATIC (decl) |
3202 | || DECL_EXTERNAL (decl) | |
3203 | || TREE_PUBLIC (decl)) | |
3204 | && !DECL_THREAD_LOCAL_P (decl)) | |
3205 | { | |
3206 | tree bnd_var = chkp_make_static_bounds (decl); | |
3207 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
355fe088 | 3208 | gimple *stmt; |
d5e254e1 | 3209 | |
0ff4ddeb | 3210 | bounds = chkp_get_tmp_reg (NULL); |
d5e254e1 IE |
3211 | stmt = gimple_build_assign (bounds, bnd_var); |
3212 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
3213 | } | |
3214 | else if (!DECL_SIZE (decl) | |
3215 | || (chkp_variable_size_type (TREE_TYPE (decl)) | |
3216 | && (TREE_STATIC (decl) | |
3217 | || DECL_EXTERNAL (decl) | |
3218 | || TREE_PUBLIC (decl)))) | |
3219 | { | |
8813a647 | 3220 | gcc_assert (VAR_P (decl)); |
d5e254e1 IE |
3221 | bounds = chkp_generate_extern_var_bounds (decl); |
3222 | } | |
3223 | else | |
3224 | { | |
3225 | tree lb = chkp_build_addr_expr (decl); | |
3226 | bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false); | |
3227 | } | |
3228 | ||
3229 | return bounds; | |
3230 | } | |
3231 | ||
3232 | /* Compute and return bounds for constant string. */ | |
3233 | static tree | |
3234 | chkp_get_bounds_for_string_cst (tree cst) | |
3235 | { | |
3236 | tree bounds; | |
3237 | tree lb; | |
3238 | tree size; | |
3239 | ||
3240 | gcc_assert (TREE_CODE (cst) == STRING_CST); | |
3241 | ||
3242 | bounds = chkp_get_registered_bounds (cst); | |
3243 | ||
3244 | if (bounds) | |
3245 | return bounds; | |
3246 | ||
3247 | if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds) | |
3248 | || flag_chkp_use_static_const_bounds > 0) | |
3249 | { | |
3250 | tree bnd_var = chkp_make_static_bounds (cst); | |
3251 | gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ()); | |
355fe088 | 3252 | gimple *stmt; |
d5e254e1 | 3253 | |
0ff4ddeb | 3254 | bounds = chkp_get_tmp_reg (NULL); |
d5e254e1 IE |
3255 | stmt = gimple_build_assign (bounds, bnd_var); |
3256 | gsi_insert_before (&gsi, stmt, GSI_SAME_STMT); | |
3257 | } | |
3258 | else | |
3259 | { | |
3260 | lb = chkp_build_addr_expr (cst); | |
3261 | size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst)); | |
3262 | bounds = chkp_make_bounds (lb, size, NULL, false); | |
3263 | } | |
3264 | ||
3265 | bounds = chkp_maybe_copy_and_register_bounds (cst, bounds); | |
3266 | ||
3267 | return bounds; | |
3268 | } | |
3269 | ||
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero bounds (and missing bounds) do not constrain the
     intersection, so the other operand is returned unchanged
     and no code is emitted.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* bounds = __chkp_intersect (bounds1, bounds2)  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3320 | ||
3321 | /* Return 1 if we are allowed to narrow bounds for addressed FIELD | |
a5e9e3e4 ML |
3322 | and 0 othersize. REF is reference to the field. */ |
3323 | ||
d5e254e1 | 3324 | static bool |
a5e9e3e4 | 3325 | chkp_may_narrow_to_field (tree ref, tree field) |
d5e254e1 IE |
3326 | { |
3327 | return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST | |
3328 | && tree_to_uhwi (DECL_SIZE (field)) != 0 | |
8ba4f506 | 3329 | && !(flag_chkp_flexible_struct_trailing_arrays |
c3e46927 | 3330 | && array_at_struct_end_p (ref)) |
d5e254e1 IE |
3331 | && (!DECL_FIELD_OFFSET (field) |
3332 | || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST) | |
3333 | && (!DECL_FIELD_BIT_OFFSET (field) | |
3334 | || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST) | |
3335 | && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field)) | |
3336 | && !chkp_variable_size_type (TREE_TYPE (field)); | |
3337 | } | |
3338 | ||
3339 | /* Return 1 if bounds for FIELD should be narrowed to | |
a5e9e3e4 ML |
3340 | field's own size. REF is reference to the field. */ |
3341 | ||
d5e254e1 | 3342 | static bool |
a5e9e3e4 | 3343 | chkp_narrow_bounds_for_field (tree ref, tree field) |
d5e254e1 IE |
3344 | { |
3345 | HOST_WIDE_INT offs; | |
3346 | HOST_WIDE_INT bit_offs; | |
3347 | ||
a5e9e3e4 | 3348 | if (!chkp_may_narrow_to_field (ref, field)) |
d5e254e1 IE |
3349 | return false; |
3350 | ||
a278b1c3 | 3351 | /* Access to compiler generated fields should not cause |
d5e254e1 IE |
3352 | bounds narrowing. */ |
3353 | if (DECL_ARTIFICIAL (field)) | |
3354 | return false; | |
3355 | ||
3356 | offs = tree_to_uhwi (DECL_FIELD_OFFSET (field)); | |
3357 | bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)); | |
3358 | ||
3359 | return (flag_chkp_narrow_bounds | |
3360 | && (flag_chkp_first_field_has_own_bounds | |
3361 | || offs | |
3362 | || bit_offs)); | |
3363 | } | |
3364 | ||
/* Perform narrowing for BOUNDS of an INNER reference.  Shift boundary
   by OFFSET bytes and limit to SIZE bytes.  Newly created statements are
   added to ITER.  */

static tree
chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
			     tree size, gimple_stmt_iterator *iter)
{
  tree addr = chkp_build_addr_expr (unshare_expr (inner));
  tree t = TREE_TYPE (addr);

  /* addr_tmp = &inner  */
  gimple *stmt = gimple_build_assign (NULL_TREE, addr);
  addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, addr);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  /* shifted = addr_tmp + offset  */
  stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
  tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
  gimple_assign_set_lhs (stmt, shifted);
  gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);

  /* Bounds of SIZE bytes starting at the shifted address,
     intersected with the incoming BOUNDS.  */
  tree bounds2 = chkp_make_bounds (shifted, size, iter, false);

  return chkp_intersect_bounds (bounds, bounds2, iter);
}
3390 | ||
d5e254e1 IE |
3391 | /* Perform narrowing for BOUNDS using bounds computed for field |
3392 | access COMPONENT. ITER meaning is the same as for | |
3393 | chkp_intersect_bounds. */ | |
a278b1c3 | 3394 | |
d5e254e1 IE |
3395 | static tree |
3396 | chkp_narrow_bounds_to_field (tree bounds, tree component, | |
3397 | gimple_stmt_iterator *iter) | |
3398 | { | |
3399 | tree field = TREE_OPERAND (component, 1); | |
3400 | tree size = DECL_SIZE_UNIT (field); | |
3401 | tree field_ptr = chkp_build_addr_expr (component); | |
3402 | tree field_bounds; | |
3403 | ||
3404 | field_bounds = chkp_make_bounds (field_ptr, size, iter, false); | |
3405 | ||
3406 | return chkp_intersect_bounds (field_bounds, bounds, iter); | |
3407 | } | |
3408 | ||
3409 | /* Parse field or array access NODE. | |
3410 | ||
3411 | PTR ouput parameter holds a pointer to the outermost | |
3412 | object. | |
3413 | ||
3414 | BITFIELD output parameter is set to 1 if bitfield is | |
3415 | accessed and to 0 otherwise. If it is 1 then ELT holds | |
3416 | outer component for accessed bit field. | |
3417 | ||
3418 | SAFE outer parameter is set to 1 if access is safe and | |
3419 | checks are not required. | |
3420 | ||
3421 | BOUNDS outer parameter holds bounds to be used to check | |
3422 | access (may be NULL). | |
3423 | ||
3424 | If INNERMOST_BOUNDS is 1 then try to narrow bounds to the | |
3425 | innermost accessed component. */ | |
3426 | static void | |
3427 | chkp_parse_array_and_component_ref (tree node, tree *ptr, | |
3428 | tree *elt, bool *safe, | |
3429 | bool *bitfield, | |
3430 | tree *bounds, | |
3431 | gimple_stmt_iterator *iter, | |
3432 | bool innermost_bounds) | |
3433 | { | |
3434 | tree comp_to_narrow = NULL_TREE; | |
3435 | tree last_comp = NULL_TREE; | |
3436 | bool array_ref_found = false; | |
3437 | tree *nodes; | |
3438 | tree var; | |
3439 | int len; | |
3440 | int i; | |
3441 | ||
3442 | /* Compute tree height for expression. */ | |
3443 | var = node; | |
3444 | len = 1; | |
3445 | while (TREE_CODE (var) == COMPONENT_REF | |
3446 | || TREE_CODE (var) == ARRAY_REF | |
a278b1c3 ML |
3447 | || TREE_CODE (var) == VIEW_CONVERT_EXPR |
3448 | || TREE_CODE (var) == BIT_FIELD_REF) | |
d5e254e1 IE |
3449 | { |
3450 | var = TREE_OPERAND (var, 0); | |
3451 | len++; | |
3452 | } | |
3453 | ||
3454 | gcc_assert (len > 1); | |
3455 | ||
3456 | /* It is more convenient for us to scan left-to-right, | |
3457 | so walk tree again and put all node to nodes vector | |
3458 | in reversed order. */ | |
3459 | nodes = XALLOCAVEC (tree, len); | |
3460 | nodes[len - 1] = node; | |
3461 | for (i = len - 2; i >= 0; i--) | |
3462 | nodes[i] = TREE_OPERAND (nodes[i + 1], 0); | |
3463 | ||
3464 | if (bounds) | |
3465 | *bounds = NULL; | |
3466 | *safe = true; | |
a278b1c3 ML |
3467 | *bitfield = ((TREE_CODE (node) == COMPONENT_REF |
3468 | && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1))) | |
3469 | || TREE_CODE (node) == BIT_FIELD_REF); | |
3470 | /* To get bitfield address we will need outer element. */ | |
d5e254e1 IE |
3471 | if (*bitfield) |
3472 | *elt = nodes[len - 2]; | |
3473 | else | |
3474 | *elt = NULL_TREE; | |
3475 | ||
3476 | /* If we have indirection in expression then compute | |
3477 | outermost structure bounds. Computed bounds may be | |
3478 | narrowed later. */ | |
3479 | if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0])) | |
3480 | { | |
3481 | *safe = false; | |
3482 | *ptr = TREE_OPERAND (nodes[0], 0); | |
3483 | if (bounds) | |
3484 | *bounds = chkp_find_bounds (*ptr, iter); | |
3485 | } | |
3486 | else | |
3487 | { | |
8813a647 | 3488 | gcc_assert (VAR_P (var) |
d5e254e1 IE |
3489 | || TREE_CODE (var) == PARM_DECL |
3490 | || TREE_CODE (var) == RESULT_DECL | |
3491 | || TREE_CODE (var) == STRING_CST | |
3492 | || TREE_CODE (var) == SSA_NAME); | |
3493 | ||
3494 | *ptr = chkp_build_addr_expr (var); | |
0036534f AI |
3495 | |
3496 | /* For hard register cases chkp_build_addr_expr returns INTEGER_CST | |
3497 | and later on chkp_find_bounds will fail to find proper bounds. | |
3498 | In order to avoid that, we find/create bounds right aways using | |
3499 | the var itself. */ | |
3500 | if (VAR_P (var) && DECL_HARD_REGISTER (var)) | |
3501 | *bounds = chkp_make_addressed_object_bounds (var, iter); | |
d5e254e1 IE |
3502 | } |
3503 | ||
3504 | /* In this loop we are trying to find a field access | |
3505 | requiring narrowing. There are two simple rules | |
3506 | for search: | |
3507 | 1. Leftmost array_ref is chosen if any. | |
3508 | 2. Rightmost suitable component_ref is chosen if innermost | |
3509 | bounds are required and no array_ref exists. */ | |
3510 | for (i = 1; i < len; i++) | |
3511 | { | |
3512 | var = nodes[i]; | |
3513 | ||
3514 | if (TREE_CODE (var) == ARRAY_REF) | |
3515 | { | |
3516 | *safe = false; | |
3517 | array_ref_found = true; | |
3518 | if (flag_chkp_narrow_bounds | |
3519 | && !flag_chkp_narrow_to_innermost_arrray | |
3520 | && (!last_comp | |
a5e9e3e4 ML |
3521 | || chkp_may_narrow_to_field (var, |
3522 | TREE_OPERAND (last_comp, 1)))) | |
d5e254e1 IE |
3523 | { |
3524 | comp_to_narrow = last_comp; | |
3525 | break; | |
3526 | } | |
3527 | } | |
3528 | else if (TREE_CODE (var) == COMPONENT_REF) | |
3529 | { | |
3530 | tree field = TREE_OPERAND (var, 1); | |
3531 | ||
3532 | if (innermost_bounds | |
3533 | && !array_ref_found | |
a5e9e3e4 | 3534 | && chkp_narrow_bounds_for_field (var, field)) |
d5e254e1 IE |
3535 | comp_to_narrow = var; |
3536 | last_comp = var; | |
3537 | ||
3538 | if (flag_chkp_narrow_bounds | |
3539 | && flag_chkp_narrow_to_innermost_arrray | |
3540 | && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE) | |
3541 | { | |
3542 | if (bounds) | |
3543 | *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter); | |
3544 | comp_to_narrow = NULL; | |
3545 | } | |
3546 | } | |
a278b1c3 ML |
3547 | else if (TREE_CODE (var) == BIT_FIELD_REF) |
3548 | { | |
3549 | if (flag_chkp_narrow_bounds && bounds) | |
3550 | { | |
3551 | tree offset, size; | |
3552 | chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size); | |
3553 | *bounds | |
3554 | = chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0), | |
3555 | offset, size, iter); | |
3556 | } | |
3557 | } | |
d5e254e1 IE |
3558 | else if (TREE_CODE (var) == VIEW_CONVERT_EXPR) |
3559 | /* Nothing to do for it. */ | |
3560 | ; | |
3561 | else | |
3562 | gcc_unreachable (); | |
3563 | } | |
3564 | ||
3565 | if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds) | |
3566 | *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter); | |
3567 | ||
3568 | if (innermost_bounds && bounds && !*bounds) | |
3569 | *bounds = chkp_find_bounds (*ptr, iter); | |
3570 | } | |
3571 | ||
a278b1c3 ML |
/* Parse BIT_FIELD_REF to a NODE for a given location LOC.  Return OFFSET
   and SIZE in bytes.

   Operand 2 of the BIT_FIELD_REF is the access position in bits and
   operand 1 is the access size in bits.  The byte offset is the bit
   position rounded down (TRUNC_DIV) and the byte size is the bit size
   plus the leftover in-byte bit offset, rounded up (CEIL_DIV), so the
   [offset, offset + size) byte range covers every accessed bit.  */

static
void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
			       tree *size)
{
  /* BITS_PER_UNIT as a size_type_node constant.  */
  tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
  tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
  /* In-byte remainder of the bit position.  */
  tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
  offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);

  tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
  s = size_binop_loc (loc, PLUS_EXPR, s, rem);
  s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
  s = fold_convert (size_type_node, s);

  *offset = offs;
  *size = s;
}
3592 | ||
d5e254e1 IE |
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse bounds previously registered for this address, if any.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the access parser compute (possibly narrowed) bounds
	   for the accessed component; innermost_bounds is true.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Bounds of *p come from the bounds of p.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Bounds of a complex part are the bounds of the whole object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for subsequent queries.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3661 | ||
3662 | /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements | |
3663 | to compute bounds if required. Computed bounds should be available at | |
3664 | position pointed by ITER. | |
3665 | ||
3666 | If PTR_SRC is NULL_TREE then pointer definition is identified. | |
3667 | ||
3668 | If PTR_SRC is not NULL_TREE then ITER points to statements which loads | |
3669 | PTR. If PTR is a any memory reference then ITER points to a statement | |
3670 | after which bndldx will be inserterd. In both cases ITER will be updated | |
3671 | to point to the inserted bndldx statement. */ | |
3672 | ||
3673 | static tree | |
3674 | chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter) | |
3675 | { | |
3676 | tree addr = NULL_TREE; | |
3677 | tree bounds = NULL_TREE; | |
3678 | ||
3679 | if (!ptr_src) | |
3680 | ptr_src = ptr; | |
3681 | ||
3682 | bounds = chkp_get_registered_bounds (ptr_src); | |
3683 | ||
3684 | if (bounds) | |
3685 | return bounds; | |
3686 | ||
3687 | switch (TREE_CODE (ptr_src)) | |
3688 | { | |
3689 | case MEM_REF: | |
3690 | case VAR_DECL: | |
3691 | if (BOUNDED_P (ptr_src)) | |
8813a647 | 3692 | if (VAR_P (ptr) && DECL_REGISTER (ptr)) |
d5e254e1 IE |
3693 | bounds = chkp_get_zero_bounds (); |
3694 | else | |
3695 | { | |
3696 | addr = chkp_build_addr_expr (ptr_src); | |
3697 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3698 | } | |
3699 | else | |
3700 | bounds = chkp_get_nonpointer_load_bounds (); | |
3701 | break; | |
3702 | ||
3703 | case ARRAY_REF: | |
3704 | case COMPONENT_REF: | |
3705 | addr = get_base_address (ptr_src); | |
0036534f AI |
3706 | if (VAR_P (addr) && DECL_HARD_REGISTER (addr)) |
3707 | { | |
3708 | bounds = chkp_get_zero_bounds (); | |
3709 | break; | |
3710 | } | |
d5e254e1 IE |
3711 | if (DECL_P (addr) |
3712 | || TREE_CODE (addr) == MEM_REF | |
3713 | || TREE_CODE (addr) == TARGET_MEM_REF) | |
3714 | { | |
3715 | if (BOUNDED_P (ptr_src)) | |
8813a647 | 3716 | if (VAR_P (ptr) && DECL_REGISTER (ptr)) |
d5e254e1 IE |
3717 | bounds = chkp_get_zero_bounds (); |
3718 | else | |
3719 | { | |
3720 | addr = chkp_build_addr_expr (ptr_src); | |
3721 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3722 | } | |
3723 | else | |
3724 | bounds = chkp_get_nonpointer_load_bounds (); | |
3725 | } | |
3726 | else | |
3727 | { | |
3728 | gcc_assert (TREE_CODE (addr) == SSA_NAME); | |
3729 | bounds = chkp_find_bounds (addr, iter); | |
3730 | } | |
3731 | break; | |
3732 | ||
3733 | case PARM_DECL: | |
8dc19053 ML |
3734 | /* Handled above but failed. */ |
3735 | bounds = chkp_get_invalid_op_bounds (); | |
d5e254e1 IE |
3736 | break; |
3737 | ||
3738 | case TARGET_MEM_REF: | |
3739 | addr = chkp_build_addr_expr (ptr_src); | |
3740 | bounds = chkp_build_bndldx (addr, ptr, iter); | |
3741 | break; | |
3742 | ||
3743 | case SSA_NAME: | |
3744 | bounds = chkp_get_registered_bounds (ptr_src); | |
3745 | if (!bounds) | |
3746 | { | |
355fe088 | 3747 | gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src); |
538dd0b7 | 3748 | gphi_iterator phi_iter; |
d5e254e1 IE |
3749 | |
3750 | bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter); | |
3751 | ||
3752 | gcc_assert (bounds); | |
3753 | ||
538dd0b7 | 3754 | if (gphi *def_phi = dyn_cast <gphi *> (def_stmt)) |
d5e254e1 IE |
3755 | { |
3756 | unsigned i; | |
3757 | ||
538dd0b7 | 3758 | for (i = 0; i < gimple_phi_num_args (def_phi); i++) |
d5e254e1 | 3759 | { |
538dd0b7 | 3760 | tree arg = gimple_phi_arg_def (def_phi, i); |
d5e254e1 | 3761 | tree arg_bnd; |
538dd0b7 | 3762 | gphi *phi_bnd; |
d5e254e1 IE |
3763 | |
3764 | arg_bnd = chkp_find_bounds (arg, NULL); | |
3765 | ||
3766 | /* chkp_get_bounds_by_definition created new phi | |
3767 | statement and phi_iter points to it. | |
3768 | ||
3769 | Previous call to chkp_find_bounds could create | |
3770 | new basic block and therefore change phi statement | |
3771 | phi_iter points to. */ | |
538dd0b7 | 3772 | phi_bnd = phi_iter.phi (); |
d5e254e1 IE |
3773 | |
3774 | add_phi_arg (phi_bnd, arg_bnd, | |
538dd0b7 | 3775 | gimple_phi_arg_edge (def_phi, i), |
d5e254e1 IE |
3776 | UNKNOWN_LOCATION); |
3777 | } | |
3778 | ||
3779 | /* If all bound phi nodes have their arg computed | |
3780 | then we may finish its computation. See | |
3781 | chkp_finish_incomplete_bounds for more details. */ | |
3782 | if (chkp_may_finish_incomplete_bounds ()) | |
3783 | chkp_finish_incomplete_bounds (); | |
3784 | } | |
3785 | ||
3786 | gcc_assert (bounds == chkp_get_registered_bounds (ptr_src) | |
3787 | || chkp_incomplete_bounds (bounds)); | |
3788 | } | |
3789 | break; | |
3790 | ||
3791 | case ADDR_EXPR: | |
afc610db | 3792 | case WITH_SIZE_EXPR: |
d5e254e1 IE |
3793 | bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter); |
3794 | break; | |
3795 | ||
3796 | case INTEGER_CST: | |
918112d3 ML |
3797 | case COMPLEX_CST: |
3798 | case VECTOR_CST: | |
d5e254e1 IE |
3799 | if (integer_zerop (ptr_src)) |
3800 | bounds = chkp_get_none_bounds (); | |
3801 | else | |
3802 | bounds = chkp_get_invalid_op_bounds (); | |
3803 | break; | |
3804 | ||
3805 | default: | |
3806 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3807 | { | |
3808 | fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n", | |
3809 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3810 | print_node (dump_file, "", ptr_src, 0); | |
3811 | } | |
3812 | internal_error ("chkp_find_bounds: Unexpected tree code %s", | |
3813 | get_tree_code_name (TREE_CODE (ptr_src))); | |
3814 | } | |
3815 | ||
3816 | if (!bounds) | |
3817 | { | |
3818 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3819 | { | |
3820 | fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n"); | |
3821 | print_node (dump_file, "", ptr_src, 0); | |
3822 | } | |
3823 | internal_error ("chkp_find_bounds: Cannot find bounds for pointer"); | |
3824 | } | |
3825 | ||
3826 | return bounds; | |
3827 | } | |
3828 | ||
/* Normal case for bounds search without forced narrowing.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  /* NULL_TREE PTR_SRC means bounds are derived from PTR's own
     definition.  */
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3835 | ||
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  /* Non-NULL PTR_SRC selects the "loaded pointer" path of
     chkp_find_bounds_1 (bndldx after *ITER).  */
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3843 | ||
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS itself is a pointer: handle directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each initialized field that may hold
	     pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (field && chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain struct copy: walk all pointer-holding fields and
	   recurse on matching LHS/RHS component refs.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* A RANGE_EXPR index initializes a [lo, hi] slice
		     with the same value.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts the running position;
		     otherwise elements are consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3942 | ||
3943 | /* Add code to copy bounds for assignment of RHS to LHS. | |
3944 | ARG is an iterator pointing ne code position. */ | |
3945 | static void | |
3946 | chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg) | |
3947 | { | |
3948 | gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg; | |
3949 | tree bounds = chkp_find_bounds (rhs, iter); | |
3950 | tree addr = chkp_build_addr_expr(lhs); | |
3951 | ||
3952 | chkp_build_bndstx (addr, rhs, bounds, iter); | |
3953 | } | |
3954 | ||
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush accumulated statements into a static constructor once
	   the per-constructor statement budget is exhausted.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit any remaining statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is always initialized with the address
	   of the var whose bounds it holds.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release pass-global maps; the pass is done with the file.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
4022 | ||
4023 | /* An instrumentation function which is called for each statement | |
4024 | having memory access we want to instrument. It inserts check | |
4025 | code and bounds copy code. | |
4026 | ||
4027 | ITER points to statement to instrument. | |
4028 | ||
4029 | NODE holds memory access in statement to check. | |
4030 | ||
4031 | LOC holds the location information for statement. | |
4032 | ||
4033 | DIRFLAGS determines whether access is read or write. | |
4034 | ||
4035 | ACCESS_OFFS should be added to address used in NODE | |
4036 | before check. | |
4037 | ||
4038 | ACCESS_SIZE holds size of checked access. | |
4039 | ||
4040 | SAFE indicates if NODE access is safe and should not be | |
4041 | checked. */ | |
4042 | static void | |
4043 | chkp_process_stmt (gimple_stmt_iterator *iter, tree node, | |
4044 | location_t loc, tree dirflag, | |
4045 | tree access_offs, tree access_size, | |
4046 | bool safe) | |
4047 | { | |
4048 | tree node_type = TREE_TYPE (node); | |
4049 | tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type); | |
4050 | tree addr_first = NULL_TREE; /* address of the first accessed byte */ | |
4051 | tree addr_last = NULL_TREE; /* address of the last accessed byte */ | |
4052 | tree ptr = NULL_TREE; /* a pointer used for dereference */ | |
4053 | tree bounds = NULL_TREE; | |
0036534f | 4054 | bool reg_store = false; |
d5e254e1 IE |
4055 | |
4056 | /* We do not need instrumentation for clobbers. */ | |
4057 | if (dirflag == integer_one_node | |
4058 | && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN | |
4059 | && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter)))) | |
4060 | return; | |
4061 | ||
4062 | switch (TREE_CODE (node)) | |
4063 | { | |
4064 | case ARRAY_REF: | |
4065 | case COMPONENT_REF: | |
4066 | { | |
4067 | bool bitfield; | |
4068 | tree elt; | |
4069 | ||
4070 | if (safe) | |
4071 | { | |
4072 | /* We are not going to generate any checks, so do not | |
4073 | generate bounds as well. */ | |
4074 | addr_first = chkp_build_addr_expr (node); | |
4075 | break; | |
4076 | } | |
4077 | ||
4078 | chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe, | |
4079 | &bitfield, &bounds, iter, false); | |
4080 | ||
4081 | /* Break if there is no dereference and operation is safe. */ | |
4082 | ||
4083 | if (bitfield) | |
4084 | { | |
4085 | tree field = TREE_OPERAND (node, 1); | |
4086 | ||
4087 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST) | |
4088 | size = DECL_SIZE_UNIT (field); | |
4089 | ||
4090 | if (elt) | |
4091 | elt = chkp_build_addr_expr (elt); | |
4092 | addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr); | |
4093 | addr_first = fold_build_pointer_plus_loc (loc, | |
4094 | addr_first, | |
4095 | byte_position (field)); | |
4096 | } | |
4097 | else | |
4098 | addr_first = chkp_build_addr_expr (node); | |
4099 | } | |
4100 | break; | |
4101 | ||
4102 | case INDIRECT_REF: | |
4103 | ptr = TREE_OPERAND (node, 0); | |
4104 | addr_first = ptr; | |
4105 | break; | |
4106 | ||
4107 | case MEM_REF: | |
4108 | ptr = TREE_OPERAND (node, 0); | |
4109 | addr_first = chkp_build_addr_expr (node); | |
4110 | break; | |
4111 | ||
4112 | case TARGET_MEM_REF: | |
4113 | ptr = TMR_BASE (node); | |
4114 | addr_first = chkp_build_addr_expr (node); | |
4115 | break; | |
4116 | ||
4117 | case ARRAY_RANGE_REF: | |
4118 | printf("ARRAY_RANGE_REF\n"); | |
4119 | debug_gimple_stmt(gsi_stmt(*iter)); | |
4120 | debug_tree(node); | |
4121 | gcc_unreachable (); | |
4122 | break; | |
4123 | ||
4124 | case BIT_FIELD_REF: | |
4125 | { | |
a278b1c3 | 4126 | tree offset, size; |
d5e254e1 IE |
4127 | |
4128 | gcc_assert (!access_offs); | |
4129 | gcc_assert (!access_size); | |
4130 | ||
a278b1c3 | 4131 | chkp_parse_bit_field_ref (node, loc, &offset, &size); |
d5e254e1 IE |
4132 | |
4133 | chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc, | |
a278b1c3 | 4134 | dirflag, offset, size, safe); |
d5e254e1 IE |
4135 | return; |
4136 | } | |
4137 | break; | |
4138 | ||
4139 | case VAR_DECL: | |
4140 | case RESULT_DECL: | |
4141 | case PARM_DECL: | |
4142 | if (dirflag != integer_one_node | |
4143 | || DECL_REGISTER (node)) | |
4144 | return; | |
4145 | ||
4146 | safe = true; | |
4147 | addr_first = chkp_build_addr_expr (node); | |
4148 | break; | |
4149 | ||
4150 | default: | |
4151 | return; | |
4152 | } | |
4153 | ||
4154 | /* If addr_last was not computed then use (addr_first + size - 1) | |
4155 | expression to compute it. */ | |
4156 | if (!addr_last) | |
4157 | { | |
4158 | addr_last = fold_build_pointer_plus_loc (loc, addr_first, size); | |
4159 | addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1); | |
4160 | } | |
4161 | ||
4162 | /* Shift both first_addr and last_addr by access_offs if specified. */ | |
4163 | if (access_offs) | |
4164 | { | |
4165 | addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs); | |
4166 | addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs); | |
4167 | } | |
4168 | ||
0036534f AI |
4169 | if (dirflag == integer_one_node) |
4170 | { | |
4171 | tree base = get_base_address (node); | |
4172 | if (VAR_P (base) && DECL_HARD_REGISTER (base)) | |
4173 | reg_store = true; | |
4174 | } | |
4175 | ||
d5e254e1 IE |
4176 | /* Generate bndcl/bndcu checks if memory access is not safe. */ |
4177 | if (!safe) | |
4178 | { | |
4179 | gimple_stmt_iterator stmt_iter = *iter; | |
4180 | ||
4181 | if (!bounds) | |
4182 | bounds = chkp_find_bounds (ptr, iter); | |
4183 | ||
4184 | chkp_check_mem_access (addr_first, addr_last, bounds, | |
4185 | stmt_iter, loc, dirflag); | |
4186 | } | |
4187 | ||
4188 | /* We need to store bounds in case pointer is stored. */ | |
4189 | if (dirflag == integer_one_node | |
0036534f | 4190 | && !reg_store |
d5e254e1 IE |
4191 | && chkp_type_has_pointer (node_type) |
4192 | && flag_chkp_store_bounds) | |
4193 | { | |
355fe088 | 4194 | gimple *stmt = gsi_stmt (*iter); |
d5e254e1 IE |
4195 | tree rhs1 = gimple_assign_rhs1 (stmt); |
4196 | enum tree_code rhs_code = gimple_assign_rhs_code (stmt); | |
4197 | ||
4198 | if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS) | |
4199 | chkp_walk_pointer_assignments (node, rhs1, iter, | |
4200 | chkp_copy_bounds_for_elem); | |
4201 | else | |
4202 | { | |
4203 | bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt); | |
4204 | chkp_build_bndstx (addr_first, rhs1, bounds, iter); | |
4205 | } | |
4206 | } | |
4207 | } | |
4208 | ||
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.
     The walk above may have inserted such calls before ASSIGN, so scan
     backwards from ITER until we are back at ASSIGN.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only chkp builtins are expected among inserted calls.  */
	  gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
		      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4247 | ||
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a BB-terminating statement are misplaced;
	   move them onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4305 | ||
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
			       void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*op) == FUNCTION_DECL
      && chkp_instrumentable_p (*op)
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
	  /* For builtins we replace pointers only for selected
	     function and functions having definitions.  */
	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
	      && (chkp_instrument_normal_builtin (*op)
		  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      /* Do not clone an instrumentation clone: it already refers to
	 instrumented code.  */
      if (!node->instrumentation_clone)
	clone = chkp_maybe_create_clone (*op);

      if (clone)
	*op = clone->decl;
      /* No need to walk into a FUNCTION_DECL's subtrees.  */
      *walk_subtrees = 0;
    }

  return NULL;
}
4335 | ||
4336 | /* This function searches for function pointers in statement | |
4337 | pointed by GSI and replaces them with pointers to instrumented | |
4338 | function versions. */ | |
4339 | static void | |
4340 | chkp_replace_function_pointers (gimple_stmt_iterator *gsi) | |
4341 | { | |
355fe088 | 4342 | gimple *stmt = gsi_stmt (*gsi); |
d5e254e1 IE |
4343 | /* For calls we want to walk call args only. */ |
4344 | if (gimple_code (stmt) == GIMPLE_CALL) | |
4345 | { | |
4346 | unsigned i; | |
4347 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
4348 | walk_tree (gimple_call_arg_ptr (stmt, i), | |
4349 | chkp_replace_function_pointer, NULL, NULL); | |
4350 | } | |
4351 | else | |
4352 | walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL); | |
4353 | } | |
4354 | ||
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.

   Assignments have both sides checked (LHS as a store, RHS as a
   load), returns get bounds attached, and calls are rewritten to
   pass bounds.  Afterwards, incoming bounds of address-taken
   parameters are stored into the bounds table.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* SAFE is true for checker static constructors ("chkp ctor"
     attribute); their accesses need no checks.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Capture the successor up front — presumably instrumentation
	 may split BB and insert new blocks after it; TODO confirm.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  /* Redirect function pointers in S to instrumented
	     versions before processing the statement itself.  */
	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is checked as a store (integer_one_node),
		 RHS operands as loads (integer_zero_node).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  Remove them after advancing I so
	     the iterator stays valid.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* A bounded pointer param: store its incoming bounds
		 at the param's address in the entry block.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* An aggregate param containing pointers: store bounds
		 for each pointer slot found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  /* Byte offset of this pointer slot within the
		     aggregate.  */
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each slot consumes one bounds parameter; advance
		     past it.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4490 | ||
4491 | /* Find init/null/copy_ptr_bounds calls and replace them | |
4492 | with assignments. It should allow better code | |
4493 | optimization. */ | |
4494 | ||
4495 | static void | |
4496 | chkp_remove_useless_builtins () | |
4497 | { | |
4498 | basic_block bb; | |
4499 | gimple_stmt_iterator gsi; | |
4500 | ||
4501 | FOR_EACH_BB_FN (bb, cfun) | |
4502 | { | |
4503 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
4504 | { | |
355fe088 | 4505 | gimple *stmt = gsi_stmt (gsi); |
d5e254e1 IE |
4506 | tree fndecl; |
4507 | enum built_in_function fcode; | |
4508 | ||
4509 | /* Find builtins returning first arg and replace | |
4510 | them with assignments. */ | |
4511 | if (gimple_code (stmt) == GIMPLE_CALL | |
4512 | && (fndecl = gimple_call_fndecl (stmt)) | |
4513 | && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL | |
4514 | && (fcode = DECL_FUNCTION_CODE (fndecl)) | |
4515 | && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS | |
4516 | || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS | |
4517 | || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS | |
4518 | || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS)) | |
4519 | { | |
4520 | tree res = gimple_call_arg (stmt, 0); | |
4521 | update_call_from_tree (&gsi, res); | |
4522 | stmt = gsi_stmt (gsi); | |
4523 | update_stmt (stmt); | |
4524 | } | |
4525 | } | |
4526 | } | |
4527 | } | |
4528 | ||
/* Initialize pass.  Resets per-function instrumentation state:
   statement marks, bounds maps, cached bounds variables, and
   dominance info.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear any instrumentation marks left on statements.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Maps deleted here (chkp_reg_bounds, chkp_bound_vars,
     chkp_bounds_map) are not freed by chkp_fini and so may hold a
     previous function's instance; the others are freed in chkp_fini
     and are simply allocated fresh.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset cached per-function trees/blocks.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4574 | ||
/* Finalize instrumentation pass.  Frees the per-function sets/maps
   allocated by chkp_init (the remaining maps are reused and deleted
   lazily by the next chkp_init), releases dominance info, and
   clears cached trees.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached per-function state so it cannot leak into the next
     function.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4595 | ||
/* Main instrumentation pass function.  Runs the full pipeline:
   init state, instrument memory accesses/calls/returns, simplify
   trivial chkp builtins, mark the function as instrumented, repair
   the CFG, and tear down pass state.  Always returns 0 (no extra
   TODO flags).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4614 | ||
4615 | /* Instrumentation pass gate. */ | |
4616 | static bool | |
4617 | chkp_gate (void) | |
4618 | { | |
5283d1ec TV |
4619 | cgraph_node *node = cgraph_node::get (cfun->decl); |
4620 | return ((node != NULL | |
4621 | && node->instrumentation_clone) | |
4622 | || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))); | |
d5e254e1 IE |
4623 | } |
4624 | ||
namespace {

/* Pass descriptor for the chkp instrumentation pass.  Requires SSA
   and CFG; verifies IL and updates SSA when done.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* GIMPLE pass wrapper delegating gating and execution to
   chkp_gate / chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4667 | ||
/* Create an instance of the chkp pass for the pass manager.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4673 | ||
4674 | #include "gt-tree-chkp.h" |