/* Detect paths through the CFG which can never be executed in a conforming
   program and isolate them.

   Copyright (C) 2013-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
40e23961 | 25 | #include "alias.h" |
c7131fb2 | 26 | #include "backend.h" |
8fdc414d | 27 | #include "tree.h" |
c7131fb2 AM |
28 | #include "gimple.h" |
29 | #include "hard-reg-set.h" | |
30 | #include "ssa.h" | |
31 | #include "options.h" | |
40e23961 | 32 | #include "fold-const.h" |
8fdc414d | 33 | #include "flags.h" |
2fb9a547 | 34 | #include "internal-fn.h" |
5be5c238 AM |
35 | #include "gimple-iterator.h" |
36 | #include "gimple-walk.h" | |
8fdc414d | 37 | #include "tree-ssa.h" |
8fdc414d JL |
38 | #include "cfgloop.h" |
39 | #include "tree-pass.h" | |
5e94175f | 40 | #include "tree-cfg.h" |
b4dfdc11 MG |
41 | #include "diagnostic-core.h" |
42 | #include "intl.h" | |
8fdc414d JL |
43 | |
44 | ||
/* Set to true whenever this pass modifies the CFG (isolating a path or
   removing outgoing edges).  Checked at the end of the pass to decide
   whether dominance info must be freed and cleanup-cfg requested.  */
static bool cfg_altered;
46 | ||
6ab7a3d7 | 47 | /* Callback for walk_stmt_load_store_ops. |
ae93744d | 48 | |
6ab7a3d7 JL |
49 | Return TRUE if OP will dereference the tree stored in DATA, FALSE |
50 | otherwise. | |
51 | ||
52 | This routine only makes a superficial check for a dereference. Thus, | |
53 | it must only be used if it is safe to return a false negative. */ | |
54 | static bool | |
9f1363cd | 55 | check_loadstore (gimple stmt, tree op, tree, void *data) |
6ab7a3d7 JL |
56 | { |
57 | if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF) | |
58 | && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0)) | |
115d1851 JL |
59 | { |
60 | TREE_THIS_VOLATILE (op) = 1; | |
61 | TREE_SIDE_EFFECTS (op) = 1; | |
62 | update_stmt (stmt); | |
63 | return true; | |
64 | } | |
6ab7a3d7 JL |
65 | return false; |
66 | } | |
67 | ||
/* Insert a trap after SI and remove SI and all statements after the trap.

   SI_P points at the statement exhibiting undefined behaviour; OP is the
   pointer being dereferenced (possibly null_pointer_node).

   Note the two walk_stmt_load_store_ops calls below also have the side
   effect (via check_loadstore) of marking the offending memory reference
   volatile so it is not optimized away.  */

static void
insert_trap_and_remove_trailing_statements (gimple_stmt_iterator *si_p, tree op)
{
  /* We want the NULL pointer dereference to actually occur so that
     code that wishes to catch the signal can do so.

     If the dereference is a load, then there's nothing to do as the
     LHS will be a throw-away SSA_NAME and the RHS is the NULL dereference.

     If the dereference is a store and we can easily transform the RHS,
     then simplify the RHS to enable more DCE.   Note that we require the
     statement to be a GIMPLE_ASSIGN which filters out calls on the RHS.  */
  gimple stmt = gsi_stmt (*si_p);
  /* First walk checks only stores (load callback is NULL).  */
  if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore)
      && is_gimple_assign (stmt)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
    {
      /* We just need to turn the RHS into zero converted to the proper
	 type.  */
      tree type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_code (stmt, INTEGER_CST);
      gimple_assign_set_rhs1 (stmt, fold_convert (type, integer_zero_node));
      update_stmt (stmt);
    }

  gcall *new_stmt
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, new_stmt);

  /* If we had a NULL pointer dereference, then we want to insert the
     __builtin_trap after the statement, for the other cases we want
     to insert before the statement.  */
  if (walk_stmt_load_store_ops (stmt, (void *)op,
				check_loadstore,
				check_loadstore))
    gsi_insert_after (si_p, seq, GSI_NEW_STMT);
  else
    gsi_insert_before (si_p, seq, GSI_NEW_STMT);

  /* We must remove statements from the end of the block so that we
     never reference a released SSA_NAME.  */
  basic_block bb = gimple_bb (gsi_stmt (*si_p));
  /* Repeatedly delete the last statement of the block until we reach the
     trap (now pointed at by *SI_P); deleting back-to-front guarantees a
     def is released only after all its uses are gone.  */
  for (gimple_stmt_iterator si = gsi_last_bb (bb);
       gsi_stmt (si) != gsi_stmt (*si_p);
       si = gsi_last_bb (bb))
    {
      stmt = gsi_stmt (si);
      unlink_stmt_vdef (stmt);
      gsi_remove (&si, true);
      release_defs (stmt);
    }
}
123 | ||
/* BB when reached via incoming edge E will exhibit undefined behaviour
   at STMT.  Isolate and optimize the path which exhibits undefined
   behaviour.

   Isolation is simple.  Duplicate BB and redirect E to BB'.

   Optimization is simple as well.  Replace STMT in BB' with an
   unconditional trap and remove all outgoing edges from BB'.

   If RET_ZERO, do not trap, only return NULL.  In that case STMT is
   expected to be a GIMPLE_RETURN whose value is replaced with zero,
   and the duplicate keeps its outgoing edges.

   OP is the pointer operand involved in the undefined behaviour (passed
   through to insert_trap_and_remove_trailing_statements).

   DUPLICATE is a pre-existing duplicate, use it as BB' if it exists.

   Return BB'.  */

basic_block
isolate_path (basic_block bb, basic_block duplicate,
	      edge e, gimple stmt, tree op, bool ret_zero)
{
  gimple_stmt_iterator si, si2;
  edge_iterator ei;
  edge e2;

  /* First duplicate BB if we have not done so already and remove all
     the duplicate's outgoing edges as duplicate is going to unconditionally
     trap.  Removing the outgoing edges is both an optimization and ensures
     we don't need to do any PHI node updates.  */
  if (!duplicate)
    {
      duplicate = duplicate_block (bb, NULL, NULL);
      if (!ret_zero)
	for (ei = ei_start (duplicate->succs); (e2 = ei_safe_edge (ei)); )
	  remove_edge (e2);
    }

  /* Complete the isolation step by redirecting E to reach DUPLICATE.  */
  e2 = redirect_edge_and_branch (e, duplicate);
  if (e2)
    flush_pending_stmts (e2);


  /* There may be more than one statement in DUPLICATE which exhibits
     undefined behaviour.  Ultimately we want the first such statement in
     DUPLICATE so that we're able to delete as much code as possible.

     So each time we discover undefined behaviour in DUPLICATE, search for
     the statement which triggers undefined behaviour.  If found, then
     transform the statement into a trap and delete everything after the
     statement.  If not found, then this particular instance was subsumed by
     an earlier instance of undefined behaviour and there's nothing to do.

     This is made more complicated by the fact that we have STMT, which is in
     BB rather than in DUPLICATE.  So we set up two iterators, one for each
     block and walk forward looking for STMT in BB, advancing each iterator at
     each step.

     When we find STMT the second iterator should point to STMT's equivalent in
     duplicate.  If DUPLICATE ends before STMT is found in BB, then there's
     nothing to do.

     Ignore labels and debug statements.  */
  si = gsi_start_nondebug_after_labels_bb (bb);
  si2 = gsi_start_nondebug_after_labels_bb (duplicate);
  while (!gsi_end_p (si) && !gsi_end_p (si2) && gsi_stmt (si) != stmt)
    {
      gsi_next_nondebug (&si);
      gsi_next_nondebug (&si2);
    }

  /* This would be an indicator that we never found STMT in BB, which should
     never happen.  */
  gcc_assert (!gsi_end_p (si));

  /* If we did not run to the end of DUPLICATE, then SI points to STMT and
     SI2 points to the duplicate of STMT in DUPLICATE.  Insert a trap
     before SI2 and remove SI2 and all trailing statements.  */
  if (!gsi_end_p (si2))
    {
      if (ret_zero)
	{
	  /* Replace the returned value with zero instead of trapping;
	     returning the address of a local is only UB if used.  */
	  greturn *ret = as_a <greturn *> (gsi_stmt (si2));
	  tree zero = build_zero_cst (TREE_TYPE (gimple_return_retval (ret)));
	  gimple_return_set_retval (ret, zero);
	  update_stmt (ret);
	}
      else
	insert_trap_and_remove_trailing_statements (&si2, op);
    }

  return duplicate;
}
215 | ||
/* Look for PHI nodes which feed statements in the same block where
   the value of the PHI node implies the statement is erroneous.

   For example, a NULL PHI arg value which then feeds a pointer
   dereference.

   Also detects PHI arguments that are the address of a local variable
   or parameter flowing into a GIMPLE_RETURN in the same block
   (-Wreturn-local-addr), which is isolated with a zero return value
   rather than a trap.

   When found isolate and optimize the path associated with the PHI
   argument feeding the erroneous statement.  */
static void
find_implicit_erroneous_behaviour (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
	 block where the block has any abnormal outgoing edges.

	 We might be able to relax this in the future.  We have to detect
	 when we have to split the block with the NULL dereference and
	 the trap we insert.  We have to preserve abnormal edges out
	 of the isolated block which in turn means updating PHIs at
	 the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
	continue;

      /* First look for a PHI which sets a pointer to NULL and which
	 is then dereferenced within BB.  This is somewhat overly
	 conservative, but probably catches most of the interesting
	 cases.  */
      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gphi *phi = si.phi ();
	  tree lhs = gimple_phi_result (phi);

	  /* If the result is not a pointer, then there is no need to
	     examine the arguments.  */
	  if (!POINTER_TYPE_P (TREE_TYPE (lhs)))
	    continue;

	  /* PHI produces a pointer result.  See if any of the PHI's
	     arguments are NULL.

	     When we remove an edge, we want to reprocess the current
	     index, hence the ugly way we update I for each iteration.  */
	  basic_block duplicate = NULL;
	  for (unsigned i = 0, next_i = 0;
	       i < gimple_phi_num_args (phi);
	       i = next_i)
	    {
	      tree op = gimple_phi_arg_def (phi, i);
	      edge e = gimple_phi_arg_edge (phi, i);
	      imm_use_iterator iter;
	      gimple use_stmt;

	      next_i = i + 1;

	      /* Case 1: the PHI argument is the address of an auto
		 variable or parameter; warn and isolate any path that
		 returns it from this block.  */
	      if (TREE_CODE (op) == ADDR_EXPR)
		{
		  tree valbase = get_base_address (TREE_OPERAND (op, 0));
		  if ((TREE_CODE (valbase) == VAR_DECL
		       && !is_global_var (valbase))
		      || TREE_CODE (valbase) == PARM_DECL)
		    {
		      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
			{
			  greturn *return_stmt
			    = dyn_cast <greturn *> (use_stmt);
			  if (!return_stmt)
			    continue;

			  if (gimple_return_retval (return_stmt) != lhs)
			    continue;

			  if (warning_at (gimple_location (use_stmt),
					  OPT_Wreturn_local_addr,
					  "function may return address "
					  "of local variable"))
			    inform (DECL_SOURCE_LOCATION(valbase),
				    "declared here");

			  /* Only isolate when the return is in BB itself;
			     cross-block cases need more complex isolation.  */
			  if (gimple_bb (use_stmt) == bb)
			    {
			      duplicate = isolate_path (bb, duplicate, e,
							use_stmt, lhs, true);

			      /* When we remove an incoming edge, we need to
				 reprocess the Ith element.  */
			      next_i = i;
			      cfg_altered = true;
			    }
			}
		    }
		}

	      /* Case 2: the PHI argument is a literal NULL pointer.  */
	      if (!integer_zerop (op))
		continue;

	      /* We've got a NULL PHI argument.  Now see if the
		 PHI's result is dereferenced within BB.  */
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
		{
		  /* We only care about uses in BB.  Catching cases
		     in other blocks would require more complex path
		     isolation code.   */
		  if (gimple_bb (use_stmt) != bb)
		    continue;

		  if (infer_nonnull_range (use_stmt, lhs,
					   flag_isolate_erroneous_paths_dereference,
					   flag_isolate_erroneous_paths_attribute))

		    {
		      duplicate = isolate_path (bb, duplicate, e,
						use_stmt, lhs, false);

		      /* When we remove an incoming edge, we need to
			 reprocess the Ith element.  */
		      next_i = i;
		      cfg_altered = true;
		    }
		}
	    }
	}
    }
}
344 | ||
/* Look for statements which exhibit erroneous behaviour.  For example
   a NULL pointer dereference.

   Two cases are handled: a statement that unconditionally requires a
   non-NULL pointer (replaced with a trap), and a GIMPLE_RETURN of the
   address of a local variable (warned about and replaced with a zero
   return value).

   When found, optimize the block containing the erroneous behaviour.  */
static void
find_explicit_erroneous_behaviour (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator si;

      /* Out of an abundance of caution, do not isolate paths to a
	 block where the block has any abnormal outgoing edges.

	 We might be able to relax this in the future.  We have to detect
	 when we have to split the block with the NULL dereference and
	 the trap we insert.  We have to preserve abnormal edges out
	 of the isolated block which in turn means updating PHIs at
	 the targets of those abnormal outgoing edges.  */
      if (has_abnormal_or_eh_outgoing_edge_p (bb))
	continue;

      /* Now look at the statements in the block and see if any of
	 them explicitly dereference a NULL pointer.  This happens
	 because of jump threading and constant propagation.  */
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);

	  /* By passing null_pointer_node, we can use infer_nonnull_range
	     to detect explicit NULL pointer dereferences and other uses
	     where a non-NULL value is required.  */
	  if (infer_nonnull_range (stmt, null_pointer_node,
				   flag_isolate_erroneous_paths_dereference,
				   flag_isolate_erroneous_paths_attribute))
	    {
	      insert_trap_and_remove_trailing_statements (&si,
							  null_pointer_node);

	      /* And finally, remove all outgoing edges from BB.  */
	      edge e;
	      for (edge_iterator ei = ei_start (bb->succs);
		   (e = ei_safe_edge (ei)); )
		remove_edge (e);

	      /* Ignore any more operands on this statement and
		 continue the statement iterator (which should
		 terminate its loop immediately).  */
	      cfg_altered = true;
	      break;
	    }

	  /* Detect returning the address of a local variable.  This only
	     becomes undefined behavior if the result is used, so we do not
	     insert a trap and only return NULL instead.  */
	  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
	    {
	      tree val = gimple_return_retval (return_stmt);
	      if (val && TREE_CODE (val) == ADDR_EXPR)
		{
		  tree valbase = get_base_address (TREE_OPERAND (val, 0));
		  if ((TREE_CODE (valbase) == VAR_DECL
		       && !is_global_var (valbase))
		      || TREE_CODE (valbase) == PARM_DECL)
		    {
		      /* We only need it for this particular case.  */
		      calculate_dominance_info (CDI_POST_DOMINATORS);
		      const char* msg;
		      /* If BB post-dominates the entry block, the bad
			 return always executes; word the warning
			 accordingly.  */
		      bool always_executed = dominated_by_p
			(CDI_POST_DOMINATORS,
			 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
		      if (always_executed)
			msg = N_("function returns address of local variable");
		      else
			msg = N_("function may return address of "
				 "local variable");

		      if (warning_at (gimple_location (stmt),
				      OPT_Wreturn_local_addr, msg))
			inform (DECL_SOURCE_LOCATION(valbase), "declared here");
		      tree zero = build_zero_cst (TREE_TYPE (val));
		      gimple_return_set_retval (return_stmt, zero);
		      update_stmt (stmt);
		    }
		}
	    }
	}
    }
}
b4dfdc11 | 436 | |
/* Search the function for statements which, if executed, would cause
   the program to fault such as a dereference of a NULL pointer.

   Such a program can't be valid if such a statement was to execute
   according to ISO standards.

   We detect explicit NULL pointer dereferences as well as those implied
   by a PHI argument having a NULL value which unconditionally flows into
   a dereference in the same block as the PHI.

   In the former case we replace the offending statement with an
   unconditional trap and eliminate the outgoing edges from the statement's
   basic block.  This may expose secondary optimization opportunities.

   In the latter case, we isolate the path(s) with the NULL PHI
   feeding the dereference.  We can then replace the offending statement
   and eliminate the outgoing edges in the duplicate.  Again, this may
   expose secondary optimization opportunities.

   A warning for both cases may be advisable as well.

   Other statically detectable violations of the ISO standard could be
   handled in a similar way, such as out-of-bounds array indexing.

   Returns the TODO flags for the pass manager (cleanup-cfg and
   update-ssa when the CFG was altered, zero otherwise).  */

static unsigned int
gimple_ssa_isolate_erroneous_paths (void)
{
  initialize_original_copy_tables ();

  /* Search all the blocks for edges which, if traversed, will
     result in undefined behaviour.  */
  cfg_altered = false;

  /* First handle cases where traversal of a particular edge
     triggers undefined behaviour.  These cases require creating
     duplicate blocks and thus new SSA_NAMEs.

     We want that process complete prior to the phase where we start
     removing edges from the CFG.  Edge removal may ultimately result in
     removal of PHI nodes and thus releasing SSA_NAMEs back to the
     name manager.

     If the two processes run in parallel we could release an SSA_NAME
     back to the manager but we could still have dangling references
     to the released SSA_NAME in unreachable blocks.  Running the implicit
     phase first ensures that any released names do not have dangling
     references in the IL.  */
  find_implicit_erroneous_behaviour ();
  find_explicit_erroneous_behaviour ();

  free_original_copy_tables ();

  /* We scramble the CFG and loop structures a bit, clean up
     appropriately.  We really should incrementally update the
     loop structures, in theory it shouldn't be that hard.  */
  if (cfg_altered)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      loops_state_set (LOOPS_NEED_FIXUP);
      return TODO_cleanup_cfg | TODO_update_ssa;
    }
  return 0;
}
500 | ||
namespace {
/* Pass metadata for the isolate-paths GIMPLE pass.  */
const pass_data pass_data_isolate_erroneous_paths =
{
  GIMPLE_PASS, /* type */
  "isolate-paths", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ISOLATE_ERRONEOUS_PATHS, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper; the real work is done in
   gimple_ssa_isolate_erroneous_paths.  */
class pass_isolate_erroneous_paths : public gimple_opt_pass
{
public:
  pass_isolate_erroneous_paths (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_isolate_erroneous_paths, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_isolate_erroneous_paths (m_ctxt); }
  virtual bool gate (function *)
  {
    /* If we do not have a suitable builtin function for the trap statement,
       then do not perform the optimization.
       NOTE(review): the check below only tests the command-line flags, not
       the availability of __builtin_trap — confirm whether the comment or
       the check is out of date.  */
    return (flag_isolate_erroneous_paths_dereference != 0
	    || flag_isolate_erroneous_paths_attribute != 0);
  }

  virtual unsigned int execute (function *)
  {
    return gimple_ssa_isolate_erroneous_paths ();
  }

}; // class pass_isolate_erroneous_paths
}
539 | ||
540 | gimple_opt_pass * | |
541 | make_pass_isolate_erroneous_paths (gcc::context *ctxt) | |
542 | { | |
543 | return new pass_isolate_erroneous_paths (ctxt); | |
544 | } |