]>
Commit | Line | Data |
---|---|---|
5be5c238 AM |
1 | /* Gimple walk support. |
2 | ||
5624e564 | 3 | Copyright (C) 2007-2015 Free Software Foundation, Inc. |
5be5c238 AM |
4 | Contributed by Aldy Hernandez <aldyh@redhat.com> |
5 | ||
6 | This file is part of GCC. | |
7 | ||
8 | GCC is free software; you can redistribute it and/or modify it under | |
9 | the terms of the GNU General Public License as published by the Free | |
10 | Software Foundation; either version 3, or (at your option) any later | |
11 | version. | |
12 | ||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 | for more details. | |
17 | ||
18 | You should have received a copy of the GNU General Public License | |
19 | along with GCC; see the file COPYING3. If not see | |
20 | <http://www.gnu.org/licenses/>. */ | |
21 | ||
22 | #include "config.h" | |
23 | #include "system.h" | |
24 | #include "coretypes.h" | |
c7131fb2 | 25 | #include "backend.h" |
5be5c238 | 26 | #include "tree.h" |
c7131fb2 | 27 | #include "gimple.h" |
60393bbc | 28 | #include "hard-reg-set.h" |
c7131fb2 | 29 | #include "alias.h" |
917c68f5 | 30 | #include "fold-const.h" |
5be5c238 AM |
31 | #include "gimple-iterator.h" |
32 | #include "gimple-walk.h" | |
917c68f5 | 33 | #include "stmt.h" |
5be5c238 AM |
34 | |
35 | /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt | |
36 | on each one. WI is as in walk_gimple_stmt. | |
37 | ||
38 | If walk_gimple_stmt returns non-NULL, the walk is stopped, and the | |
39 | value is stored in WI->CALLBACK_RESULT. Also, the statement that | |
40 | produced the value is returned if this statement has not been | |
41 | removed by a callback (wi->removed_stmt). If the statement has | |
42 | been removed, NULL is returned. | |
43 | ||
44 | Otherwise, all the statements are walked and NULL returned. */ | |
45 | ||
46 | gimple | |
47 | walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt, | |
48 | walk_tree_fn callback_op, struct walk_stmt_info *wi) | |
49 | { | |
50 | gimple_stmt_iterator gsi; | |
51 | ||
52 | for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); ) | |
53 | { | |
54 | tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi); | |
55 | if (ret) | |
56 | { | |
57 | /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist | |
58 | to hold it. */ | |
59 | gcc_assert (wi); | |
60 | wi->callback_result = ret; | |
61 | ||
62 | return wi->removed_stmt ? NULL : gsi_stmt (gsi); | |
63 | } | |
64 | ||
65 | if (!wi->removed_stmt) | |
66 | gsi_next (&gsi); | |
67 | } | |
68 | ||
69 | if (wi) | |
70 | wi->callback_result = NULL_TREE; | |
71 | ||
72 | return NULL; | |
73 | } | |
74 | ||
75 | ||
76 | /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't | |
77 | changed by the callbacks. */ | |
78 | ||
79 | gimple | |
80 | walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt, | |
81 | walk_tree_fn callback_op, struct walk_stmt_info *wi) | |
82 | { | |
83 | gimple_seq seq2 = seq; | |
84 | gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi); | |
85 | gcc_assert (seq2 == seq); | |
86 | return ret; | |
87 | } | |
88 | ||
89 | ||
90 | /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */ | |
91 | ||
92 | static tree | |
538dd0b7 | 93 | walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op, |
5be5c238 AM |
94 | struct walk_stmt_info *wi) |
95 | { | |
96 | tree ret, op; | |
97 | unsigned noutputs; | |
98 | const char **oconstraints; | |
99 | unsigned i, n; | |
100 | const char *constraint; | |
101 | bool allows_mem, allows_reg, is_inout; | |
102 | ||
103 | noutputs = gimple_asm_noutputs (stmt); | |
104 | oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); | |
105 | ||
106 | if (wi) | |
107 | wi->is_lhs = true; | |
108 | ||
109 | for (i = 0; i < noutputs; i++) | |
110 | { | |
111 | op = gimple_asm_output_op (stmt, i); | |
112 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); | |
113 | oconstraints[i] = constraint; | |
5be5c238 | 114 | if (wi) |
917c68f5 BRF |
115 | { |
116 | if (parse_output_constraint (&constraint, i, 0, 0, &allows_mem, | |
117 | &allows_reg, &is_inout)) | |
118 | wi->val_only = (allows_reg || !allows_mem); | |
119 | } | |
5be5c238 AM |
120 | ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); |
121 | if (ret) | |
122 | return ret; | |
123 | } | |
124 | ||
125 | n = gimple_asm_ninputs (stmt); | |
126 | for (i = 0; i < n; i++) | |
127 | { | |
128 | op = gimple_asm_input_op (stmt, i); | |
129 | constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op))); | |
917c68f5 | 130 | |
5be5c238 AM |
131 | if (wi) |
132 | { | |
917c68f5 BRF |
133 | if (parse_input_constraint (&constraint, 0, 0, noutputs, 0, |
134 | oconstraints, &allows_mem, &allows_reg)) | |
135 | { | |
136 | wi->val_only = (allows_reg || !allows_mem); | |
137 | /* Although input "m" is not really a LHS, we need a lvalue. */ | |
138 | wi->is_lhs = !wi->val_only; | |
139 | } | |
5be5c238 AM |
140 | } |
141 | ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); | |
142 | if (ret) | |
143 | return ret; | |
144 | } | |
145 | ||
146 | if (wi) | |
147 | { | |
148 | wi->is_lhs = false; | |
149 | wi->val_only = true; | |
150 | } | |
151 | ||
152 | n = gimple_asm_nlabels (stmt); | |
153 | for (i = 0; i < n; i++) | |
154 | { | |
155 | op = gimple_asm_label_op (stmt, i); | |
156 | ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL); | |
157 | if (ret) | |
158 | return ret; | |
159 | } | |
160 | ||
161 | return NULL_TREE; | |
162 | } | |
163 | ||
164 | ||
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  /* WI may be NULL; in that case no pointer-set de-duplication is done.  */
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      /* Operand 0 is the LHS; walk operands 1..n-1 (the RHS) first.  */
      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      /* Restore the default walk context flags.  */
      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      /* Static chain, then the callee expression, then each argument,
	 then the LHS (if any) in lvalue context.  */
      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      /* Inline asm operands need constraint-aware handling; delegate.  */
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
	gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
	ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;

	ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
	gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
	ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      /* Walk the clauses, then index/initial/final/increment for each
	 collapsed loop dimension.  */
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      {
	gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
	gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
	ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
	gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
	ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
	gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
	ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (
			 as_a <gtransaction *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	/* Generic fallback: any tuple laid out with operand slots gets
	   each operand walked in order.  */
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
503 | ||
504 | ||
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      /* Record where we are so callbacks can inspect/modify the
	 iterator, and reset the removal flag for this statement.  */
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* If the callback deleted the statement there is nothing more
	 to walk here.  */
      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.
     Note: the non-NULL RET branches below read WI->CALLBACK_RESULT,
     which walk_gimple_seq_mod only sets after asserting WI is non-NULL,
     so a non-NULL RET implies WI exists.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
				 wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      /* Any statement kind not listed above must not contain nested
	 statement sequences.  */
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
664 | ||
665 | /* From a tree operand OP return the base of a load or store operation | |
666 | or NULL_TREE if OP is not a load or a store. */ | |
667 | ||
668 | static tree | |
669 | get_base_loadstore (tree op) | |
670 | { | |
671 | while (handled_component_p (op)) | |
672 | op = TREE_OPERAND (op, 0); | |
673 | if (DECL_P (op) | |
674 | || INDIRECT_REF_P (op) | |
675 | || TREE_CODE (op) == MEM_REF | |
676 | || TREE_CODE (op) == TARGET_MEM_REF) | |
677 | return op; | |
678 | return NULL_TREE; | |
679 | } | |
680 | ||
681 | ||
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* Single-RHS assignment: the LHS may be a store, the RHS may be a
	 load and/or take an address.  */
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      /* Strip component refs so we look at the base of the RHS.  */
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  /* Addresses can appear directly (ADDR_EXPR), inside a
	     TARGET_MEM_REF base, inside an OBJ_TYPE_REF, or among
	     CONSTRUCTOR elements.  */
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), arg,
			       data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
	  /* A TARGET_MEM_REF LHS can also embed a taken address.  */
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), lhs, data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Multi-operand assigns and conditions: scan every operand for
	 taken addresses.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), op, data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), op, data);
	    }
	}
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      /* Calls: LHS may be a store, arguments may be loads or taken
	 addresses; also check the static chain and the return-slot
	 optimization (which passes the LHS address implicitly).  */
      if (visit_store)
	{
	  tree arg = gimple_call_lhs (call_stmt);
	  if (arg)
	    {
	      tree lhs = get_base_loadstore (arg);
	      if (lhs)
		ret |= visit_store (stmt, lhs, arg, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
	  {
	    tree arg = gimple_call_arg (call_stmt, i);
	    if (visit_addr
		&& TREE_CODE (arg) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
	    else if (visit_load)
	      {
		tree rhs = get_base_loadstore (arg);
		if (rhs)
		  ret |= visit_load (stmt, rhs, arg, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (call_stmt)
	  && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
			   gimple_call_chain (call_stmt), data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (call_stmt)
	  && gimple_call_lhs (call_stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
			   gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      /* Inline asm: outputs are stores (and taken addresses for "m"
	 constraints), inputs are loads or taken addresses depending on
	 their constraint.  */
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, TREE_VALUE (link), data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		/* A memory-only output operand has its address taken.  */
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, TREE_VALUE (link),
					     data);
		      }
		  }
	      }
	  }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree op = gimple_return_retval (return_stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  else if (visit_load)
	    {
	      tree base = get_base_loadstore (op);
	      if (base)
		ret |= visit_load (stmt, base, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments may be taken addresses.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      /* A computed goto destination may be a taken address.  */
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}
904 | ||
905 | /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP | |
906 | should make a faster clone for this case. */ | |
907 | ||
908 | bool | |
909 | walk_stmt_load_store_ops (gimple stmt, void *data, | |
9f1363cd JJ |
910 | walk_stmt_load_store_addr_fn visit_load, |
911 | walk_stmt_load_store_addr_fn visit_store) | |
5be5c238 AM |
912 | { |
913 | return walk_stmt_load_store_addr_ops (stmt, data, | |
914 | visit_load, visit_store, NULL); | |
915 | } |