/* Gimple walk support.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stmt.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "demangle.h"
38
/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */
49
50gimple
51walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
52 walk_tree_fn callback_op, struct walk_stmt_info *wi)
53{
54 gimple_stmt_iterator gsi;
55
56 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
57 {
58 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
59 if (ret)
60 {
61 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
62 to hold it. */
63 gcc_assert (wi);
64 wi->callback_result = ret;
65
66 return wi->removed_stmt ? NULL : gsi_stmt (gsi);
67 }
68
69 if (!wi->removed_stmt)
70 gsi_next (&gsi);
71 }
72
73 if (wi)
74 wi->callback_result = NULL_TREE;
75
76 return NULL;
77}
78
79
/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */
82
83gimple
84walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
85 walk_tree_fn callback_op, struct walk_stmt_info *wi)
86{
87 gimple_seq seq2 = seq;
88 gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
89 gcc_assert (seq2 == seq);
90 return ret;
91}
92
93
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */
95
96static tree
97walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
98 struct walk_stmt_info *wi)
99{
100 tree ret, op;
101 unsigned noutputs;
102 const char **oconstraints;
103 unsigned i, n;
104 const char *constraint;
105 bool allows_mem, allows_reg, is_inout;
106
107 noutputs = gimple_asm_noutputs (stmt);
108 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
109
110 if (wi)
111 wi->is_lhs = true;
112
113 for (i = 0; i < noutputs; i++)
114 {
115 op = gimple_asm_output_op (stmt, i);
116 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
117 oconstraints[i] = constraint;
118 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
119 &is_inout);
120 if (wi)
121 wi->val_only = (allows_reg || !allows_mem);
122 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
123 if (ret)
124 return ret;
125 }
126
127 n = gimple_asm_ninputs (stmt);
128 for (i = 0; i < n; i++)
129 {
130 op = gimple_asm_input_op (stmt, i);
131 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
132 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
133 oconstraints, &allows_mem, &allows_reg);
134 if (wi)
135 {
136 wi->val_only = (allows_reg || !allows_mem);
137 /* Although input "m" is not really a LHS, we need a lvalue. */
138 wi->is_lhs = !wi->val_only;
139 }
140 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
141 if (ret)
142 return ret;
143 }
144
145 if (wi)
146 {
147 wi->is_lhs = false;
148 wi->val_only = true;
149 }
150
151 n = gimple_asm_nlabels (stmt);
152 for (i = 0; i < n; i++)
153 {
154 op = gimple_asm_label_op (stmt, i);
155 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
156 if (ret)
157 return ret;
158 }
159
160 return NULL_TREE;
161}
162
163
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */
178
179tree
180walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
181 struct walk_stmt_info *wi)
182{
183 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
184 unsigned i;
185 tree ret = NULL_TREE;
186
187 switch (gimple_code (stmt))
188 {
189 case GIMPLE_ASSIGN:
190 /* Walk the RHS operands. If the LHS is of a non-renamable type or
191 is a register variable, we may use a COMPONENT_REF on the RHS. */
192 if (wi)
193 {
194 tree lhs = gimple_assign_lhs (stmt);
195 wi->val_only
196 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
197 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
198 }
199
200 for (i = 1; i < gimple_num_ops (stmt); i++)
201 {
202 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
203 pset);
204 if (ret)
205 return ret;
206 }
207
208 /* Walk the LHS. If the RHS is appropriate for a memory, we
209 may use a COMPONENT_REF on the LHS. */
210 if (wi)
211 {
212 /* If the RHS is of a non-renamable type or is a register variable,
213 we may use a COMPONENT_REF on the LHS. */
214 tree rhs1 = gimple_assign_rhs1 (stmt);
215 wi->val_only
216 = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
217 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
218 wi->is_lhs = true;
219 }
220
221 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
222 if (ret)
223 return ret;
224
225 if (wi)
226 {
227 wi->val_only = true;
228 wi->is_lhs = false;
229 }
230 break;
231
232 case GIMPLE_CALL:
233 if (wi)
234 {
235 wi->is_lhs = false;
236 wi->val_only = true;
237 }
238
239 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
240 if (ret)
241 return ret;
242
243 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
244 if (ret)
245 return ret;
246
247 for (i = 0; i < gimple_call_num_args (stmt); i++)
248 {
249 if (wi)
250 wi->val_only
251 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
252 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
253 pset);
254 if (ret)
255 return ret;
256 }
257
258 if (gimple_call_lhs (stmt))
259 {
260 if (wi)
261 {
262 wi->is_lhs = true;
263 wi->val_only
264 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
265 }
266
267 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
268 if (ret)
269 return ret;
270 }
271
272 if (wi)
273 {
274 wi->is_lhs = false;
275 wi->val_only = true;
276 }
277 break;
278
279 case GIMPLE_CATCH:
280 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
281 pset);
282 if (ret)
283 return ret;
284 break;
285
286 case GIMPLE_EH_FILTER:
287 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
288 pset);
289 if (ret)
290 return ret;
291 break;
292
293 case GIMPLE_ASM:
294 ret = walk_gimple_asm (stmt, callback_op, wi);
295 if (ret)
296 return ret;
297 break;
298
299 case GIMPLE_OMP_CONTINUE:
300 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
301 callback_op, wi, pset);
302 if (ret)
303 return ret;
304
305 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
306 callback_op, wi, pset);
307 if (ret)
308 return ret;
309 break;
310
311 case GIMPLE_OMP_CRITICAL:
312 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
313 pset);
314 if (ret)
315 return ret;
316 break;
317
318 case GIMPLE_OMP_FOR:
319 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
320 pset);
321 if (ret)
322 return ret;
323 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
324 {
325 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
326 wi, pset);
327 if (ret)
328 return ret;
329 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
330 wi, pset);
331 if (ret)
332 return ret;
333 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
334 wi, pset);
335 if (ret)
336 return ret;
337 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
338 wi, pset);
339 }
340 if (ret)
341 return ret;
342 break;
343
344 case GIMPLE_OMP_PARALLEL:
345 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
346 wi, pset);
347 if (ret)
348 return ret;
349 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
350 wi, pset);
351 if (ret)
352 return ret;
353 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
354 wi, pset);
355 if (ret)
356 return ret;
357 break;
358
359 case GIMPLE_OMP_TASK:
360 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
361 wi, pset);
362 if (ret)
363 return ret;
364 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
365 wi, pset);
366 if (ret)
367 return ret;
368 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
369 wi, pset);
370 if (ret)
371 return ret;
372 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
373 wi, pset);
374 if (ret)
375 return ret;
376 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
377 wi, pset);
378 if (ret)
379 return ret;
380 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
381 wi, pset);
382 if (ret)
383 return ret;
384 break;
385
386 case GIMPLE_OMP_SECTIONS:
387 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
388 wi, pset);
389 if (ret)
390 return ret;
391
392 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
393 wi, pset);
394 if (ret)
395 return ret;
396
397 break;
398
399 case GIMPLE_OMP_SINGLE:
400 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
401 pset);
402 if (ret)
403 return ret;
404 break;
405
406 case GIMPLE_OMP_TARGET:
407 ret = walk_tree (gimple_omp_target_clauses_ptr (stmt), callback_op, wi,
408 pset);
409 if (ret)
410 return ret;
411 break;
412
413 case GIMPLE_OMP_TEAMS:
414 ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
415 pset);
416 if (ret)
417 return ret;
418 break;
419
420 case GIMPLE_OMP_ATOMIC_LOAD:
421 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
422 pset);
423 if (ret)
424 return ret;
425
426 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
427 pset);
428 if (ret)
429 return ret;
430 break;
431
432 case GIMPLE_OMP_ATOMIC_STORE:
433 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
434 wi, pset);
435 if (ret)
436 return ret;
437 break;
438
439 case GIMPLE_TRANSACTION:
440 ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
441 wi, pset);
442 if (ret)
443 return ret;
444 break;
445
446 case GIMPLE_OMP_RETURN:
447 ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
448 pset);
449 if (ret)
450 return ret;
451 break;
452
453 /* Tuples that do not have operands. */
454 case GIMPLE_NOP:
455 case GIMPLE_RESX:
456 case GIMPLE_PREDICT:
457 break;
458
459 default:
460 {
461 enum gimple_statement_structure_enum gss;
462 gss = gimple_statement_structure (stmt);
463 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
464 for (i = 0; i < gimple_num_ops (stmt); i++)
465 {
466 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
467 if (ret)
468 return ret;
469 }
470 }
471 break;
472 }
473
474 return NULL_TREE;
475}
476
477
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */
492
tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      /* Expose the iterator to the callbacks and reset the
	 removed-statement flag for this statement.  */
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* A removed statement has no operands or substatements left
	 to walk.  */
      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.
     Note: walk_gimple_seq_mod asserts WI non-NULL before returning a
     non-NULL statement, so the WI->CALLBACK_RESULT reads below are
     reached only with a valid WI.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      /* Both the normal-path and exception-path bodies are walked.  */
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
				 wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      /* Walk the pre-body first, then fall through to the shared
	 OMP-body handling below.  */
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      /* Any statement code not listed above must not contain nested
	 statement sequences.  */
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
632
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */
635
636static tree
637get_base_loadstore (tree op)
638{
639 while (handled_component_p (op))
640 op = TREE_OPERAND (op, 0);
641 if (DECL_P (op)
642 || INDIRECT_REF_P (op)
643 || TREE_CODE (op) == MEM_REF
644 || TREE_CODE (op) == TARGET_MEM_REF)
645 return op;
646 return NULL_TREE;
647}
648
649
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */
656
bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* Single-RHS assignment: the LHS is a store, the RHS may be a
	 load and/or contain address-taken operands.  */
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      /* Strip handled components so address-form checks below see the
	 base expression.  */
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  /* Address-taken operands can appear directly, inside a
	     TARGET_MEM_REF base, inside an OBJ_TYPE_REF, or inside
	     CONSTRUCTOR elements.  */
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), data);
	    }
	  /* A TARGET_MEM_REF on the LHS also embeds an address.  */
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Multi-operand assignment or condition: scan every operand for
	 embedded addresses.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), data);
	    }
	}
    }
  else if (is_gimple_call (stmt))
    {
      /* Calls: the LHS (if any) is a store, arguments are loads or
	 address-taken operands, the static chain may embed an address.  */
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      /* With return-slot optimization the callee receives the address
	 of the LHS, so treat it as address-taken.  */
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      /* Inline asm: outputs are stores, inputs are loads; "memory"
	 constraints make the operand address-taken.  */
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		/* A memory-only output is referenced by address.  */
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			/* A memory-only input is referenced by address.  */
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      /* A returned value may be an address or a load.  */
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments may carry addresses.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      /* A computed goto destination may be a taken label address.  */
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
    }

  return ret;
}
866
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */
869
870bool
871walk_stmt_load_store_ops (gimple stmt, void *data,
872 bool (*visit_load)(gimple, tree, void *),
873 bool (*visit_store)(gimple, tree, void *))
874{
875 return walk_stmt_load_store_addr_ops (stmt, data,
876 visit_load, visit_store, NULL);
877}