/* Gimple IR support functions.

   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"

#define DEFGSCODE(SYM, NAME, STRUCT)   NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
/* All the tuples have their operand vector at the very bottom of the
   structure.  Therefore, the offset required to find the operands
   vector is the size of the structure minus the size of the one-element
   tree array at the end (see gimple_ops).  */
#define DEFGSCODE(SYM, NAME, STRUCT)   (sizeof (STRUCT) - sizeof (tree)),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}


/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */

static enum gimple_statement_structure_enum
gss_for_code (enum gimple_code code)
{
  switch (code)
    {
    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_DEBUG:
    case GIMPLE_SWITCH: return GSS_WITH_OPS;
    case GIMPLE_ASM: return GSS_ASM;
    case GIMPLE_BIND: return GSS_BIND;
    case GIMPLE_CATCH: return GSS_CATCH;
    case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
    case GIMPLE_NOP: return GSS_BASE;
    case GIMPLE_PHI: return GSS_PHI;
    case GIMPLE_RESX: return GSS_RESX;
    case GIMPLE_TRY: return GSS_TRY;
    case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
    case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
    case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION: return GSS_OMP;
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
    case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
    case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
    case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
    case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
    case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
    case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
    case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
    case GIMPLE_PREDICT: return GSS_BASE;
    default: gcc_unreachable ();
    }
}


/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static size_t
gimple_size (enum gimple_code code)
{
  enum gimple_statement_structure_enum gss = gss_for_code (code);

  if (gss == GSS_WITH_OPS)
    return sizeof (struct gimple_statement_with_ops);
  else if (gss == GSS_WITH_MEM_OPS)
    return sizeof (struct gimple_statement_with_memory_ops);

  switch (code)
    {
    case GIMPLE_ASM:
      return sizeof (struct gimple_statement_asm);
    case GIMPLE_NOP:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_BIND:
      return sizeof (struct gimple_statement_bind);
    case GIMPLE_CATCH:
      return sizeof (struct gimple_statement_catch);
    case GIMPLE_EH_FILTER:
      return sizeof (struct gimple_statement_eh_filter);
    case GIMPLE_TRY:
      return sizeof (struct gimple_statement_try);
    case GIMPLE_RESX:
      return sizeof (struct gimple_statement_resx);
    case GIMPLE_OMP_CRITICAL:
      return sizeof (struct gimple_statement_omp_critical);
    case GIMPLE_OMP_FOR:
      return sizeof (struct gimple_statement_omp_for);
    case GIMPLE_OMP_PARALLEL:
      return sizeof (struct gimple_statement_omp_parallel);
    case GIMPLE_OMP_TASK:
      return sizeof (struct gimple_statement_omp_task);
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      return sizeof (struct gimple_statement_omp);
    case GIMPLE_OMP_RETURN:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_CONTINUE:
      return sizeof (struct gimple_statement_omp_continue);
    case GIMPLE_OMP_SECTIONS:
      return sizeof (struct gimple_statement_omp_sections);
    case GIMPLE_OMP_SECTIONS_SWITCH:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_SINGLE:
      return sizeof (struct gimple_statement_omp_single);
    case GIMPLE_OMP_ATOMIC_LOAD:
      return sizeof (struct gimple_statement_omp_atomic_load);
    case GIMPLE_OMP_ATOMIC_STORE:
      return sizeof (struct gimple_statement_omp_atomic_store);
    case GIMPLE_WITH_CLEANUP_EXPR:
      return sizeof (struct gimple_statement_wce);
    case GIMPLE_PREDICT:
      return sizeof (struct gimple_statement_base);
    default:
      break;
    }

  gcc_unreachable ();
}


/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
static gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
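
/* Usage sketch (illustrative only, not part of the original file):
   build "tmp = foo (a, b);" for a hypothetical FUNCTION_DECL FOO_DECL
   and operands TMP, A and B supplied by the caller.  */
#if 0
static gimple
example_build_call (tree foo_decl, tree tmp, tree a, tree b)
{
  /* Two explicit arguments; the LHS is attached separately.  */
  gimple call = gimple_build_call (foo_decl, 2, a, b);
  gimple_call_set_lhs (call, tmp);
  return call;
}
#endif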


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
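
/* Worked example (illustrative only): for EXPR = "a + b" the code
   PLUS_EXPR is of class GIMPLE_BINARY_RHS, so *SUBCODE_P = PLUS_EXPR,
   *OP1_P = a and *OP2_P = b.  For a bare SSA name or constant, the
   class is GIMPLE_SINGLE_RHS and the expression itself becomes *OP1_P,
   with *OP2_P left as NULL_TREE.  */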


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary or binary expression
   tree, or a single operand.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}
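
/* Usage sketch (illustrative only): build "x = a + b;" directly from
   operands, without first constructing a PLUS_EXPR tree.  X, A and B
   are hypothetical trees supplied by the caller.  */
#if 0
static gimple
example_build_plus (tree x, tree a, tree b)
{
  /* PLUS_EXPR is a GIMPLE_BINARY_RHS code, so the tuple gets three
     operand slots: the LHS plus two RHS operands.  */
  return gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);
}
#endif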


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
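
/* Usage sketch (illustrative only): during gimplification, append
   "dst = src;" to a sequence being built.  DST and SRC are
   hypothetical trees owned by the caller.  */
#if 0
static void
example_gimplify_assign (tree dst, tree src)
{
  gimple_seq seq = NULL;
  gimple assign = gimplify_assign (dst, src, &seq);
  /* SEQ now ends with ASSIGN; SRC was gimplified as needed.  */
  (void) assign;
}
#endif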


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
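
/* Usage sketch (illustrative only): build "if (a < b) goto t; else
   goto f;" where A and B are operands and T/F are LABEL_DECLs, all
   hypothetical and supplied by the caller.  */
#if 0
static gimple
example_build_cond (tree a, tree b, tree t, tree f)
{
  return gimple_build_cond (LT_EXPR, a, b, t, f);
}
#endif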


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers)
{
  gimple p;
  int size = strlen (string);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   ... are trees for each input, output and clobbered register.  */

gimple
gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
                  unsigned nclobbers, ...)
{
  gimple p;
  unsigned i;
  va_list ap;

  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);

  va_start (ap, nclobbers);

  for (i = 0; i < ninputs; i++)
    gimple_asm_set_input_op (p, i, va_arg (ap, tree));

  for (i = 0; i < noutputs; i++)
    gimple_asm_set_output_op (p, i, va_arg (ap, tree));

  for (i = 0; i < nclobbers; i++)
    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));

  va_end (ap);

  return p;
}
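
/* Usage sketch (illustrative only): build an asm with no inputs,
   no outputs and one clobber.  Operands are TREE_LISTs as produced
   by the front ends; the exact shape of the clobber operand here is
   an assumption for illustration.  */
#if 0
static gimple
example_build_asm (void)
{
  tree clobber = build_tree_list (NULL_TREE,
                                  build_string (6, "memory"));
  return gimple_build_asm ("", 0, 0, 1, clobber);
}
#endif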

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.

   REGION is the region number from which this resx causes control flow to
   leave.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_alloc (GIMPLE_RESX, 0);
  gimple_resx_set_region (p, region);
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

static inline gimple
gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    nlabels + 1 + 1);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i;
  gimple p;

  p = gimple_build_switch_1 (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_1 (nlabels, index, default_label);
  /* Put labels in labels[1..nlabels]; the default label
     is in labels[0].  */
  for (i = 1; i <= nlabels; i++)
    gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));

  return p;
}
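
/* Usage sketch (illustrative only): build a two-case switch from a
   heap vector of CASE_LABEL_EXPRs.  INDEX, DEF, CASE0 and CASE1 are
   hypothetical trees supplied by the caller.  */
#if 0
static gimple
example_build_switch (tree index, tree def, tree case0, tree case1)
{
  VEC(tree, heap) *labels = NULL;
  VEC_safe_push (tree, heap, labels, case0);
  VEC_safe_push (tree, heap, labels, case1);
  return gimple_build_switch_vec (index, def, labels);
}
#endif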


/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

/* Return which gimple structure is used by T.  The enums here are defined
   in gsstruct.def.  */

enum gimple_statement_structure_enum
gimple_statement_structure (gimple gs)
{
  return gss_for_code (gimple_code (gs));
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
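
/* Usage sketch (illustrative only): compose a small sequence by
   appending statements one at a time; the sequence is allocated
   lazily on the first append.  STMT1 and STMT2 are hypothetical
   statements owned by the caller.  */
#if 0
static gimple_seq
example_build_seq (gimple stmt1, gimple stmt2)
{
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, stmt1);
  gimple_seq_add_stmt (&seq, stmt2);
  return seq;
}
#endif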


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
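
/* Usage sketch (illustrative only): a statement callback that stops
   the walk at the first GIMPLE_CALL in SEQ.  The walk_stmt_info is
   zero-initialized, as callers conventionally do.  */
#if 0
static tree
example_find_call_cb (gimple_stmt_iterator *gsi, bool *handled_ops,
                      struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
{
  *handled_ops = true;
  if (is_gimple_call (gsi_stmt (*gsi)))
    return error_mark_node;  /* Any non-NULL tree stops the walk.  */
  return NULL_TREE;
}

static gimple
example_find_call (gimple_seq seq)
{
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  return walk_gimple_seq (seq, example_find_call_cb, NULL, &wi);
}
#endif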


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      tree op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);

      /* Although input "m" is not really an LHS, we need an lvalue.  */
      if (wi)
        wi->is_lhs = !wi->val_only;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
         COMPONENT_REF RHS.  */
      if (wi)
        wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
                       || !gimple_assign_single_p (stmt);

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return gimple_code (gs) == GIMPLE_ASSIGN
         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
            == GIMPLE_SINGLE_RHS
         && is_gimple_val (gimple_op (gs, 1));
}


/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
              == GIMPLE_SINGLE_RHS)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
             == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Fold the expression computed by STMT.  If the expression can be
   folded, return the folded result, otherwise return NULL.  STMT is
   not modified.  */

tree
gimple_fold (const_gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      return fold_binary_loc (loc, gimple_cond_code (stmt),
                              boolean_type_node,
                              gimple_cond_lhs (stmt),
                              gimple_cond_rhs (stmt));

    case GIMPLE_ASSIGN:
      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
        {
        case GIMPLE_UNARY_RHS:
          return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt));
        case GIMPLE_BINARY_RHS:
          return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));
        case GIMPLE_SINGLE_RHS:
          return fold (gimple_assign_rhs1 (stmt));
        default:;
        }
      break;

    case GIMPLE_SWITCH:
      return gimple_switch_index (stmt);

    case GIMPLE_CALL:
      return NULL_TREE;

    default:
      break;
    }

  gcc_unreachable ();
}
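
/* Usage sketch (illustrative only): try to fold the condition of a
   GIMPLE_COND; a non-NULL result (e.g. a constant true node when both
   operands are equal constants) means the test was decided at
   compile time.  */
#if 0
static bool
example_cond_is_trivially_true (gimple cond_stmt)
{
  tree folded = gimple_fold (cond_stmt);
  return folded && integer_onep (folded);
}
#endif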


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
                                tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
         on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}
2027
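/* Usage sketch (illustrative only): rewriting the statement at a
   hypothetical iterator GSI into "lhs = a + b" without building a
   tree first, where A and B are valid gimple operands:

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, b);
     update_stmt (gsi_stmt (gsi));

   The statement must be re-fetched with gsi_stmt after the call,
   since it may have been reallocated to gain operand slots.  */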
2028
2029 /* Return the LHS of a statement that performs an assignment,
2030 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2031 for a call to a function that returns no value, or for a
2032 statement other than an assignment or a call. */
2033
2034 tree
2035 gimple_get_lhs (const_gimple stmt)
2036 {
2037 enum gimple_code code = gimple_code (stmt);
2038
2039 if (code == GIMPLE_ASSIGN)
2040 return gimple_assign_lhs (stmt);
2041 else if (code == GIMPLE_CALL)
2042 return gimple_call_lhs (stmt);
2043 else
2044 return NULL_TREE;
2045 }
2046
2047
2048 /* Set the LHS of a statement that performs an assignment,
2049 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2050
2051 void
2052 gimple_set_lhs (gimple stmt, tree lhs)
2053 {
2054 enum gimple_code code = gimple_code (stmt);
2055
2056 if (code == GIMPLE_ASSIGN)
2057 gimple_assign_set_lhs (stmt, lhs);
2058 else if (code == GIMPLE_CALL)
2059 gimple_call_set_lhs (stmt, lhs);
2060 else
2061 gcc_unreachable ();
2062 }
2063
2064
2065 /* Return a deep copy of statement STMT. All the operands from STMT
2066 are reallocated and copied using unshare_expr. The DEF and USE
2067 operand arrays are cleared in the new copy; VDEF and VUSE are carried over. */
2068
2069 gimple
2070 gimple_copy (gimple stmt)
2071 {
2072 enum gimple_code code = gimple_code (stmt);
2073 unsigned num_ops = gimple_num_ops (stmt);
2074 gimple copy = gimple_alloc (code, num_ops);
2075 unsigned i;
2076
2077 /* Shallow copy all the fields from STMT. */
2078 memcpy (copy, stmt, gimple_size (code));
2079
2080 /* If STMT has sub-statements, deep-copy them as well. */
2081 if (gimple_has_substatements (stmt))
2082 {
2083 gimple_seq new_seq;
2084 tree t;
2085
2086 switch (gimple_code (stmt))
2087 {
2088 case GIMPLE_BIND:
2089 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2090 gimple_bind_set_body (copy, new_seq);
2091 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2092 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2093 break;
2094
2095 case GIMPLE_CATCH:
2096 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2097 gimple_catch_set_handler (copy, new_seq);
2098 t = unshare_expr (gimple_catch_types (stmt));
2099 gimple_catch_set_types (copy, t);
2100 break;
2101
2102 case GIMPLE_EH_FILTER:
2103 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2104 gimple_eh_filter_set_failure (copy, new_seq);
2105 t = unshare_expr (gimple_eh_filter_types (stmt));
2106 gimple_eh_filter_set_types (copy, t);
2107 break;
2108
2109 case GIMPLE_TRY:
2110 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2111 gimple_try_set_eval (copy, new_seq);
2112 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2113 gimple_try_set_cleanup (copy, new_seq);
2114 break;
2115
2116 case GIMPLE_OMP_FOR:
2117 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2118 gimple_omp_for_set_pre_body (copy, new_seq);
2119 t = unshare_expr (gimple_omp_for_clauses (stmt));
2120 gimple_omp_for_set_clauses (copy, t);
2121 copy->gimple_omp_for.iter
2122 = GGC_NEWVEC (struct gimple_omp_for_iter,
2123 gimple_omp_for_collapse (stmt));
2124 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2125 {
2126 gimple_omp_for_set_cond (copy, i,
2127 gimple_omp_for_cond (stmt, i));
2128 gimple_omp_for_set_index (copy, i,
2129 gimple_omp_for_index (stmt, i));
2130 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2131 gimple_omp_for_set_initial (copy, i, t);
2132 t = unshare_expr (gimple_omp_for_final (stmt, i));
2133 gimple_omp_for_set_final (copy, i, t);
2134 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2135 gimple_omp_for_set_incr (copy, i, t);
2136 }
2137 goto copy_omp_body;
2138
2139 case GIMPLE_OMP_PARALLEL:
2140 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2141 gimple_omp_parallel_set_clauses (copy, t);
2142 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2143 gimple_omp_parallel_set_child_fn (copy, t);
2144 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2145 gimple_omp_parallel_set_data_arg (copy, t);
2146 goto copy_omp_body;
2147
2148 case GIMPLE_OMP_TASK:
2149 t = unshare_expr (gimple_omp_task_clauses (stmt));
2150 gimple_omp_task_set_clauses (copy, t);
2151 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2152 gimple_omp_task_set_child_fn (copy, t);
2153 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2154 gimple_omp_task_set_data_arg (copy, t);
2155 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2156 gimple_omp_task_set_copy_fn (copy, t);
2157 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2158 gimple_omp_task_set_arg_size (copy, t);
2159 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2160 gimple_omp_task_set_arg_align (copy, t);
2161 goto copy_omp_body;
2162
2163 case GIMPLE_OMP_CRITICAL:
2164 t = unshare_expr (gimple_omp_critical_name (stmt));
2165 gimple_omp_critical_set_name (copy, t);
2166 goto copy_omp_body;
2167
2168 case GIMPLE_OMP_SECTIONS:
2169 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2170 gimple_omp_sections_set_clauses (copy, t);
2171 t = unshare_expr (gimple_omp_sections_control (stmt));
2172 gimple_omp_sections_set_control (copy, t);
2173 /* FALLTHRU */
2174
2175 case GIMPLE_OMP_SINGLE:
2176 case GIMPLE_OMP_SECTION:
2177 case GIMPLE_OMP_MASTER:
2178 case GIMPLE_OMP_ORDERED:
2179 copy_omp_body:
2180 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2181 gimple_omp_set_body (copy, new_seq);
2182 break;
2183
2184 case GIMPLE_WITH_CLEANUP_EXPR:
2185 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2186 gimple_wce_set_cleanup (copy, new_seq);
2187 break;
2188
2189 default:
2190 gcc_unreachable ();
2191 }
2192 }
2193
2194 /* Make copy of operands. */
2195 if (num_ops > 0)
2196 {
2197 for (i = 0; i < num_ops; i++)
2198 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2199
2200 /* Clear out SSA operand vectors on COPY. */
2201 if (gimple_has_ops (stmt))
2202 {
2203 gimple_set_def_ops (copy, NULL);
2204 gimple_set_use_ops (copy, NULL);
2205 }
2206
2207 if (gimple_has_mem_ops (stmt))
2208 {
2209 gimple_set_vdef (copy, gimple_vdef (stmt));
2210 gimple_set_vuse (copy, gimple_vuse (stmt));
2211 }
2212
2213 /* SSA operands need to be updated. */
2214 gimple_set_modified (copy, true);
2215 }
2216
2217 return copy;
2218 }
2219
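/* Usage sketch (illustrative only): a duplicated statement is not in
   the IL yet, so it has to be inserted and its operands updated by
   the caller; GSI is a hypothetical iterator:

     gimple dup = gimple_copy (stmt);
     gsi_insert_before (&gsi, dup, GSI_SAME_STMT);
     update_stmt (dup);
*/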
2220
2221 /* Set the MODIFIED flag to MODIFIEDP iff the gimple statement S has
2222 a MODIFIED field. */
2223
2224 void
2225 gimple_set_modified (gimple s, bool modifiedp)
2226 {
2227 if (gimple_has_ops (s))
2228 {
2229 s->gsbase.modified = (unsigned) modifiedp;
2230
2231 if (modifiedp
2232 && cfun->gimple_df
2233 && is_gimple_call (s)
2234 && gimple_call_noreturn_p (s))
2235 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2236 }
2237 }
2238
2239
2240 /* Return true if statement S has side-effects. We consider a
2241 statement to have side effects if:
2242
2243 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2244 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2245
2246 bool
2247 gimple_has_side_effects (const_gimple s)
2248 {
2249 unsigned i;
2250
2251 if (is_gimple_debug (s))
2252 return false;
2253
2254 /* We don't have to scan the arguments to check for volatile
2255 arguments; at present, though, we still do a scan to check
2256 for TREE_SIDE_EFFECTS. */
2257 if (gimple_has_volatile_ops (s))
2258 return true;
2259
2260 if (is_gimple_call (s))
2261 {
2262 unsigned nargs = gimple_call_num_args (s);
2263
2264 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2265 return true;
2266 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2267 /* An infinite loop is considered a side effect. */
2268 return true;
2269
2270 if (gimple_call_lhs (s)
2271 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2272 {
2273 gcc_assert (gimple_has_volatile_ops (s));
2274 return true;
2275 }
2276
2277 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2278 return true;
2279
2280 for (i = 0; i < nargs; i++)
2281 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2282 {
2283 gcc_assert (gimple_has_volatile_ops (s));
2284 return true;
2285 }
2286
2287 return false;
2288 }
2289 else
2290 {
2291 for (i = 0; i < gimple_num_ops (s); i++)
2292 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2293 {
2294 gcc_assert (gimple_has_volatile_ops (s));
2295 return true;
2296 }
2297 }
2298
2299 return false;
2300 }
2301
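/* Usage sketch (illustrative only): a dead-code-elimination style
   test might combine this predicate with a check for virtual
   definitions before removing a statement:

     if (!gimple_has_side_effects (stmt)
         && !gimple_vdef (stmt))
       ... STMT is a candidate for removal ...
*/
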
2302 /* Return true if the RHS of statement S has side effects.
2303 We may use it to determine if it is admissible to replace
2304 an assignment or call with a copy of a previously-computed
2305 value. In such cases, side effects due to the LHS are
2306 preserved. */
2307
2308 bool
2309 gimple_rhs_has_side_effects (const_gimple s)
2310 {
2311 unsigned i;
2312
2313 if (is_gimple_call (s))
2314 {
2315 unsigned nargs = gimple_call_num_args (s);
2316
2317 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2318 return true;
2319
2320 /* We cannot use gimple_has_volatile_ops here,
2321 because we must ignore a volatile LHS. */
2322 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2323 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2324 {
2325 gcc_assert (gimple_has_volatile_ops (s));
2326 return true;
2327 }
2328
2329 for (i = 0; i < nargs; i++)
2330 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2331 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2332 return true;
2333
2334 return false;
2335 }
2336 else if (is_gimple_assign (s))
2337 {
2338 /* Skip the first operand, the LHS. */
2339 for (i = 1; i < gimple_num_ops (s); i++)
2340 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2341 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2342 {
2343 gcc_assert (gimple_has_volatile_ops (s));
2344 return true;
2345 }
2346 }
2347 else if (is_gimple_debug (s))
2348 return false;
2349 else
2350 {
2351 /* For statements without an LHS, examine all arguments. */
2352 for (i = 0; i < gimple_num_ops (s); i++)
2353 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2354 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2355 {
2356 gcc_assert (gimple_has_volatile_ops (s));
2357 return true;
2358 }
2359 }
2360
2361 return false;
2362 }
2363
2364
2365 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2366 Return true if S can trap. If INCLUDE_LHS is true and S is a
2367 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2368 Otherwise, only the RHS of the assignment is checked. */
2369
2370 static bool
2371 gimple_could_trap_p_1 (gimple s, bool include_lhs)
2372 {
2373 unsigned i, start;
2374 tree t, div = NULL_TREE;
2375 enum tree_code op;
2376
2377 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2378
2379 for (i = start; i < gimple_num_ops (s); i++)
2380 if (tree_could_trap_p (gimple_op (s, i)))
2381 return true;
2382
2383 switch (gimple_code (s))
2384 {
2385 case GIMPLE_ASM:
2386 return gimple_asm_volatile_p (s);
2387
2388 case GIMPLE_CALL:
2389 t = gimple_call_fndecl (s);
2390 /* Assume that calls to weak functions may trap. */
2391 if (!t || !DECL_P (t) || DECL_WEAK (t))
2392 return true;
2393 return false;
2394
2395 case GIMPLE_ASSIGN:
2396 t = gimple_expr_type (s);
2397 op = gimple_assign_rhs_code (s);
2398 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2399 div = gimple_assign_rhs2 (s);
2400 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2401 (INTEGRAL_TYPE_P (t)
2402 && TYPE_OVERFLOW_TRAPS (t)),
2403 div));
2404
2405 default:
2406 break;
2407 }
2408
2409 return false;
2410
2411 }
2412
2413
2414 /* Return true if statement S can trap. */
2415
2416 bool
2417 gimple_could_trap_p (gimple s)
2418 {
2419 return gimple_could_trap_p_1 (s, true);
2420 }
2421
2422
2423 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2424
2425 bool
2426 gimple_assign_rhs_could_trap_p (gimple s)
2427 {
2428 gcc_assert (is_gimple_assign (s));
2429 return gimple_could_trap_p_1 (s, false);
2430 }
2431
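/* Usage sketch (illustrative only): a hoisting pass might guard
   speculative execution of a statement like so:

     if (!gimple_has_side_effects (stmt)
         && !gimple_could_trap_p (stmt))
       ... STMT may be evaluated speculatively ...
*/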
2432
2433 /* Print allocation statistics for the GIMPLE statements generated. */
2434
2435 void
2436 dump_gimple_statistics (void)
2437 {
2438 #ifdef GATHER_STATISTICS
2439 int i, total_tuples = 0, total_bytes = 0;
2440
2441 fprintf (stderr, "\nGIMPLE statements\n");
2442 fprintf (stderr, "Kind                   Stmts      Bytes\n");
2443 fprintf (stderr, "---------------------------------------\n");
2444 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2445 {
2446 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2447 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2448 total_tuples += gimple_alloc_counts[i];
2449 total_bytes += gimple_alloc_sizes[i];
2450 }
2451 fprintf (stderr, "---------------------------------------\n");
2452 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2453 fprintf (stderr, "---------------------------------------\n");
2454 #else
2455 fprintf (stderr, "No gimple statistics\n");
2456 #endif
2457 }
2458
2459
2460 /* Return the number of operands needed on the RHS of a GIMPLE
2461 assignment for an expression with tree code CODE. */
2462
2463 unsigned
2464 get_gimple_rhs_num_ops (enum tree_code code)
2465 {
2466 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2467
2468 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2469 return 1;
2470 else if (rhs_class == GIMPLE_BINARY_RHS)
2471 return 2;
2472 else
2473 gcc_unreachable ();
2474 }
2475
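/* For example, get_gimple_rhs_num_ops (PLUS_EXPR) is 2 since
   PLUS_EXPR is tcc_binary, get_gimple_rhs_num_ops (NEGATE_EXPR) is 1
   (tcc_unary), and get_gimple_rhs_num_ops (SSA_NAME) is also 1,
   because single operands form the GIMPLE_SINGLE_RHS class in the
   table built below.  */
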
2476 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2477 (unsigned char) \
2478 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2479 : ((TYPE) == tcc_binary \
2480 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2481 : ((TYPE) == tcc_constant \
2482 || (TYPE) == tcc_declaration \
2483 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2484 : ((SYM) == TRUTH_AND_EXPR \
2485 || (SYM) == TRUTH_OR_EXPR \
2486 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2487 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2488 : ((SYM) == COND_EXPR \
2489 || (SYM) == CONSTRUCTOR \
2490 || (SYM) == OBJ_TYPE_REF \
2491 || (SYM) == ASSERT_EXPR \
2492 || (SYM) == ADDR_EXPR \
2493 || (SYM) == WITH_SIZE_EXPR \
2494 || (SYM) == EXC_PTR_EXPR \
2495 || (SYM) == SSA_NAME \
2496 || (SYM) == FILTER_EXPR \
2497 || (SYM) == POLYNOMIAL_CHREC \
2498 || (SYM) == DOT_PROD_EXPR \
2499 || (SYM) == VEC_COND_EXPR \
2500 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2501 : GIMPLE_INVALID_RHS),
2502 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2503
2504 const unsigned char gimple_rhs_class_table[] = {
2505 #include "all-tree.def"
2506 };
2507
2508 #undef DEFTREECODE
2509 #undef END_OF_BASE_TREE_CODES
2510
2511 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2512
2513 /* Validation of GIMPLE expressions. */
2514
2515 /* Return true if OP is an acceptable tree node to be used as a GIMPLE
2516 operand. */
2517
2518 bool
2519 is_gimple_operand (const_tree op)
2520 {
2521 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2522 }
2523
2524 /* Returns true iff T is a valid RHS for an assignment to a renamed
2525 user -- or front-end generated artificial -- variable. */
2526
2527 bool
2528 is_gimple_reg_rhs (tree t)
2529 {
2530 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2531 }
2532
2533 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2534 LHS, or for a call argument. */
2535
2536 bool
2537 is_gimple_mem_rhs (tree t)
2538 {
2539 /* If we're dealing with a renamable type, either source or dest must be
2540 a renamed variable. */
2541 if (is_gimple_reg_type (TREE_TYPE (t)))
2542 return is_gimple_val (t);
2543 else
2544 return is_gimple_val (t) || is_gimple_lvalue (t);
2545 }
2546
2547 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2548
2549 bool
2550 is_gimple_lvalue (tree t)
2551 {
2552 return (is_gimple_addressable (t)
2553 || TREE_CODE (t) == WITH_SIZE_EXPR
2554 /* These are complex lvalues, but don't have addresses, so they
2555 go here. */
2556 || TREE_CODE (t) == BIT_FIELD_REF);
2557 }
2558
2559 /* Return true if T is a GIMPLE condition. */
2560
2561 bool
2562 is_gimple_condexpr (tree t)
2563 {
2564 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2565 && !tree_could_trap_p (t)
2566 && is_gimple_val (TREE_OPERAND (t, 0))
2567 && is_gimple_val (TREE_OPERAND (t, 1))));
2568 }
2569
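/* For example, 'x_1' and 'x_1 <= y_2' are valid GIMPLE conditions,
   whereas 'a.b < c' (non-value operands) or a comparison that could
   trap must first be gimplified into an acceptable form.  */
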
2570 /* Return true if T is something whose address can be taken. */
2571
2572 bool
2573 is_gimple_addressable (tree t)
2574 {
2575 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2576 }
2577
2578 /* Return true if T is a valid gimple constant. */
2579
2580 bool
2581 is_gimple_constant (const_tree t)
2582 {
2583 switch (TREE_CODE (t))
2584 {
2585 case INTEGER_CST:
2586 case REAL_CST:
2587 case FIXED_CST:
2588 case STRING_CST:
2589 case COMPLEX_CST:
2590 case VECTOR_CST:
2591 return true;
2592
2593 /* Vector constant constructors are gimple invariant. */
2594 case CONSTRUCTOR:
2595 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2596 return TREE_CONSTANT (t);
2597 else
2598 return false;
2599
2600 default:
2601 return false;
2602 }
2603 }
2604
2605 /* Return true if T is a gimple address. */
2606
2607 bool
2608 is_gimple_address (const_tree t)
2609 {
2610 tree op;
2611
2612 if (TREE_CODE (t) != ADDR_EXPR)
2613 return false;
2614
2615 op = TREE_OPERAND (t, 0);
2616 while (handled_component_p (op))
2617 {
2618 if ((TREE_CODE (op) == ARRAY_REF
2619 || TREE_CODE (op) == ARRAY_RANGE_REF)
2620 && !is_gimple_val (TREE_OPERAND (op, 1)))
2621 return false;
2622
2623 op = TREE_OPERAND (op, 0);
2624 }
2625
2626 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2627 return true;
2628
2629 switch (TREE_CODE (op))
2630 {
2631 case PARM_DECL:
2632 case RESULT_DECL:
2633 case LABEL_DECL:
2634 case FUNCTION_DECL:
2635 case VAR_DECL:
2636 case CONST_DECL:
2637 return true;
2638
2639 default:
2640 return false;
2641 }
2642 }
2643
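/* For instance, '&a.b.c' and '&p->fld' are gimple addresses provided
   any array indices on the access path are gimple values; the
   operand of the ADDR_EXPR must bottom out in a constant, a
   declaration or an indirect reference.  */
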
2644 /* Strip out all handled components that produce invariant
2645 offsets. */
2646
2647 static const_tree
2648 strip_invariant_refs (const_tree op)
2649 {
2650 while (handled_component_p (op))
2651 {
2652 switch (TREE_CODE (op))
2653 {
2654 case ARRAY_REF:
2655 case ARRAY_RANGE_REF:
2656 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2657 || TREE_OPERAND (op, 2) != NULL_TREE
2658 || TREE_OPERAND (op, 3) != NULL_TREE)
2659 return NULL;
2660 break;
2661
2662 case COMPONENT_REF:
2663 if (TREE_OPERAND (op, 2) != NULL_TREE)
2664 return NULL;
2665 break;
2666
2667 default:;
2668 }
2669 op = TREE_OPERAND (op, 0);
2670 }
2671
2672 return op;
2673 }
2674
2675 /* Return true if T is a gimple invariant address. */
2676
2677 bool
2678 is_gimple_invariant_address (const_tree t)
2679 {
2680 const_tree op;
2681
2682 if (TREE_CODE (t) != ADDR_EXPR)
2683 return false;
2684
2685 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2686
2687 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2688 }
2689
2690 /* Return true if T is a gimple invariant address at IPA level
2691 (so addresses of variables on stack are not allowed). */
2692
2693 bool
2694 is_gimple_ip_invariant_address (const_tree t)
2695 {
2696 const_tree op;
2697
2698 if (TREE_CODE (t) != ADDR_EXPR)
2699 return false;
2700
2701 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2702
2703 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2704 }
2705
2706 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2707 form of function invariant. */
2708
2709 bool
2710 is_gimple_min_invariant (const_tree t)
2711 {
2712 if (TREE_CODE (t) == ADDR_EXPR)
2713 return is_gimple_invariant_address (t);
2714
2715 return is_gimple_constant (t);
2716 }
2717
2718 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2719 form of gimple minimal invariant. */
2720
2721 bool
2722 is_gimple_ip_invariant (const_tree t)
2723 {
2724 if (TREE_CODE (t) == ADDR_EXPR)
2725 return is_gimple_ip_invariant_address (t);
2726
2727 return is_gimple_constant (t);
2728 }
2729
2730 /* Return true if T looks like a valid GIMPLE statement. */
2731
2732 bool
2733 is_gimple_stmt (tree t)
2734 {
2735 const enum tree_code code = TREE_CODE (t);
2736
2737 switch (code)
2738 {
2739 case NOP_EXPR:
2740 /* The only valid NOP_EXPR is the empty statement. */
2741 return IS_EMPTY_STMT (t);
2742
2743 case BIND_EXPR:
2744 case COND_EXPR:
2745 /* These are only valid if they're void. */
2746 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2747
2748 case SWITCH_EXPR:
2749 case GOTO_EXPR:
2750 case RETURN_EXPR:
2751 case LABEL_EXPR:
2752 case CASE_LABEL_EXPR:
2753 case TRY_CATCH_EXPR:
2754 case TRY_FINALLY_EXPR:
2755 case EH_FILTER_EXPR:
2756 case CATCH_EXPR:
2757 case ASM_EXPR:
2758 case RESX_EXPR:
2759 case STATEMENT_LIST:
2760 case OMP_PARALLEL:
2761 case OMP_FOR:
2762 case OMP_SECTIONS:
2763 case OMP_SECTION:
2764 case OMP_SINGLE:
2765 case OMP_MASTER:
2766 case OMP_ORDERED:
2767 case OMP_CRITICAL:
2768 case OMP_TASK:
2769 /* These are always void. */
2770 return true;
2771
2772 case CALL_EXPR:
2773 case MODIFY_EXPR:
2774 case PREDICT_EXPR:
2775 /* These are valid regardless of their type. */
2776 return true;
2777
2778 default:
2779 return false;
2780 }
2781 }
2782
2783 /* Return true if T is a variable. */
2784
2785 bool
2786 is_gimple_variable (tree t)
2787 {
2788 return (TREE_CODE (t) == VAR_DECL
2789 || TREE_CODE (t) == PARM_DECL
2790 || TREE_CODE (t) == RESULT_DECL
2791 || TREE_CODE (t) == SSA_NAME);
2792 }
2793
2794 /* Return true if T is a GIMPLE identifier (something with an address). */
2795
2796 bool
2797 is_gimple_id (tree t)
2798 {
2799 return (is_gimple_variable (t)
2800 || TREE_CODE (t) == FUNCTION_DECL
2801 || TREE_CODE (t) == LABEL_DECL
2802 || TREE_CODE (t) == CONST_DECL
2803 /* Allow string constants, since they are addressable. */
2804 || TREE_CODE (t) == STRING_CST);
2805 }
2806
2807 /* Return true if TYPE is a suitable type for a scalar register variable. */
2808
2809 bool
2810 is_gimple_reg_type (tree type)
2811 {
2812 return !AGGREGATE_TYPE_P (type);
2813 }
2814
2815 /* Return true if T is a non-aggregate register variable. */
2816
2817 bool
2818 is_gimple_reg (tree t)
2819 {
2820 if (TREE_CODE (t) == SSA_NAME)
2821 t = SSA_NAME_VAR (t);
2822
2823 if (!is_gimple_variable (t))
2824 return false;
2825
2826 if (!is_gimple_reg_type (TREE_TYPE (t)))
2827 return false;
2828
2829 /* A volatile decl is not acceptable because we can't reuse it as
2830 needed. We need to copy it into a temp first. */
2831 if (TREE_THIS_VOLATILE (t))
2832 return false;
2833
2834 /* We define "registers" as things that can be renamed as needed,
2835 which with our infrastructure does not apply to memory. */
2836 if (needs_to_live_in_memory (t))
2837 return false;
2838
2839 /* Hard register variables are an interesting case. For those that
2840 are call-clobbered, we don't know where all the calls are, since
2841 we don't (want to) take into account which operations will turn
2842 into libcalls at the rtl level. For those that are call-saved,
2843 we don't currently model the fact that calls may in fact change
2844 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2845 level, and so miss variable changes they might imply. All around,
2846 it seems safest to not do too much optimization with these at the
2847 tree level at all. We'll have to rely on the rtl optimizers to
2848 clean this up, as there we've got all the appropriate bits exposed. */
2849 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2850 return false;
2851
2852 /* Complex and vector values must have been put into SSA-like form.
2853 That is, no assignments to the individual components. */
2854 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2855 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2856 return DECL_GIMPLE_REG_P (t);
2857
2858 return true;
2859 }
2860
2861
2862 /* Return true if T is a GIMPLE variable whose address is not needed. */
2863
2864 bool
2865 is_gimple_non_addressable (tree t)
2866 {
2867 if (TREE_CODE (t) == SSA_NAME)
2868 t = SSA_NAME_VAR (t);
2869
2870 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2871 }
2872
2873 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2874
2875 bool
2876 is_gimple_val (tree t)
2877 {
2878 /* Make loads from volatiles and memory vars explicit. */
2879 if (is_gimple_variable (t)
2880 && is_gimple_reg_type (TREE_TYPE (t))
2881 && !is_gimple_reg (t))
2882 return false;
2883
2884 /* FIXME make these decls. That can happen only when we expose the
2885 entire landing-pad construct at the tree level. */
2886 if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
2887 return true;
2888
2889 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2890 }
2891
2892 /* Similarly, but accept hard registers as inputs to asm statements. */
2893
2894 bool
2895 is_gimple_asm_val (tree t)
2896 {
2897 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2898 return true;
2899
2900 return is_gimple_val (t);
2901 }
2902
2903 /* Return true if T is a GIMPLE minimal lvalue. */
2904
2905 bool
2906 is_gimple_min_lval (tree t)
2907 {
2908 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2909 return false;
2910 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2911 }
2912
2913 /* Return true if T is a typecast operation. */
2914
2915 bool
2916 is_gimple_cast (tree t)
2917 {
2918 return (CONVERT_EXPR_P (t)
2919 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2920 }
2921
2922 /* Return true if T is a valid function operand of a CALL_EXPR. */
2923
2924 bool
2925 is_gimple_call_addr (tree t)
2926 {
2927 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2928 }
2929
2930 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2931 Otherwise, return NULL_TREE. */
2932
2933 tree
2934 get_call_expr_in (tree t)
2935 {
2936 if (TREE_CODE (t) == MODIFY_EXPR)
2937 t = TREE_OPERAND (t, 1);
2938 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2939 t = TREE_OPERAND (t, 0);
2940 if (TREE_CODE (t) == CALL_EXPR)
2941 return t;
2942 return NULL_TREE;
2943 }
2944
2945
2946 /* Given a memory reference expression T, return its base address.
2947 The base address of a memory reference expression is the main
2948 object being referenced. For instance, the base address for
2949 'array[i].fld[j]' is 'array'. You can think of this as stripping
2950 away the offset part from a memory address.
2951
2952 This function calls handled_component_p to strip away all the inner
2953 parts of the memory reference until it reaches the base object. */
2954
2955 tree
2956 get_base_address (tree t)
2957 {
2958 while (handled_component_p (t))
2959 t = TREE_OPERAND (t, 0);
2960
2961 if (SSA_VAR_P (t)
2962 || TREE_CODE (t) == STRING_CST
2963 || TREE_CODE (t) == CONSTRUCTOR
2964 || INDIRECT_REF_P (t))
2965 return t;
2966 else
2967 return NULL_TREE;
2968 }
2969
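/* Recompute the TREE_SIDE_EFFECTS flag for T from its operands. */
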
2970 void
2971 recalculate_side_effects (tree t)
2972 {
2973 enum tree_code code = TREE_CODE (t);
2974 int len = TREE_OPERAND_LENGTH (t);
2975 int i;
2976
2977 switch (TREE_CODE_CLASS (code))
2978 {
2979 case tcc_expression:
2980 switch (code)
2981 {
2982 case INIT_EXPR:
2983 case MODIFY_EXPR:
2984 case VA_ARG_EXPR:
2985 case PREDECREMENT_EXPR:
2986 case PREINCREMENT_EXPR:
2987 case POSTDECREMENT_EXPR:
2988 case POSTINCREMENT_EXPR:
2989 /* All of these have side-effects, no matter what their
2990 operands are. */
2991 return;
2992
2993 default:
2994 break;
2995 }
2996 /* Fall through. */
2997
2998 case tcc_comparison: /* a comparison expression */
2999 case tcc_unary: /* a unary arithmetic expression */
3000 case tcc_binary: /* a binary arithmetic expression */
3001 case tcc_reference: /* a reference */
3002 case tcc_vl_exp: /* a function call */
3003 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3004 for (i = 0; i < len; ++i)
3005 {
3006 tree op = TREE_OPERAND (t, i);
3007 if (op && TREE_SIDE_EFFECTS (op))
3008 TREE_SIDE_EFFECTS (t) = 1;
3009 }
3010 break;
3011
3012 case tcc_constant:
3013 /* No side-effects. */
3014 return;
3015
3016 default:
3017 gcc_unreachable ();
3018 }
3019 }
3020
3021 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3022 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3023 we failed to create one. */
3024
3025 tree
3026 canonicalize_cond_expr_cond (tree t)
3027 {
3028 /* For (bool)x use x != 0. */
3029 if (TREE_CODE (t) == NOP_EXPR
3030 && TREE_TYPE (t) == boolean_type_node)
3031 {
3032 tree top0 = TREE_OPERAND (t, 0);
3033 t = build2 (NE_EXPR, TREE_TYPE (t),
3034 top0, build_int_cst (TREE_TYPE (top0), 0));
3035 }
3036 /* For !x use x == 0. */
3037 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3038 {
3039 tree top0 = TREE_OPERAND (t, 0);
3040 t = build2 (EQ_EXPR, TREE_TYPE (t),
3041 top0, build_int_cst (TREE_TYPE (top0), 0));
3042 }
3043 /* For cmp ? 1 : 0 use cmp. */
3044 else if (TREE_CODE (t) == COND_EXPR
3045 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3046 && integer_onep (TREE_OPERAND (t, 1))
3047 && integer_zerop (TREE_OPERAND (t, 2)))
3048 {
3049 tree top0 = TREE_OPERAND (t, 0);
3050 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3051 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3052 }
3053
3054 if (is_gimple_condexpr (t))
3055 return t;
3056
3057 return NULL_TREE;
3058 }
3059
3060 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3061 the positions marked by the set ARGS_TO_SKIP. */
3062
3063 gimple
3064 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3065 {
3066 int i;
3067 tree fn = gimple_call_fn (stmt);
3068 int nargs = gimple_call_num_args (stmt);
3069 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3070 gimple new_stmt;
3071
3072 for (i = 0; i < nargs; i++)
3073 if (!bitmap_bit_p (args_to_skip, i))
3074 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3075
3076 new_stmt = gimple_build_call_vec (fn, vargs);
3077 VEC_free (tree, heap, vargs);
3078 if (gimple_call_lhs (stmt))
3079 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3080
3081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3083
3084 gimple_set_block (new_stmt, gimple_block (stmt));
3085 if (gimple_has_location (stmt))
3086 gimple_set_location (new_stmt, gimple_location (stmt));
3087
3088 /* Carry all the flags to the new GIMPLE_CALL. */
3089 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3090 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
3091 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
3092 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
3093 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3094 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
3095
3096 gimple_set_modified (new_stmt, true);
3097
3098 return new_stmt;
3099 }
3100
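/* Usage sketch (illustrative only): dropping the second argument of
   a call, e.g. for a clone of the callee that no longer takes it;
   GSI is a hypothetical iterator pointing at the original call:

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 1);
     new_call = gimple_call_copy_skip_args (stmt, args_to_skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (args_to_skip);
*/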
3101
3102 /* Data structure used to count the number of dereferences to PTR
3103 inside an expression. */
3104 struct count_ptr_d
3105 {
3106 tree ptr;
3107 unsigned num_stores;
3108 unsigned num_loads;
3109 };
3110
3111 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
3112 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
3113
3114 static tree
3115 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3116 {
3117 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3118 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3119
3120 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3121 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3122 the address of 'fld' as 'ptr + offsetof(fld)'. */
3123 if (TREE_CODE (*tp) == ADDR_EXPR)
3124 {
3125 *walk_subtrees = 0;
3126 return NULL_TREE;
3127 }
3128
3129 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
3130 {
3131 if (wi_p->is_lhs)
3132 count_p->num_stores++;
3133 else
3134 count_p->num_loads++;
3135 }
3136
3137 return NULL_TREE;
3138 }
3139
3140 /* Count the number of direct and indirect uses for pointer PTR in
3141 statement STMT. The number of direct uses is stored in
3142 *NUM_USES_P. Indirect references are counted separately depending
3143 on whether they are store or load operations. The counts are
3144 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3145
3146 void
3147 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3148 unsigned *num_loads_p, unsigned *num_stores_p)
3149 {
3150 ssa_op_iter i;
3151 tree use;
3152
3153 *num_uses_p = 0;
3154 *num_loads_p = 0;
3155 *num_stores_p = 0;
3156
3157 /* Find out the total number of uses of PTR in STMT. */
3158 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3159 if (use == ptr)
3160 (*num_uses_p)++;
3161
3162 /* Now count the number of indirect references to PTR. This is
3163 truly awful, but we don't have much choice. There are no parent
3164 pointers inside INDIRECT_REFs, so an expression like
3165 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3166 find all the indirect and direct uses of x_1 inside. The only
3167 shortcut we can take is the fact that GIMPLE only allows
3168 INDIRECT_REFs inside the expressions below. */
3169 if (is_gimple_assign (stmt)
3170 || gimple_code (stmt) == GIMPLE_RETURN
3171 || gimple_code (stmt) == GIMPLE_ASM
3172 || is_gimple_call (stmt))
3173 {
3174 struct walk_stmt_info wi;
3175 struct count_ptr_d count;
3176
3177 count.ptr = ptr;
3178 count.num_stores = 0;
3179 count.num_loads = 0;
3180
3181 memset (&wi, 0, sizeof (wi));
3182 wi.info = &count;
3183 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3184
3185 *num_stores_p = count.num_stores;
3186 *num_loads_p = count.num_loads;
3187 }
3188
3189 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3190 }
3191
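/* Worked example: for the statement '*x_1 = foo (x_1, *x_1)' and
   PTR == x_1, this computes three uses, one store and one load:

     unsigned num_uses, num_loads, num_stores;
     count_uses_and_derefs (x_1, stmt, &num_uses, &num_loads,
                            &num_stores);
*/
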
3192 /* From a tree operand OP return the base of a load or store operation
3193 or NULL_TREE if OP is not a load or a store. */
3194
3195 static tree
3196 get_base_loadstore (tree op)
3197 {
3198 while (handled_component_p (op))
3199 op = TREE_OPERAND (op, 0);
3200 if (DECL_P (op)
3201 || INDIRECT_REF_P (op)
3202 || TREE_CODE (op) == TARGET_MEM_REF)
3203 return op;
3204 return NULL_TREE;
3205 }
3206
3207 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3208 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
3209 passing the STMT, the base of the operand and DATA to it. The base
3210 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3211 or the argument of an address expression.
3212 Returns the results of these callbacks or'ed. */
3213
3214 bool
3215 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3216 bool (*visit_load)(gimple, tree, void *),
3217 bool (*visit_store)(gimple, tree, void *),
3218 bool (*visit_addr)(gimple, tree, void *))
3219 {
3220 bool ret = false;
3221 unsigned i;
3222 if (gimple_assign_single_p (stmt))
3223 {
3224 tree lhs, rhs;
3225 if (visit_store)
3226 {
3227 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3228 if (lhs)
3229 ret |= visit_store (stmt, lhs, data);
3230 }
3231 rhs = gimple_assign_rhs1 (stmt);
3232 while (handled_component_p (rhs))
3233 rhs = TREE_OPERAND (rhs, 0);
3234 if (visit_addr)
3235 {
3236 if (TREE_CODE (rhs) == ADDR_EXPR)
3237 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3238 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3239 && TMR_BASE (rhs) != NULL_TREE
3240 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3241 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3242 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3243 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3244 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3245 0), data);
3246 lhs = gimple_assign_lhs (stmt);
3247 if (TREE_CODE (lhs) == TARGET_MEM_REF
3248 && TMR_BASE (lhs) != NULL_TREE
3249 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3250 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3251 }
3252 if (visit_load)
3253 {
3254 rhs = get_base_loadstore (rhs);
3255 if (rhs)
3256 ret |= visit_load (stmt, rhs, data);
3257 }
3258 }
3259 else if (visit_addr
3260 && (is_gimple_assign (stmt)
3261 || gimple_code (stmt) == GIMPLE_COND))
3262 {
3263 for (i = 0; i < gimple_num_ops (stmt); ++i)
3264 if (gimple_op (stmt, i)
3265 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
3266 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
3267 }
3268 else if (is_gimple_call (stmt))
3269 {
3270 if (visit_store)
3271 {
3272 tree lhs = gimple_call_lhs (stmt);
3273 if (lhs)
3274 {
3275 lhs = get_base_loadstore (lhs);
3276 if (lhs)
3277 ret |= visit_store (stmt, lhs, data);
3278 }
3279 }
3280 if (visit_load || visit_addr)
3281 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3282 {
3283 tree rhs = gimple_call_arg (stmt, i);
3284 if (visit_addr
3285 && TREE_CODE (rhs) == ADDR_EXPR)
3286 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3287 else if (visit_load)
3288 {
3289 rhs = get_base_loadstore (rhs);
3290 if (rhs)
3291 ret |= visit_load (stmt, rhs, data);
3292 }
3293 }
3294 if (visit_addr
3295 && gimple_call_chain (stmt)
3296 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3297 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3298 data);
3299 if (visit_addr
3300 && gimple_call_return_slot_opt_p (stmt)
3301 && gimple_call_lhs (stmt) != NULL_TREE
3302 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3303 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3304 }
3305 else if (gimple_code (stmt) == GIMPLE_ASM)
3306 {
3307 unsigned noutputs;
3308 const char *constraint;
3309 const char **oconstraints;
3310 bool allows_mem, allows_reg, is_inout;
3311 noutputs = gimple_asm_noutputs (stmt);
3312 oconstraints = XALLOCAVEC (const char *, noutputs);
3313 if (visit_store || visit_addr)
3314 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3315 {
3316 tree link = gimple_asm_output_op (stmt, i);
3317 tree op = get_base_loadstore (TREE_VALUE (link));
3318 if (op && visit_store)
3319 ret |= visit_store (stmt, op, data);
3320 if (visit_addr)
3321 {
3322 constraint = TREE_STRING_POINTER
3323 (TREE_VALUE (TREE_PURPOSE (link)));
3324 oconstraints[i] = constraint;
3325 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3326 &allows_reg, &is_inout);
3327 if (op && !allows_reg && allows_mem)
3328 ret |= visit_addr (stmt, op, data);
3329 }
3330 }
3331 if (visit_load || visit_addr)
3332 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3333 {
3334 tree link = gimple_asm_input_op (stmt, i);
3335 tree op = TREE_VALUE (link);
3336 if (visit_addr
3337 && TREE_CODE (op) == ADDR_EXPR)
3338 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3339 else if (visit_load || visit_addr)
3340 {
3341 op = get_base_loadstore (op);
3342 if (op)
3343 {
3344 if (visit_load)
3345 ret |= visit_load (stmt, op, data);
3346 if (visit_addr)
3347 {
3348 constraint = TREE_STRING_POINTER
3349 (TREE_VALUE (TREE_PURPOSE (link)));
3350 parse_input_constraint (&constraint, 0, 0, noutputs,
3351 0, oconstraints,
3352 &allows_mem, &allows_reg);
3353 if (!allows_reg && allows_mem)
3354 ret |= visit_addr (stmt, op, data);
3355 }
3356 }
3357 }
3358 }
3359 }
3360 else if (gimple_code (stmt) == GIMPLE_RETURN)
3361 {
3362 tree op = gimple_return_retval (stmt);
3363 if (op)
3364 {
3365 if (visit_addr
3366 && TREE_CODE (op) == ADDR_EXPR)
3367 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3368 else if (visit_load)
3369 {
3370 op = get_base_loadstore (op);
3371 if (op)
3372 ret |= visit_load (stmt, op, data);
3373 }
3374 }
3375 }
3376 else if (visit_addr
3377 && gimple_code (stmt) == GIMPLE_PHI)
3378 {
3379 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3380 {
3381 tree op = PHI_ARG_DEF (stmt, i);
3382 if (TREE_CODE (op) == ADDR_EXPR)
3383 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3384 }
3385 }
3386
3387 return ret;
3388 }
3389
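/* Usage sketch (illustrative only): collecting the UIDs of all decls
   a statement loads from, in the style of gimple_ior_addresses_taken_1
   below; the callback name and bitmap are hypothetical:

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       if (DECL_P (base))
         bitmap_set_bit ((bitmap) data, DECL_UID (base));
       return false;
     }

     ...
     walk_stmt_load_store_addr_ops (stmt, loaded_decls,
                                    note_load, NULL, NULL);
*/
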
3390 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3391 should make a faster clone for this case. */
3392
3393 bool
3394 walk_stmt_load_store_ops (gimple stmt, void *data,
3395 bool (*visit_load)(gimple, tree, void *),
3396 bool (*visit_store)(gimple, tree, void *))
3397 {
3398 return walk_stmt_load_store_addr_ops (stmt, data,
3399 visit_load, visit_store, NULL);
3400 }
3401
3402 /* Helper for gimple_ior_addresses_taken_1. */
3403
3404 static bool
3405 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3406 tree addr, void *data)
3407 {
3408 bitmap addresses_taken = (bitmap)data;
3409 while (handled_component_p (addr))
3410 addr = TREE_OPERAND (addr, 0);
3411 if (DECL_P (addr))
3412 {
3413 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3414 return true;
3415 }
3416 return false;
3417 }
3418
3419 /* Set the bit for the uid of all decls that have their address taken
3420 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3421 were any in this stmt. */
3422
3423 bool
3424 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3425 {
3426 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3427 gimple_ior_addresses_taken_1);
3428 }
3429
3430 #include "gt-gimple.h"