]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple.c
exp_dbug.ads (Packed Array Encoding): Document the new encoding for the unconstrained...
[thirdparty/gcc.git] / gcc / gimple.c
CommitLineData
726a989a
RB
1/* Gimple IR support functions.
2
bbbbb16a 3 Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
726a989a
RB
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "tree.h"
27#include "ggc.h"
726a989a
RB
28#include "hard-reg-set.h"
29#include "basic-block.h"
30#include "gimple.h"
38d2336a 31#include "toplev.h"
726a989a
RB
32#include "diagnostic.h"
33#include "tree-flow.h"
34#include "value-prof.h"
35#include "flags.h"
4537ec0c 36#include "demangle.h"
726a989a 37
726a989a 38
f2c4a81c 39/* All the tuples have their operand vector (if present) at the very bottom
726a989a
RB
40 of the structure. Therefore, the offset required to find the
41 operands vector the size of the structure minus the size of the 1
42 element tree array at the end (see gimple_ops). */
f2c4a81c
RH
43#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
44 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
6bc7bc14 45EXPORTED_CONST size_t gimple_ops_offset_[] = {
f2c4a81c
RH
46#include "gsstruct.def"
47};
48#undef DEFGSSTRUCT
49
50#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
51static const size_t gsstruct_code_size[] = {
52#include "gsstruct.def"
53};
54#undef DEFGSSTRUCT
55
56#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
57const char *const gimple_code_name[] = {
58#include "gimple.def"
59};
60#undef DEFGSCODE
61
62#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
63EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
726a989a
RB
64#include "gimple.def"
65};
66#undef DEFGSCODE
67
#ifdef GATHER_STATISTICS
/* Gimple stats: allocation counters and byte totals per allocation kind.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */
84
85/* A cache of gimple_seq objects. Sequences are created and destroyed
86 fairly often during gimplification. */
87static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
88
89/* Private API manipulation functions shared only with some
90 other files. */
91extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
92extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
93
94/* Gimple tuple constructors.
95 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
96 be passed a NULL to start with an empty sequence. */
97
98/* Set the code for statement G to CODE. */
99
100static inline void
101gimple_set_code (gimple g, enum gimple_code code)
102{
103 g->gsbase.code = code;
104}
105
726a989a
RB
106/* Return the number of bytes needed to hold a GIMPLE statement with
107 code CODE. */
108
f2c4a81c 109static inline size_t
726a989a
RB
110gimple_size (enum gimple_code code)
111{
f2c4a81c 112 return gsstruct_code_size[gss_for_code (code)];
726a989a
RB
113}
114
726a989a
RB
115/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
116 operands. */
117
118#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
119static gimple
120gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
121{
122 size_t size;
123 gimple stmt;
124
125 size = gimple_size (code);
126 if (num_ops > 0)
127 size += sizeof (tree) * (num_ops - 1);
128
129#ifdef GATHER_STATISTICS
130 {
131 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
132 gimple_alloc_counts[(int) kind]++;
133 gimple_alloc_sizes[(int) kind] += size;
134 }
135#endif
136
137 stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
138 gimple_set_code (stmt, code);
139 gimple_set_num_ops (stmt, num_ops);
140
141 /* Do not call gimple_set_modified here as it has other side
142 effects and this tuple is still not completely built. */
143 stmt->gsbase.modified = 1;
144
145 return stmt;
146}
147
148/* Set SUBCODE to be the code of the expression computed by statement G. */
149
150static inline void
151gimple_set_subcode (gimple g, unsigned subcode)
152{
153 /* We only have 16 bits for the RHS code. Assert that we are not
154 overflowing it. */
155 gcc_assert (subcode < (1 << 16));
156 g->gsbase.subcode = subcode;
157}
158
159
160
161/* Build a tuple with operands. CODE is the statement to build (which
162 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
163 for the new tuple. NUM_OPS is the number of operands to allocate. */
164
165#define gimple_build_with_ops(c, s, n) \
166 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
167
168static gimple
b5b8b0ac 169gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
726a989a
RB
170 unsigned num_ops MEM_STAT_DECL)
171{
172 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
173 gimple_set_subcode (s, subcode);
174
175 return s;
176}
177
178
179/* Build a GIMPLE_RETURN statement returning RETVAL. */
180
181gimple
182gimple_build_return (tree retval)
183{
bbbbb16a 184 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
726a989a
RB
185 if (retval)
186 gimple_return_set_retval (s, retval);
187 return s;
188}
189
190/* Helper for gimple_build_call, gimple_build_call_vec and
191 gimple_build_call_from_tree. Build the basic components of a
192 GIMPLE_CALL statement to function FN with NARGS arguments. */
193
194static inline gimple
195gimple_build_call_1 (tree fn, unsigned nargs)
196{
bbbbb16a 197 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
7c9577be
RG
198 if (TREE_CODE (fn) == FUNCTION_DECL)
199 fn = build_fold_addr_expr (fn);
726a989a
RB
200 gimple_set_op (s, 1, fn);
201 return s;
202}
203
204
205/* Build a GIMPLE_CALL statement to function FN with the arguments
206 specified in vector ARGS. */
207
208gimple
209gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
210{
211 unsigned i;
212 unsigned nargs = VEC_length (tree, args);
213 gimple call = gimple_build_call_1 (fn, nargs);
214
215 for (i = 0; i < nargs; i++)
216 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
217
218 return call;
219}
220
221
222/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
223 arguments. The ... are the arguments. */
224
225gimple
226gimple_build_call (tree fn, unsigned nargs, ...)
227{
228 va_list ap;
229 gimple call;
230 unsigned i;
231
232 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
233
234 call = gimple_build_call_1 (fn, nargs);
235
236 va_start (ap, nargs);
237 for (i = 0; i < nargs; i++)
238 gimple_call_set_arg (call, i, va_arg (ap, tree));
239 va_end (ap);
240
241 return call;
242}
243
244
245/* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
246 assumed to be in GIMPLE form already. Minimal checking is done of
247 this fact. */
248
249gimple
250gimple_build_call_from_tree (tree t)
251{
252 unsigned i, nargs;
253 gimple call;
254 tree fndecl = get_callee_fndecl (t);
255
256 gcc_assert (TREE_CODE (t) == CALL_EXPR);
257
258 nargs = call_expr_nargs (t);
259 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
260
261 for (i = 0; i < nargs; i++)
262 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
263
264 gimple_set_block (call, TREE_BLOCK (t));
265
266 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
267 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
268 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
269 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
270 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
271 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
272 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
d665b6e5 273 gimple_set_no_warning (call, TREE_NO_WARNING (t));
726a989a
RB
274
275 return call;
276}
277
278
279/* Extract the operands and code for expression EXPR into *SUBCODE_P,
280 *OP1_P and *OP2_P respectively. */
281
282void
283extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
284 tree *op2_p)
285{
82d6e6fc 286 enum gimple_rhs_class grhs_class;
726a989a
RB
287
288 *subcode_p = TREE_CODE (expr);
82d6e6fc 289 grhs_class = get_gimple_rhs_class (*subcode_p);
726a989a 290
82d6e6fc 291 if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
292 {
293 *op1_p = TREE_OPERAND (expr, 0);
294 *op2_p = TREE_OPERAND (expr, 1);
295 }
82d6e6fc 296 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
297 {
298 *op1_p = TREE_OPERAND (expr, 0);
299 *op2_p = NULL_TREE;
300 }
82d6e6fc 301 else if (grhs_class == GIMPLE_SINGLE_RHS)
726a989a
RB
302 {
303 *op1_p = expr;
304 *op2_p = NULL_TREE;
305 }
306 else
307 gcc_unreachable ();
308}
309
310
311/* Build a GIMPLE_ASSIGN statement.
312
313 LHS of the assignment.
314 RHS of the assignment which can be unary or binary. */
315
316gimple
317gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
318{
319 enum tree_code subcode;
320 tree op1, op2;
321
322 extract_ops_from_tree (rhs, &subcode, &op1, &op2);
323 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
324 PASS_MEM_STAT);
325}
326
327
328/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
329 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
330 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
331
332gimple
333gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
334 tree op2 MEM_STAT_DECL)
335{
336 unsigned num_ops;
337 gimple p;
338
339 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
340 code). */
341 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
342
b5b8b0ac 343 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
726a989a
RB
344 PASS_MEM_STAT);
345 gimple_assign_set_lhs (p, lhs);
346 gimple_assign_set_rhs1 (p, op1);
347 if (op2)
348 {
349 gcc_assert (num_ops > 2);
350 gimple_assign_set_rhs2 (p, op2);
351 }
352
353 return p;
354}
355
356
357/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
358
359 DST/SRC are the destination and source respectively. You can pass
360 ungimplified trees in DST or SRC, in which case they will be
361 converted to a gimple operand if necessary.
362
363 This function returns the newly created GIMPLE_ASSIGN tuple. */
364
5fd8300b 365gimple
726a989a
RB
366gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
367{
368 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
369 gimplify_and_add (t, seq_p);
370 ggc_free (t);
371 return gimple_seq_last_stmt (*seq_p);
372}
373
374
375/* Build a GIMPLE_COND statement.
376
377 PRED is the condition used to compare LHS and the RHS.
378 T_LABEL is the label to jump to if the condition is true.
379 F_LABEL is the label to jump to otherwise. */
380
381gimple
382gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
383 tree t_label, tree f_label)
384{
385 gimple p;
386
387 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
388 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
389 gimple_cond_set_lhs (p, lhs);
390 gimple_cond_set_rhs (p, rhs);
391 gimple_cond_set_true_label (p, t_label);
392 gimple_cond_set_false_label (p, f_label);
393 return p;
394}
395
396
397/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
398
399void
400gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
401 tree *lhs_p, tree *rhs_p)
402{
db3927fb 403 location_t loc = EXPR_LOCATION (cond);
726a989a
RB
404 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
405 || TREE_CODE (cond) == TRUTH_NOT_EXPR
406 || is_gimple_min_invariant (cond)
407 || SSA_VAR_P (cond));
408
409 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
410
411 /* Canonicalize conditionals of the form 'if (!VAL)'. */
412 if (*code_p == TRUTH_NOT_EXPR)
413 {
414 *code_p = EQ_EXPR;
415 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
db3927fb 416 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
726a989a
RB
417 }
418 /* Canonicalize conditionals of the form 'if (VAL)' */
419 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
420 {
421 *code_p = NE_EXPR;
422 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
db3927fb 423 *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
726a989a
RB
424 }
425}
426
427
428/* Build a GIMPLE_COND statement from the conditional expression tree
429 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
430
431gimple
432gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
433{
434 enum tree_code code;
435 tree lhs, rhs;
436
437 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
438 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
439}
440
441/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
442 boolean expression tree COND. */
443
444void
445gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
446{
447 enum tree_code code;
448 tree lhs, rhs;
449
450 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
451 gimple_cond_set_condition (stmt, code, lhs, rhs);
452}
453
454/* Build a GIMPLE_LABEL statement for LABEL. */
455
456gimple
457gimple_build_label (tree label)
458{
bbbbb16a 459 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
726a989a
RB
460 gimple_label_set_label (p, label);
461 return p;
462}
463
464/* Build a GIMPLE_GOTO statement to label DEST. */
465
466gimple
467gimple_build_goto (tree dest)
468{
bbbbb16a 469 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
726a989a
RB
470 gimple_goto_set_dest (p, dest);
471 return p;
472}
473
474
475/* Build a GIMPLE_NOP statement. */
476
477gimple
478gimple_build_nop (void)
479{
480 return gimple_alloc (GIMPLE_NOP, 0);
481}
482
483
484/* Build a GIMPLE_BIND statement.
485 VARS are the variables in BODY.
486 BLOCK is the containing block. */
487
488gimple
489gimple_build_bind (tree vars, gimple_seq body, tree block)
490{
491 gimple p = gimple_alloc (GIMPLE_BIND, 0);
492 gimple_bind_set_vars (p, vars);
493 if (body)
494 gimple_bind_set_body (p, body);
495 if (block)
496 gimple_bind_set_block (p, block);
497 return p;
498}
499
500/* Helper function to set the simple fields of a asm stmt.
501
502 STRING is a pointer to a string that is the asm blocks assembly code.
503 NINPUT is the number of register inputs.
504 NOUTPUT is the number of register outputs.
505 NCLOBBERS is the number of clobbered registers.
506 */
507
508static inline gimple
509gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
510 unsigned nclobbers)
511{
512 gimple p;
513 int size = strlen (string);
514
bbbbb16a
ILT
515 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
516 ninputs + noutputs + nclobbers);
726a989a
RB
517
518 p->gimple_asm.ni = ninputs;
519 p->gimple_asm.no = noutputs;
520 p->gimple_asm.nc = nclobbers;
521 p->gimple_asm.string = ggc_alloc_string (string, size);
522
523#ifdef GATHER_STATISTICS
524 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
525#endif
526
527 return p;
528}
529
530/* Build a GIMPLE_ASM statement.
531
532 STRING is the assembly code.
533 NINPUT is the number of register inputs.
534 NOUTPUT is the number of register outputs.
535 NCLOBBERS is the number of clobbered registers.
536 INPUTS is a vector of the input register parameters.
537 OUTPUTS is a vector of the output register parameters.
538 CLOBBERS is a vector of the clobbered register parameters. */
539
540gimple
541gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
542 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
543{
544 gimple p;
545 unsigned i;
546
547 p = gimple_build_asm_1 (string,
548 VEC_length (tree, inputs),
549 VEC_length (tree, outputs),
550 VEC_length (tree, clobbers));
551
552 for (i = 0; i < VEC_length (tree, inputs); i++)
553 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
554
555 for (i = 0; i < VEC_length (tree, outputs); i++)
556 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
557
558 for (i = 0; i < VEC_length (tree, clobbers); i++)
559 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
560
561 return p;
562}
563
564/* Build a GIMPLE_ASM statement.
565
566 STRING is the assembly code.
567 NINPUT is the number of register inputs.
568 NOUTPUT is the number of register outputs.
569 NCLOBBERS is the number of clobbered registers.
570 ... are trees for each input, output and clobbered register. */
571
572gimple
573gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
574 unsigned nclobbers, ...)
575{
576 gimple p;
577 unsigned i;
578 va_list ap;
579
580 p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);
581
582 va_start (ap, nclobbers);
583
584 for (i = 0; i < ninputs; i++)
585 gimple_asm_set_input_op (p, i, va_arg (ap, tree));
586
587 for (i = 0; i < noutputs; i++)
588 gimple_asm_set_output_op (p, i, va_arg (ap, tree));
589
590 for (i = 0; i < nclobbers; i++)
591 gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));
592
593 va_end (ap);
594
595 return p;
596}
597
598/* Build a GIMPLE_CATCH statement.
599
600 TYPES are the catch types.
601 HANDLER is the exception handler. */
602
603gimple
604gimple_build_catch (tree types, gimple_seq handler)
605{
606 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
607 gimple_catch_set_types (p, types);
608 if (handler)
609 gimple_catch_set_handler (p, handler);
610
611 return p;
612}
613
614/* Build a GIMPLE_EH_FILTER statement.
615
616 TYPES are the filter's types.
617 FAILURE is the filter's failure action. */
618
619gimple
620gimple_build_eh_filter (tree types, gimple_seq failure)
621{
622 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
623 gimple_eh_filter_set_types (p, types);
624 if (failure)
625 gimple_eh_filter_set_failure (p, failure);
626
627 return p;
628}
629
1d65f45c
RH
630/* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
631
632gimple
633gimple_build_eh_must_not_throw (tree decl)
634{
635 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 1);
636
637 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
638 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
639 p->gimple_eh_mnt.fndecl = decl;
640
641 return p;
642}
643
726a989a
RB
644/* Build a GIMPLE_TRY statement.
645
646 EVAL is the expression to evaluate.
647 CLEANUP is the cleanup expression.
648 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
649 whether this is a try/catch or a try/finally respectively. */
650
651gimple
652gimple_build_try (gimple_seq eval, gimple_seq cleanup,
653 enum gimple_try_flags kind)
654{
655 gimple p;
656
657 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
658 p = gimple_alloc (GIMPLE_TRY, 0);
659 gimple_set_subcode (p, kind);
660 if (eval)
661 gimple_try_set_eval (p, eval);
662 if (cleanup)
663 gimple_try_set_cleanup (p, cleanup);
664
665 return p;
666}
667
668/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
669
670 CLEANUP is the cleanup expression. */
671
672gimple
673gimple_build_wce (gimple_seq cleanup)
674{
675 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
676 if (cleanup)
677 gimple_wce_set_cleanup (p, cleanup);
678
679 return p;
680}
681
682
1d65f45c 683/* Build a GIMPLE_RESX statement. */
726a989a
RB
684
685gimple
686gimple_build_resx (int region)
687{
1d65f45c
RH
688 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
689 p->gimple_eh_ctrl.region = region;
726a989a
RB
690 return p;
691}
692
693
694/* The helper for constructing a gimple switch statement.
695 INDEX is the switch's index.
696 NLABELS is the number of labels in the switch excluding the default.
697 DEFAULT_LABEL is the default label for the switch statement. */
698
1d65f45c
RH
699gimple
700gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
726a989a
RB
701{
702 /* nlabels + 1 default label + 1 index. */
bbbbb16a 703 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
1d65f45c 704 1 + (default_label != NULL) + nlabels);
726a989a 705 gimple_switch_set_index (p, index);
1d65f45c
RH
706 if (default_label)
707 gimple_switch_set_default_label (p, default_label);
726a989a
RB
708 return p;
709}
710
711
712/* Build a GIMPLE_SWITCH statement.
713
714 INDEX is the switch's index.
715 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
716 ... are the labels excluding the default. */
717
718gimple
719gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
720{
721 va_list al;
1d65f45c
RH
722 unsigned i, offset;
723 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
726a989a
RB
724
725 /* Store the rest of the labels. */
726 va_start (al, default_label);
1d65f45c
RH
727 offset = (default_label != NULL);
728 for (i = 0; i < nlabels; i++)
729 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
726a989a
RB
730 va_end (al);
731
732 return p;
733}
734
735
736/* Build a GIMPLE_SWITCH statement.
737
738 INDEX is the switch's index.
739 DEFAULT_LABEL is the default label
740 ARGS is a vector of labels excluding the default. */
741
742gimple
743gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
744{
1d65f45c
RH
745 unsigned i, offset, nlabels = VEC_length (tree, args);
746 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
726a989a 747
1d65f45c
RH
748 /* Copy the labels from the vector to the switch statement. */
749 offset = (default_label != NULL);
750 for (i = 0; i < nlabels; i++)
751 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
726a989a
RB
752
753 return p;
754}
755
1d65f45c
RH
756/* Build a GIMPLE_EH_DISPATCH statement. */
757
758gimple
759gimple_build_eh_dispatch (int region)
760{
761 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
762 p->gimple_eh_ctrl.region = region;
763 return p;
764}
726a989a 765
b5b8b0ac
AO
766/* Build a new GIMPLE_DEBUG_BIND statement.
767
768 VAR is bound to VALUE; block and location are taken from STMT. */
769
770gimple
771gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
772{
773 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
774 (unsigned)GIMPLE_DEBUG_BIND, 2
775 PASS_MEM_STAT);
776
777 gimple_debug_bind_set_var (p, var);
778 gimple_debug_bind_set_value (p, value);
779 if (stmt)
780 {
781 gimple_set_block (p, gimple_block (stmt));
782 gimple_set_location (p, gimple_location (stmt));
783 }
784
785 return p;
786}
787
788
726a989a
RB
789/* Build a GIMPLE_OMP_CRITICAL statement.
790
791 BODY is the sequence of statements for which only one thread can execute.
792 NAME is optional identifier for this critical block. */
793
794gimple
795gimple_build_omp_critical (gimple_seq body, tree name)
796{
797 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
798 gimple_omp_critical_set_name (p, name);
799 if (body)
800 gimple_omp_set_body (p, body);
801
802 return p;
803}
804
805/* Build a GIMPLE_OMP_FOR statement.
806
807 BODY is sequence of statements inside the for loop.
808 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
809 lastprivate, reductions, ordered, schedule, and nowait.
810 COLLAPSE is the collapse count.
811 PRE_BODY is the sequence of statements that are loop invariant. */
812
813gimple
814gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
815 gimple_seq pre_body)
816{
817 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
818 if (body)
819 gimple_omp_set_body (p, body);
820 gimple_omp_for_set_clauses (p, clauses);
821 p->gimple_omp_for.collapse = collapse;
822 p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
823 if (pre_body)
824 gimple_omp_for_set_pre_body (p, pre_body);
825
826 return p;
827}
828
829
830/* Build a GIMPLE_OMP_PARALLEL statement.
831
832 BODY is sequence of statements which are executed in parallel.
833 CLAUSES, are the OMP parallel construct's clauses.
834 CHILD_FN is the function created for the parallel threads to execute.
835 DATA_ARG are the shared data argument(s). */
836
837gimple
838gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
839 tree data_arg)
840{
841 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
842 if (body)
843 gimple_omp_set_body (p, body);
844 gimple_omp_parallel_set_clauses (p, clauses);
845 gimple_omp_parallel_set_child_fn (p, child_fn);
846 gimple_omp_parallel_set_data_arg (p, data_arg);
847
848 return p;
849}
850
851
852/* Build a GIMPLE_OMP_TASK statement.
853
854 BODY is sequence of statements which are executed by the explicit task.
855 CLAUSES, are the OMP parallel construct's clauses.
856 CHILD_FN is the function created for the parallel threads to execute.
857 DATA_ARG are the shared data argument(s).
858 COPY_FN is the optional function for firstprivate initialization.
859 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
860
861gimple
862gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
863 tree data_arg, tree copy_fn, tree arg_size,
864 tree arg_align)
865{
866 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
867 if (body)
868 gimple_omp_set_body (p, body);
869 gimple_omp_task_set_clauses (p, clauses);
870 gimple_omp_task_set_child_fn (p, child_fn);
871 gimple_omp_task_set_data_arg (p, data_arg);
872 gimple_omp_task_set_copy_fn (p, copy_fn);
873 gimple_omp_task_set_arg_size (p, arg_size);
874 gimple_omp_task_set_arg_align (p, arg_align);
875
876 return p;
877}
878
879
880/* Build a GIMPLE_OMP_SECTION statement for a sections statement.
881
882 BODY is the sequence of statements in the section. */
883
884gimple
885gimple_build_omp_section (gimple_seq body)
886{
887 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
888 if (body)
889 gimple_omp_set_body (p, body);
890
891 return p;
892}
893
894
895/* Build a GIMPLE_OMP_MASTER statement.
896
897 BODY is the sequence of statements to be executed by just the master. */
898
899gimple
900gimple_build_omp_master (gimple_seq body)
901{
902 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
903 if (body)
904 gimple_omp_set_body (p, body);
905
906 return p;
907}
908
909
910/* Build a GIMPLE_OMP_CONTINUE statement.
911
912 CONTROL_DEF is the definition of the control variable.
913 CONTROL_USE is the use of the control variable. */
914
915gimple
916gimple_build_omp_continue (tree control_def, tree control_use)
917{
918 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
919 gimple_omp_continue_set_control_def (p, control_def);
920 gimple_omp_continue_set_control_use (p, control_use);
921 return p;
922}
923
924/* Build a GIMPLE_OMP_ORDERED statement.
925
926 BODY is the sequence of statements inside a loop that will executed in
927 sequence. */
928
929gimple
930gimple_build_omp_ordered (gimple_seq body)
931{
932 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
933 if (body)
934 gimple_omp_set_body (p, body);
935
936 return p;
937}
938
939
940/* Build a GIMPLE_OMP_RETURN statement.
941 WAIT_P is true if this is a non-waiting return. */
942
943gimple
944gimple_build_omp_return (bool wait_p)
945{
946 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
947 if (wait_p)
948 gimple_omp_return_set_nowait (p);
949
950 return p;
951}
952
953
954/* Build a GIMPLE_OMP_SECTIONS statement.
955
956 BODY is a sequence of section statements.
957 CLAUSES are any of the OMP sections contsruct's clauses: private,
958 firstprivate, lastprivate, reduction, and nowait. */
959
960gimple
961gimple_build_omp_sections (gimple_seq body, tree clauses)
962{
963 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
964 if (body)
965 gimple_omp_set_body (p, body);
966 gimple_omp_sections_set_clauses (p, clauses);
967
968 return p;
969}
970
971
972/* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
973
974gimple
975gimple_build_omp_sections_switch (void)
976{
977 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
978}
979
980
981/* Build a GIMPLE_OMP_SINGLE statement.
982
983 BODY is the sequence of statements that will be executed once.
984 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
985 copyprivate, nowait. */
986
987gimple
988gimple_build_omp_single (gimple_seq body, tree clauses)
989{
990 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
991 if (body)
992 gimple_omp_set_body (p, body);
993 gimple_omp_single_set_clauses (p, clauses);
994
995 return p;
996}
997
998
726a989a
RB
999/* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1000
1001gimple
1002gimple_build_omp_atomic_load (tree lhs, tree rhs)
1003{
1004 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1005 gimple_omp_atomic_load_set_lhs (p, lhs);
1006 gimple_omp_atomic_load_set_rhs (p, rhs);
1007 return p;
1008}
1009
1010/* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1011
1012 VAL is the value we are storing. */
1013
1014gimple
1015gimple_build_omp_atomic_store (tree val)
1016{
1017 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1018 gimple_omp_atomic_store_set_val (p, val);
1019 return p;
1020}
1021
1022/* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1023 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1024
1025gimple
1026gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1027{
1028 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1029 /* Ensure all the predictors fit into the lower bits of the subcode. */
e0c68ce9 1030 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
726a989a
RB
1031 gimple_predict_set_predictor (p, predictor);
1032 gimple_predict_set_outcome (p, outcome);
1033 return p;
1034}
1035
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* A zero subcode has no printable tree code name.  */
  const char *actual_subcode_name
    = gs->gsbase.subcode > 0 ? tree_code_name[gs->gsbase.subcode] : "";

  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  actual_subcode_name,
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1054
1055
1056/* Allocate a new GIMPLE sequence in GC memory and return it. If
1057 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1058 instead. */
1059
1060gimple_seq
1061gimple_seq_alloc (void)
1062{
1063 gimple_seq seq = gimple_seq_cache;
1064 if (seq)
1065 {
1066 gimple_seq_cache = gimple_seq_cache->next_free;
1067 gcc_assert (gimple_seq_cache != seq);
1068 memset (seq, 0, sizeof (*seq));
1069 }
1070 else
1071 {
1072 seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
1073#ifdef GATHER_STATISTICS
1074 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1075 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1076#endif
1077 }
1078
1079 return seq;
1080}
1081
1082/* Return SEQ to the free pool of GIMPLE sequences. */
1083
1084void
1085gimple_seq_free (gimple_seq seq)
1086{
1087 if (seq == NULL)
1088 return;
1089
1090 gcc_assert (gimple_seq_first (seq) == NULL);
1091 gcc_assert (gimple_seq_last (seq) == NULL);
1092
1093 /* If this triggers, it's a sign that the same list is being freed
1094 twice. */
1095 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1096
1097 /* Add SEQ to the pool of free sequences. */
1098 seq->next_free = gimple_seq_cache;
1099 gimple_seq_cache = seq;
1100}
1101
1102
1103/* Link gimple statement GS to the end of the sequence *SEQ_P. If
1104 *SEQ_P is NULL, a new sequence is allocated. */
1105
1106void
1107gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1108{
1109 gimple_stmt_iterator si;
1110
1111 if (gs == NULL)
1112 return;
1113
1114 if (*seq_p == NULL)
1115 *seq_p = gimple_seq_alloc ();
1116
1117 si = gsi_last (*seq_p);
1118 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1119}
1120
1121
1122/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1123 NULL, a new sequence is allocated. */
1124
1125void
1126gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1127{
1128 gimple_stmt_iterator si;
1129
1130 if (src == NULL)
1131 return;
1132
1133 if (*dst_p == NULL)
1134 *dst_p = gimple_seq_alloc ();
1135
1136 si = gsi_last (*dst_p);
1137 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1138}
1139
1140
1141/* Helper function of empty_body_p. Return true if STMT is an empty
1142 statement. */
1143
1144static bool
1145empty_stmt_p (gimple stmt)
1146{
1147 if (gimple_code (stmt) == GIMPLE_NOP)
1148 return true;
1149 if (gimple_code (stmt) == GIMPLE_BIND)
1150 return empty_body_p (gimple_bind_body (stmt));
1151 return false;
1152}
1153
1154
1155/* Return true if BODY contains nothing but empty statements. */
1156
1157bool
1158empty_body_p (gimple_seq body)
1159{
1160 gimple_stmt_iterator i;
1161
726a989a
RB
1162 if (gimple_seq_empty_p (body))
1163 return true;
1164 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
b5b8b0ac
AO
1165 if (!empty_stmt_p (gsi_stmt (i))
1166 && !is_gimple_debug (gsi_stmt (i)))
726a989a
RB
1167 return false;
1168
1169 return true;
1170}
1171
1172
1173/* Perform a deep copy of sequence SRC and return the result. */
1174
1175gimple_seq
1176gimple_seq_copy (gimple_seq src)
1177{
1178 gimple_stmt_iterator gsi;
82d6e6fc 1179 gimple_seq new_seq = gimple_seq_alloc ();
726a989a
RB
1180 gimple stmt;
1181
1182 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1183 {
1184 stmt = gimple_copy (gsi_stmt (gsi));
82d6e6fc 1185 gimple_seq_add_stmt (&new_seq, stmt);
726a989a
RB
1186 }
1187
82d6e6fc 1188 return new_seq;
726a989a
RB
1189}
1190
1191
1192/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1193 on each one. WI is as in walk_gimple_stmt.
1194
1195 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1196 value is stored in WI->CALLBACK_RESULT and the statement that
1197 produced the value is returned.
1198
1199 Otherwise, all the statements are walked and NULL returned. */
1200
1201gimple
1202walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1203 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1204{
1205 gimple_stmt_iterator gsi;
1206
1207 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1208 {
1209 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1210 if (ret)
1211 {
1212 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1213 to hold it. */
1214 gcc_assert (wi);
1215 wi->callback_result = ret;
1216 return gsi_stmt (gsi);
1217 }
1218 }
1219
1220 if (wi)
1221 wi->callback_result = NULL_TREE;
1222
1223 return NULL;
1224}
1225
1226
1227/* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1228
1229static tree
1230walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1231 struct walk_stmt_info *wi)
1232{
1233 tree ret;
1234 unsigned noutputs;
1235 const char **oconstraints;
1236 unsigned i;
1237 const char *constraint;
1238 bool allows_mem, allows_reg, is_inout;
1239
1240 noutputs = gimple_asm_noutputs (stmt);
1241 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1242
1243 if (wi)
1244 wi->is_lhs = true;
1245
1246 for (i = 0; i < noutputs; i++)
1247 {
1248 tree op = gimple_asm_output_op (stmt, i);
1249 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1250 oconstraints[i] = constraint;
1251 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1252 &is_inout);
1253 if (wi)
1254 wi->val_only = (allows_reg || !allows_mem);
1255 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1256 if (ret)
1257 return ret;
1258 }
1259
1260 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1261 {
1262 tree op = gimple_asm_input_op (stmt, i);
1263 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1264 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1265 oconstraints, &allows_mem, &allows_reg);
1266 if (wi)
1267 wi->val_only = (allows_reg || !allows_mem);
1268
1269 /* Although input "m" is not really a LHS, we need a lvalue. */
1270 if (wi)
1271 wi->is_lhs = !wi->val_only;
1272 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1273 if (ret)
1274 return ret;
1275 }
1276
1277 if (wi)
1278 {
1279 wi->is_lhs = false;
1280 wi->val_only = true;
1281 }
1282
1283 return NULL_TREE;
1284}
1285
1286
1287/* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1288 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1289
1290 CALLBACK_OP is called on each operand of STMT via walk_tree.
1291 Additional parameters to walk_tree must be stored in WI. For each operand
1292 OP, walk_tree is called as:
1293
1294 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1295
1296 If CALLBACK_OP returns non-NULL for an operand, the remaining
1297 operands are not scanned.
1298
1299 The return value is that returned by the last call to walk_tree, or
1300 NULL_TREE if no CALLBACK_OP is specified. */
1301
1302inline tree
1303walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1304 struct walk_stmt_info *wi)
1305{
1306 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1307 unsigned i;
1308 tree ret = NULL_TREE;
1309
1310 switch (gimple_code (stmt))
1311 {
1312 case GIMPLE_ASSIGN:
1313 /* Walk the RHS operands. A formal temporary LHS may use a
1314 COMPONENT_REF RHS. */
1315 if (wi)
ba4d8f9d
RG
1316 wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
1317 || !gimple_assign_single_p (stmt);
726a989a
RB
1318
1319 for (i = 1; i < gimple_num_ops (stmt); i++)
1320 {
1321 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1322 pset);
1323 if (ret)
1324 return ret;
1325 }
1326
1327 /* Walk the LHS. If the RHS is appropriate for a memory, we
1328 may use a COMPONENT_REF on the LHS. */
1329 if (wi)
1330 {
1331 /* If the RHS has more than 1 operand, it is not appropriate
1332 for the memory. */
1333 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1334 || !gimple_assign_single_p (stmt);
1335 wi->is_lhs = true;
1336 }
1337
1338 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1339 if (ret)
1340 return ret;
1341
1342 if (wi)
1343 {
1344 wi->val_only = true;
1345 wi->is_lhs = false;
1346 }
1347 break;
1348
1349 case GIMPLE_CALL:
1350 if (wi)
1351 wi->is_lhs = false;
1352
1353 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1354 if (ret)
1355 return ret;
1356
1357 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1358 if (ret)
1359 return ret;
1360
1361 for (i = 0; i < gimple_call_num_args (stmt); i++)
1362 {
1363 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1364 pset);
1365 if (ret)
1366 return ret;
1367 }
1368
1369 if (wi)
1370 wi->is_lhs = true;
1371
1372 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1373 if (ret)
1374 return ret;
1375
1376 if (wi)
1377 wi->is_lhs = false;
1378 break;
1379
1380 case GIMPLE_CATCH:
1381 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1382 pset);
1383 if (ret)
1384 return ret;
1385 break;
1386
1387 case GIMPLE_EH_FILTER:
1388 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1389 pset);
1390 if (ret)
1391 return ret;
1392 break;
1393
726a989a
RB
1394 case GIMPLE_ASM:
1395 ret = walk_gimple_asm (stmt, callback_op, wi);
1396 if (ret)
1397 return ret;
1398 break;
1399
1400 case GIMPLE_OMP_CONTINUE:
1401 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1402 callback_op, wi, pset);
1403 if (ret)
1404 return ret;
1405
1406 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1407 callback_op, wi, pset);
1408 if (ret)
1409 return ret;
1410 break;
1411
1412 case GIMPLE_OMP_CRITICAL:
1413 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1414 pset);
1415 if (ret)
1416 return ret;
1417 break;
1418
1419 case GIMPLE_OMP_FOR:
1420 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1421 pset);
1422 if (ret)
1423 return ret;
1424 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1425 {
1426 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1427 wi, pset);
1428 if (ret)
1429 return ret;
1430 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1431 wi, pset);
1432 if (ret)
1433 return ret;
1434 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1435 wi, pset);
1436 if (ret)
1437 return ret;
1438 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1439 wi, pset);
1440 }
1441 if (ret)
1442 return ret;
1443 break;
1444
1445 case GIMPLE_OMP_PARALLEL:
1446 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1447 wi, pset);
1448 if (ret)
1449 return ret;
1450 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1451 wi, pset);
1452 if (ret)
1453 return ret;
1454 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1455 wi, pset);
1456 if (ret)
1457 return ret;
1458 break;
1459
1460 case GIMPLE_OMP_TASK:
1461 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1462 wi, pset);
1463 if (ret)
1464 return ret;
1465 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1466 wi, pset);
1467 if (ret)
1468 return ret;
1469 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1470 wi, pset);
1471 if (ret)
1472 return ret;
1473 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1474 wi, pset);
1475 if (ret)
1476 return ret;
1477 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1478 wi, pset);
1479 if (ret)
1480 return ret;
1481 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1482 wi, pset);
1483 if (ret)
1484 return ret;
1485 break;
1486
1487 case GIMPLE_OMP_SECTIONS:
1488 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1489 wi, pset);
1490 if (ret)
1491 return ret;
1492
1493 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1494 wi, pset);
1495 if (ret)
1496 return ret;
1497
1498 break;
1499
1500 case GIMPLE_OMP_SINGLE:
1501 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1502 pset);
1503 if (ret)
1504 return ret;
1505 break;
1506
1507 case GIMPLE_OMP_ATOMIC_LOAD:
1508 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1509 pset);
1510 if (ret)
1511 return ret;
1512
1513 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1514 pset);
1515 if (ret)
1516 return ret;
1517 break;
1518
1519 case GIMPLE_OMP_ATOMIC_STORE:
1520 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1521 wi, pset);
1522 if (ret)
1523 return ret;
1524 break;
1525
1526 /* Tuples that do not have operands. */
1527 case GIMPLE_NOP:
1528 case GIMPLE_RESX:
1529 case GIMPLE_OMP_RETURN:
1530 case GIMPLE_PREDICT:
1531 break;
1532
1533 default:
1534 {
1535 enum gimple_statement_structure_enum gss;
1536 gss = gimple_statement_structure (stmt);
1537 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1538 for (i = 0; i < gimple_num_ops (stmt); i++)
1539 {
1540 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1541 if (ret)
1542 return ret;
1543 }
1544 }
1545 break;
1546 }
1547
1548 return NULL_TREE;
1549}
1550
1551
1552/* Walk the current statement in GSI (optionally using traversal state
1553 stored in WI). If WI is NULL, no state is kept during traversal.
1554 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1555 that it has handled all the operands of the statement, its return
1556 value is returned. Otherwise, the return value from CALLBACK_STMT
1557 is discarded and its operands are scanned.
1558
1559 If CALLBACK_STMT is NULL or it didn't handle the operands,
1560 CALLBACK_OP is called on each operand of the statement via
1561 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1562 operand, the remaining operands are not scanned. In this case, the
1563 return value from CALLBACK_OP is returned.
1564
1565 In any other case, NULL_TREE is returned. */
1566
1567tree
1568walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1569 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1570{
1571 gimple ret;
1572 tree tree_ret;
1573 gimple stmt = gsi_stmt (*gsi);
1574
1575 if (wi)
1576 wi->gsi = *gsi;
1577
1578 if (wi && wi->want_locations && gimple_has_location (stmt))
1579 input_location = gimple_location (stmt);
1580
1581 ret = NULL;
1582
1583 /* Invoke the statement callback. Return if the callback handled
1584 all of STMT operands by itself. */
1585 if (callback_stmt)
1586 {
1587 bool handled_ops = false;
1588 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1589 if (handled_ops)
1590 return tree_ret;
1591
1592 /* If CALLBACK_STMT did not handle operands, it should not have
1593 a value to return. */
1594 gcc_assert (tree_ret == NULL);
1595
1596 /* Re-read stmt in case the callback changed it. */
1597 stmt = gsi_stmt (*gsi);
1598 }
1599
1600 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1601 if (callback_op)
1602 {
1603 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1604 if (tree_ret)
1605 return tree_ret;
1606 }
1607
1608 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1609 switch (gimple_code (stmt))
1610 {
1611 case GIMPLE_BIND:
1612 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1613 callback_op, wi);
1614 if (ret)
1615 return wi->callback_result;
1616 break;
1617
1618 case GIMPLE_CATCH:
1619 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1620 callback_op, wi);
1621 if (ret)
1622 return wi->callback_result;
1623 break;
1624
1625 case GIMPLE_EH_FILTER:
1626 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1627 callback_op, wi);
1628 if (ret)
1629 return wi->callback_result;
1630 break;
1631
1632 case GIMPLE_TRY:
1633 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1634 wi);
1635 if (ret)
1636 return wi->callback_result;
1637
1638 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1639 callback_op, wi);
1640 if (ret)
1641 return wi->callback_result;
1642 break;
1643
1644 case GIMPLE_OMP_FOR:
1645 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1646 callback_op, wi);
1647 if (ret)
1648 return wi->callback_result;
1649
1650 /* FALL THROUGH. */
1651 case GIMPLE_OMP_CRITICAL:
1652 case GIMPLE_OMP_MASTER:
1653 case GIMPLE_OMP_ORDERED:
1654 case GIMPLE_OMP_SECTION:
1655 case GIMPLE_OMP_PARALLEL:
1656 case GIMPLE_OMP_TASK:
1657 case GIMPLE_OMP_SECTIONS:
1658 case GIMPLE_OMP_SINGLE:
1659 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1660 wi);
1661 if (ret)
1662 return wi->callback_result;
1663 break;
1664
1665 case GIMPLE_WITH_CLEANUP_EXPR:
1666 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1667 callback_op, wi);
1668 if (ret)
1669 return wi->callback_result;
1670 break;
1671
1672 default:
1673 gcc_assert (!gimple_has_substatements (stmt));
1674 break;
1675 }
1676
1677 return NULL;
1678}
1679
1680
1681/* Set sequence SEQ to be the GIMPLE body for function FN. */
1682
1683void
1684gimple_set_body (tree fndecl, gimple_seq seq)
1685{
1686 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1687 if (fn == NULL)
1688 {
1689 /* If FNDECL still does not have a function structure associated
1690 with it, then it does not make sense for it to receive a
1691 GIMPLE body. */
1692 gcc_assert (seq == NULL);
1693 }
1694 else
1695 fn->gimple_body = seq;
1696}
1697
1698
1699/* Return the body of GIMPLE statements for function FN. */
1700
1701gimple_seq
1702gimple_body (tree fndecl)
1703{
1704 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1705 return fn ? fn->gimple_body : NULL;
1706}
1707
39ecc018
JH
1708/* Return true when FNDECL has Gimple body either in unlowered
1709 or CFG form. */
1710bool
1711gimple_has_body_p (tree fndecl)
1712{
1713 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1714 return (gimple_body (fndecl) || (fn && fn->cfg));
1715}
726a989a
RB
1716
1717/* Detect flags from a GIMPLE_CALL. This is just like
1718 call_expr_flags, but for gimple tuples. */
1719
1720int
1721gimple_call_flags (const_gimple stmt)
1722{
1723 int flags;
1724 tree decl = gimple_call_fndecl (stmt);
1725 tree t;
1726
1727 if (decl)
1728 flags = flags_from_decl_or_type (decl);
1729 else
1730 {
1731 t = TREE_TYPE (gimple_call_fn (stmt));
1732 if (t && TREE_CODE (t) == POINTER_TYPE)
1733 flags = flags_from_decl_or_type (TREE_TYPE (t));
1734 else
1735 flags = 0;
1736 }
1737
1738 return flags;
1739}
1740
1741
1742/* Return true if GS is a copy assignment. */
1743
1744bool
1745gimple_assign_copy_p (gimple gs)
1746{
1747 return gimple_code (gs) == GIMPLE_ASSIGN
1748 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1749 == GIMPLE_SINGLE_RHS
1750 && is_gimple_val (gimple_op (gs, 1));
1751}
1752
1753
1754/* Return true if GS is a SSA_NAME copy assignment. */
1755
1756bool
1757gimple_assign_ssa_name_copy_p (gimple gs)
1758{
1759 return (gimple_code (gs) == GIMPLE_ASSIGN
1760 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1761 == GIMPLE_SINGLE_RHS)
1762 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1763 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1764}
1765
1766
1767/* Return true if GS is an assignment with a singleton RHS, i.e.,
1768 there is no operator associated with the assignment itself.
1769 Unlike gimple_assign_copy_p, this predicate returns true for
1770 any RHS operand, including those that perform an operation
1771 and do not have the semantics of a copy, such as COND_EXPR. */
1772
1773bool
1774gimple_assign_single_p (gimple gs)
1775{
1776 return (gimple_code (gs) == GIMPLE_ASSIGN
1777 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1778 == GIMPLE_SINGLE_RHS);
1779}
1780
1781/* Return true if GS is an assignment with a unary RHS, but the
1782 operator has no effect on the assigned value. The logic is adapted
1783 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1784 instances in which STRIP_NOPS was previously applied to the RHS of
1785 an assignment.
1786
1787 NOTE: In the use cases that led to the creation of this function
1788 and of gimple_assign_single_p, it is typical to test for either
1789 condition and to proceed in the same manner. In each case, the
1790 assigned value is represented by the single RHS operand of the
1791 assignment. I suspect there may be cases where gimple_assign_copy_p,
1792 gimple_assign_single_p, or equivalent logic is used where a similar
1793 treatment of unary NOPs is appropriate. */
1794
1795bool
1796gimple_assign_unary_nop_p (gimple gs)
1797{
1798 return (gimple_code (gs) == GIMPLE_ASSIGN
1a87cf0c 1799 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
726a989a
RB
1800 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1801 && gimple_assign_rhs1 (gs) != error_mark_node
1802 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1803 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1804}
1805
1806/* Set BB to be the basic block holding G. */
1807
1808void
1809gimple_set_bb (gimple stmt, basic_block bb)
1810{
1811 stmt->gsbase.bb = bb;
1812
1813 /* If the statement is a label, add the label to block-to-labels map
1814 so that we can speed up edge creation for GIMPLE_GOTOs. */
1815 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
1816 {
1817 tree t;
1818 int uid;
1819
1820 t = gimple_label_label (stmt);
1821 uid = LABEL_DECL_UID (t);
1822 if (uid == -1)
1823 {
1824 unsigned old_len = VEC_length (basic_block, label_to_block_map);
1825 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1826 if (old_len <= (unsigned) uid)
1827 {
5006671f 1828 unsigned new_len = 3 * uid / 2 + 1;
726a989a
RB
1829
1830 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
1831 new_len);
1832 }
1833 }
1834
1835 VEC_replace (basic_block, label_to_block_map, uid, bb);
1836 }
1837}
1838
1839
1840/* Fold the expression computed by STMT. If the expression can be
1841 folded, return the folded result, otherwise return NULL. STMT is
1842 not modified. */
1843
1844tree
1845gimple_fold (const_gimple stmt)
1846{
db3927fb 1847 location_t loc = gimple_location (stmt);
726a989a
RB
1848 switch (gimple_code (stmt))
1849 {
1850 case GIMPLE_COND:
db3927fb 1851 return fold_binary_loc (loc, gimple_cond_code (stmt),
726a989a
RB
1852 boolean_type_node,
1853 gimple_cond_lhs (stmt),
1854 gimple_cond_rhs (stmt));
1855
1856 case GIMPLE_ASSIGN:
1857 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1858 {
1859 case GIMPLE_UNARY_RHS:
db3927fb 1860 return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
726a989a
RB
1861 TREE_TYPE (gimple_assign_lhs (stmt)),
1862 gimple_assign_rhs1 (stmt));
1863 case GIMPLE_BINARY_RHS:
db3927fb 1864 return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
726a989a
RB
1865 TREE_TYPE (gimple_assign_lhs (stmt)),
1866 gimple_assign_rhs1 (stmt),
1867 gimple_assign_rhs2 (stmt));
1868 case GIMPLE_SINGLE_RHS:
1869 return fold (gimple_assign_rhs1 (stmt));
1870 default:;
1871 }
1872 break;
1873
1874 case GIMPLE_SWITCH:
1875 return gimple_switch_index (stmt);
1876
1877 case GIMPLE_CALL:
1878 return NULL_TREE;
1879
1880 default:
1881 break;
1882 }
1883
1884 gcc_unreachable ();
1885}
1886
1887
1888/* Modify the RHS of the assignment pointed-to by GSI using the
1889 operands in the expression tree EXPR.
1890
1891 NOTE: The statement pointed-to by GSI may be reallocated if it
1892 did not have enough operand slots.
1893
1894 This function is useful to convert an existing tree expression into
1895 the flat representation used for the RHS of a GIMPLE assignment.
1896 It will reallocate memory as needed to expand or shrink the number
1897 of operand slots needed to represent EXPR.
1898
1899 NOTE: If you find yourself building a tree and then calling this
1900 function, you are most certainly doing it the slow way. It is much
1901 better to build a new assignment or to use the function
1902 gimple_assign_set_rhs_with_ops, which does not require an
1903 expression tree to be built. */
1904
1905void
1906gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1907{
1908 enum tree_code subcode;
1909 tree op1, op2;
1910
1911 extract_ops_from_tree (expr, &subcode, &op1, &op2);
1912 gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
1913}
1914
1915
1916/* Set the RHS of assignment statement pointed-to by GSI to CODE with
1917 operands OP1 and OP2.
1918
1919 NOTE: The statement pointed-to by GSI may be reallocated if it
1920 did not have enough operand slots. */
1921
1922void
1923gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
1924 tree op1, tree op2)
1925{
1926 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1927 gimple stmt = gsi_stmt (*gsi);
1928
1929 /* If the new CODE needs more operands, allocate a new statement. */
1930 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1931 {
1932 tree lhs = gimple_assign_lhs (stmt);
1933 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1934 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1935 gsi_replace (gsi, new_stmt, true);
1936 stmt = new_stmt;
1937
1938 /* The LHS needs to be reset as this also changes the SSA name
1939 on the LHS. */
1940 gimple_assign_set_lhs (stmt, lhs);
1941 }
1942
1943 gimple_set_num_ops (stmt, new_rhs_ops + 1);
1944 gimple_set_subcode (stmt, code);
1945 gimple_assign_set_rhs1 (stmt, op1);
1946 if (new_rhs_ops > 1)
1947 gimple_assign_set_rhs2 (stmt, op2);
1948}
1949
1950
1951/* Return the LHS of a statement that performs an assignment,
1952 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1953 for a call to a function that returns no value, or for a
1954 statement other than an assignment or a call. */
1955
1956tree
1957gimple_get_lhs (const_gimple stmt)
1958{
e0c68ce9 1959 enum gimple_code code = gimple_code (stmt);
726a989a
RB
1960
1961 if (code == GIMPLE_ASSIGN)
1962 return gimple_assign_lhs (stmt);
1963 else if (code == GIMPLE_CALL)
1964 return gimple_call_lhs (stmt);
1965 else
1966 return NULL_TREE;
1967}
1968
1969
1970/* Set the LHS of a statement that performs an assignment,
1971 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1972
1973void
1974gimple_set_lhs (gimple stmt, tree lhs)
1975{
e0c68ce9 1976 enum gimple_code code = gimple_code (stmt);
726a989a
RB
1977
1978 if (code == GIMPLE_ASSIGN)
1979 gimple_assign_set_lhs (stmt, lhs);
1980 else if (code == GIMPLE_CALL)
1981 gimple_call_set_lhs (stmt, lhs);
1982 else
1983 gcc_unreachable();
1984}
1985
1986
1987/* Return a deep copy of statement STMT. All the operands from STMT
1988 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1989 and VUSE operand arrays are set to empty in the new copy. */
1990
1991gimple
1992gimple_copy (gimple stmt)
1993{
1994 enum gimple_code code = gimple_code (stmt);
1995 unsigned num_ops = gimple_num_ops (stmt);
1996 gimple copy = gimple_alloc (code, num_ops);
1997 unsigned i;
1998
1999 /* Shallow copy all the fields from STMT. */
2000 memcpy (copy, stmt, gimple_size (code));
2001
2002 /* If STMT has sub-statements, deep-copy them as well. */
2003 if (gimple_has_substatements (stmt))
2004 {
2005 gimple_seq new_seq;
2006 tree t;
2007
2008 switch (gimple_code (stmt))
2009 {
2010 case GIMPLE_BIND:
2011 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2012 gimple_bind_set_body (copy, new_seq);
2013 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2014 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2015 break;
2016
2017 case GIMPLE_CATCH:
2018 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2019 gimple_catch_set_handler (copy, new_seq);
2020 t = unshare_expr (gimple_catch_types (stmt));
2021 gimple_catch_set_types (copy, t);
2022 break;
2023
2024 case GIMPLE_EH_FILTER:
2025 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2026 gimple_eh_filter_set_failure (copy, new_seq);
2027 t = unshare_expr (gimple_eh_filter_types (stmt));
2028 gimple_eh_filter_set_types (copy, t);
2029 break;
2030
2031 case GIMPLE_TRY:
2032 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2033 gimple_try_set_eval (copy, new_seq);
2034 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2035 gimple_try_set_cleanup (copy, new_seq);
2036 break;
2037
2038 case GIMPLE_OMP_FOR:
2039 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2040 gimple_omp_for_set_pre_body (copy, new_seq);
2041 t = unshare_expr (gimple_omp_for_clauses (stmt));
2042 gimple_omp_for_set_clauses (copy, t);
2043 copy->gimple_omp_for.iter
2044 = GGC_NEWVEC (struct gimple_omp_for_iter,
2045 gimple_omp_for_collapse (stmt));
2046 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2047 {
2048 gimple_omp_for_set_cond (copy, i,
2049 gimple_omp_for_cond (stmt, i));
2050 gimple_omp_for_set_index (copy, i,
2051 gimple_omp_for_index (stmt, i));
2052 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2053 gimple_omp_for_set_initial (copy, i, t);
2054 t = unshare_expr (gimple_omp_for_final (stmt, i));
2055 gimple_omp_for_set_final (copy, i, t);
2056 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2057 gimple_omp_for_set_incr (copy, i, t);
2058 }
2059 goto copy_omp_body;
2060
2061 case GIMPLE_OMP_PARALLEL:
2062 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2063 gimple_omp_parallel_set_clauses (copy, t);
2064 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2065 gimple_omp_parallel_set_child_fn (copy, t);
2066 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2067 gimple_omp_parallel_set_data_arg (copy, t);
2068 goto copy_omp_body;
2069
2070 case GIMPLE_OMP_TASK:
2071 t = unshare_expr (gimple_omp_task_clauses (stmt));
2072 gimple_omp_task_set_clauses (copy, t);
2073 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2074 gimple_omp_task_set_child_fn (copy, t);
2075 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2076 gimple_omp_task_set_data_arg (copy, t);
2077 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2078 gimple_omp_task_set_copy_fn (copy, t);
2079 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2080 gimple_omp_task_set_arg_size (copy, t);
2081 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2082 gimple_omp_task_set_arg_align (copy, t);
2083 goto copy_omp_body;
2084
2085 case GIMPLE_OMP_CRITICAL:
2086 t = unshare_expr (gimple_omp_critical_name (stmt));
2087 gimple_omp_critical_set_name (copy, t);
2088 goto copy_omp_body;
2089
2090 case GIMPLE_OMP_SECTIONS:
2091 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2092 gimple_omp_sections_set_clauses (copy, t);
2093 t = unshare_expr (gimple_omp_sections_control (stmt));
2094 gimple_omp_sections_set_control (copy, t);
2095 /* FALLTHRU */
2096
2097 case GIMPLE_OMP_SINGLE:
2098 case GIMPLE_OMP_SECTION:
2099 case GIMPLE_OMP_MASTER:
2100 case GIMPLE_OMP_ORDERED:
2101 copy_omp_body:
2102 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2103 gimple_omp_set_body (copy, new_seq);
2104 break;
2105
2106 case GIMPLE_WITH_CLEANUP_EXPR:
2107 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2108 gimple_wce_set_cleanup (copy, new_seq);
2109 break;
2110
2111 default:
2112 gcc_unreachable ();
2113 }
2114 }
2115
2116 /* Make copy of operands. */
2117 if (num_ops > 0)
2118 {
2119 for (i = 0; i < num_ops; i++)
2120 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2121
ccacdf06 2122 /* Clear out SSA operand vectors on COPY. */
726a989a
RB
2123 if (gimple_has_ops (stmt))
2124 {
2125 gimple_set_def_ops (copy, NULL);
2126 gimple_set_use_ops (copy, NULL);
726a989a
RB
2127 }
2128
2129 if (gimple_has_mem_ops (stmt))
2130 {
5006671f
RG
2131 gimple_set_vdef (copy, gimple_vdef (stmt));
2132 gimple_set_vuse (copy, gimple_vuse (stmt));
726a989a
RB
2133 }
2134
5006671f
RG
2135 /* SSA operands need to be updated. */
2136 gimple_set_modified (copy, true);
726a989a
RB
2137 }
2138
2139 return copy;
2140}
2141
2142
2143/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
2144 a MODIFIED field. */
2145
2146void
2147gimple_set_modified (gimple s, bool modifiedp)
2148{
2149 if (gimple_has_ops (s))
2150 {
2151 s->gsbase.modified = (unsigned) modifiedp;
2152
2153 if (modifiedp
2154 && cfun->gimple_df
2155 && is_gimple_call (s)
2156 && gimple_call_noreturn_p (s))
2157 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2158 }
2159}
2160
2161
2162/* Return true if statement S has side-effects. We consider a
2163 statement to have side effects if:
2164
2165 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2166 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2167
2168bool
2169gimple_has_side_effects (const_gimple s)
2170{
2171 unsigned i;
2172
b5b8b0ac
AO
2173 if (is_gimple_debug (s))
2174 return false;
2175
726a989a
RB
2176 /* We don't have to scan the arguments to check for
2177 volatile arguments, though, at present, we still
2178 do a scan to check for TREE_SIDE_EFFECTS. */
2179 if (gimple_has_volatile_ops (s))
2180 return true;
2181
2182 if (is_gimple_call (s))
2183 {
2184 unsigned nargs = gimple_call_num_args (s);
2185
2186 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2187 return true;
2188 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2189 /* An infinite loop is considered a side effect. */
2190 return true;
2191
2192 if (gimple_call_lhs (s)
2193 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2194 {
2195 gcc_assert (gimple_has_volatile_ops (s));
2196 return true;
2197 }
2198
2199 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2200 return true;
2201
2202 for (i = 0; i < nargs; i++)
2203 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2204 {
2205 gcc_assert (gimple_has_volatile_ops (s));
2206 return true;
2207 }
2208
2209 return false;
2210 }
2211 else
2212 {
2213 for (i = 0; i < gimple_num_ops (s); i++)
2214 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2215 {
2216 gcc_assert (gimple_has_volatile_ops (s));
2217 return true;
2218 }
2219 }
2220
2221 return false;
2222}
2223
2224/* Return true if the RHS of statement S has side effects.
2225 We may use it to determine if it is admissable to replace
2226 an assignment or call with a copy of a previously-computed
2227 value. In such cases, side-effects due the the LHS are
2228 preserved. */
2229
2230bool
2231gimple_rhs_has_side_effects (const_gimple s)
2232{
2233 unsigned i;
2234
2235 if (is_gimple_call (s))
2236 {
2237 unsigned nargs = gimple_call_num_args (s);
2238
2239 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2240 return true;
2241
2242 /* We cannot use gimple_has_volatile_ops here,
2243 because we must ignore a volatile LHS. */
2244 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2245 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2246 {
2247 gcc_assert (gimple_has_volatile_ops (s));
2248 return true;
2249 }
2250
2251 for (i = 0; i < nargs; i++)
2252 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2253 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2254 return true;
2255
2256 return false;
2257 }
2258 else if (is_gimple_assign (s))
2259 {
2260 /* Skip the first operand, the LHS. */
2261 for (i = 1; i < gimple_num_ops (s); i++)
2262 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2263 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2264 {
2265 gcc_assert (gimple_has_volatile_ops (s));
2266 return true;
2267 }
2268 }
b5b8b0ac
AO
2269 else if (is_gimple_debug (s))
2270 return false;
726a989a
RB
2271 else
2272 {
2273 /* For statements without an LHS, examine all arguments. */
2274 for (i = 0; i < gimple_num_ops (s); i++)
2275 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2276 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2277 {
2278 gcc_assert (gimple_has_volatile_ops (s));
2279 return true;
2280 }
2281 }
2282
2283 return false;
2284}
2285
2286
2287/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2288 Return true if S can trap. If INCLUDE_LHS is true and S is a
2289 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2290 Otherwise, only the RHS of the assignment is checked. */
2291
2292static bool
2293gimple_could_trap_p_1 (gimple s, bool include_lhs)
2294{
2295 unsigned i, start;
2296 tree t, div = NULL_TREE;
2297 enum tree_code op;
2298
2299 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2300
2301 for (i = start; i < gimple_num_ops (s); i++)
2302 if (tree_could_trap_p (gimple_op (s, i)))
2303 return true;
2304
2305 switch (gimple_code (s))
2306 {
2307 case GIMPLE_ASM:
2308 return gimple_asm_volatile_p (s);
2309
2310 case GIMPLE_CALL:
2311 t = gimple_call_fndecl (s);
2312 /* Assume that calls to weak functions may trap. */
2313 if (!t || !DECL_P (t) || DECL_WEAK (t))
2314 return true;
2315 return false;
2316
2317 case GIMPLE_ASSIGN:
2318 t = gimple_expr_type (s);
2319 op = gimple_assign_rhs_code (s);
2320 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2321 div = gimple_assign_rhs2 (s);
2322 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2323 (INTEGRAL_TYPE_P (t)
2324 && TYPE_OVERFLOW_TRAPS (t)),
2325 div));
2326
2327 default:
2328 break;
2329 }
2330
2331 return false;
2332
2333}
2334
2335
2336/* Return true if statement S can trap. */
2337
2338bool
2339gimple_could_trap_p (gimple s)
2340{
2341 return gimple_could_trap_p_1 (s, true);
2342}
2343
2344
2345/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2346
2347bool
2348gimple_assign_rhs_could_trap_p (gimple s)
2349{
2350 gcc_assert (is_gimple_assign (s));
2351 return gimple_could_trap_p_1 (s, false);
2352}
2353
2354
/* Print allocation statistics for the GIMPLE tuples generated so far
   to stderr.  Only meaningful when GATHER_STATISTICS is defined.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int kind;
  int total_tuples = 0;
  int total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (kind = 0; kind < (int) gimple_alloc_kind_all; ++kind)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[kind],
	       gimple_alloc_counts[kind], gimple_alloc_sizes[kind]);
      total_tuples += gimple_alloc_counts[kind];
      total_bytes += gimple_alloc_sizes[kind];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}
2380
2381
726a989a
RB
2382/* Return the number of operands needed on the RHS of a GIMPLE
2383 assignment for an expression with tree code CODE. */
2384
2385unsigned
2386get_gimple_rhs_num_ops (enum tree_code code)
2387{
2388 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2389
2390 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2391 return 1;
2392 else if (rhs_class == GIMPLE_BINARY_RHS)
2393 return 2;
2394 else
2395 gcc_unreachable ();
2396}
2397
2398#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2399 (unsigned char) \
2400 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2401 : ((TYPE) == tcc_binary \
2402 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2403 : ((TYPE) == tcc_constant \
2404 || (TYPE) == tcc_declaration \
2405 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2406 : ((SYM) == TRUTH_AND_EXPR \
2407 || (SYM) == TRUTH_OR_EXPR \
2408 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2409 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2410 : ((SYM) == COND_EXPR \
2411 || (SYM) == CONSTRUCTOR \
2412 || (SYM) == OBJ_TYPE_REF \
2413 || (SYM) == ASSERT_EXPR \
2414 || (SYM) == ADDR_EXPR \
2415 || (SYM) == WITH_SIZE_EXPR \
726a989a 2416 || (SYM) == SSA_NAME \
726a989a
RB
2417 || (SYM) == POLYNOMIAL_CHREC \
2418 || (SYM) == DOT_PROD_EXPR \
2419 || (SYM) == VEC_COND_EXPR \
2420 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2421 : GIMPLE_INVALID_RHS),
2422#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2423
2424const unsigned char gimple_rhs_class_table[] = {
2425#include "all-tree.def"
2426};
2427
2428#undef DEFTREECODE
2429#undef END_OF_BASE_TREE_CODES
2430
2431/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2432
2433/* Validation of GIMPLE expressions. */
2434
2435/* Return true if OP is an acceptable tree node to be used as a GIMPLE
2436 operand. */
2437
2438bool
2439is_gimple_operand (const_tree op)
2440{
2441 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2442}
2443
726a989a
RB
2444/* Returns true iff T is a valid RHS for an assignment to a renamed
2445 user -- or front-end generated artificial -- variable. */
2446
2447bool
2448is_gimple_reg_rhs (tree t)
2449{
ba4d8f9d 2450 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
726a989a
RB
2451}
2452
2453/* Returns true iff T is a valid RHS for an assignment to an un-renamed
2454 LHS, or for a call argument. */
2455
2456bool
2457is_gimple_mem_rhs (tree t)
2458{
2459 /* If we're dealing with a renamable type, either source or dest must be
2460 a renamed variable. */
2461 if (is_gimple_reg_type (TREE_TYPE (t)))
2462 return is_gimple_val (t);
2463 else
ba4d8f9d 2464 return is_gimple_val (t) || is_gimple_lvalue (t);
726a989a
RB
2465}
2466
2467/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2468
2469bool
2470is_gimple_lvalue (tree t)
2471{
2472 return (is_gimple_addressable (t)
2473 || TREE_CODE (t) == WITH_SIZE_EXPR
2474 /* These are complex lvalues, but don't have addresses, so they
2475 go here. */
2476 || TREE_CODE (t) == BIT_FIELD_REF);
2477}
2478
2479/* Return true if T is a GIMPLE condition. */
2480
2481bool
2482is_gimple_condexpr (tree t)
2483{
2484 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2485 && !tree_could_trap_p (t)
2486 && is_gimple_val (TREE_OPERAND (t, 0))
2487 && is_gimple_val (TREE_OPERAND (t, 1))));
2488}
2489
2490/* Return true if T is something whose address can be taken. */
2491
2492bool
2493is_gimple_addressable (tree t)
2494{
2495 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2496}
2497
2498/* Return true if T is a valid gimple constant. */
2499
2500bool
2501is_gimple_constant (const_tree t)
2502{
2503 switch (TREE_CODE (t))
2504 {
2505 case INTEGER_CST:
2506 case REAL_CST:
2507 case FIXED_CST:
2508 case STRING_CST:
2509 case COMPLEX_CST:
2510 case VECTOR_CST:
2511 return true;
2512
2513 /* Vector constant constructors are gimple invariant. */
2514 case CONSTRUCTOR:
2515 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2516 return TREE_CONSTANT (t);
2517 else
2518 return false;
2519
2520 default:
2521 return false;
2522 }
2523}
2524
2525/* Return true if T is a gimple address. */
2526
2527bool
2528is_gimple_address (const_tree t)
2529{
2530 tree op;
2531
2532 if (TREE_CODE (t) != ADDR_EXPR)
2533 return false;
2534
2535 op = TREE_OPERAND (t, 0);
2536 while (handled_component_p (op))
2537 {
2538 if ((TREE_CODE (op) == ARRAY_REF
2539 || TREE_CODE (op) == ARRAY_RANGE_REF)
2540 && !is_gimple_val (TREE_OPERAND (op, 1)))
2541 return false;
2542
2543 op = TREE_OPERAND (op, 0);
2544 }
2545
2546 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2547 return true;
2548
2549 switch (TREE_CODE (op))
2550 {
2551 case PARM_DECL:
2552 case RESULT_DECL:
2553 case LABEL_DECL:
2554 case FUNCTION_DECL:
2555 case VAR_DECL:
2556 case CONST_DECL:
2557 return true;
2558
2559 default:
2560 return false;
2561 }
2562}
2563
00fc2333
JH
2564/* Strip out all handled components that produce invariant
2565 offsets. */
726a989a 2566
00fc2333
JH
2567static const_tree
2568strip_invariant_refs (const_tree op)
726a989a 2569{
726a989a
RB
2570 while (handled_component_p (op))
2571 {
2572 switch (TREE_CODE (op))
2573 {
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2577 || TREE_OPERAND (op, 2) != NULL_TREE
2578 || TREE_OPERAND (op, 3) != NULL_TREE)
00fc2333 2579 return NULL;
726a989a
RB
2580 break;
2581
2582 case COMPONENT_REF:
2583 if (TREE_OPERAND (op, 2) != NULL_TREE)
00fc2333 2584 return NULL;
726a989a
RB
2585 break;
2586
2587 default:;
2588 }
2589 op = TREE_OPERAND (op, 0);
2590 }
2591
00fc2333
JH
2592 return op;
2593}
2594
2595/* Return true if T is a gimple invariant address. */
2596
2597bool
2598is_gimple_invariant_address (const_tree t)
2599{
2600 const_tree op;
2601
2602 if (TREE_CODE (t) != ADDR_EXPR)
2603 return false;
2604
2605 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2606
2607 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2608}
2609
2610/* Return true if T is a gimple invariant address at IPA level
2611 (so addresses of variables on stack are not allowed). */
2612
2613bool
2614is_gimple_ip_invariant_address (const_tree t)
2615{
2616 const_tree op;
2617
2618 if (TREE_CODE (t) != ADDR_EXPR)
2619 return false;
2620
2621 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2622
2623 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
726a989a
RB
2624}
2625
2626/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2627 form of function invariant. */
2628
2629bool
2630is_gimple_min_invariant (const_tree t)
2631{
2632 if (TREE_CODE (t) == ADDR_EXPR)
2633 return is_gimple_invariant_address (t);
2634
2635 return is_gimple_constant (t);
2636}
2637
00fc2333
JH
2638/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2639 form of gimple minimal invariant. */
2640
2641bool
2642is_gimple_ip_invariant (const_tree t)
2643{
2644 if (TREE_CODE (t) == ADDR_EXPR)
2645 return is_gimple_ip_invariant_address (t);
2646
2647 return is_gimple_constant (t);
2648}
2649
726a989a
RB
2650/* Return true if T looks like a valid GIMPLE statement. */
2651
2652bool
2653is_gimple_stmt (tree t)
2654{
2655 const enum tree_code code = TREE_CODE (t);
2656
2657 switch (code)
2658 {
2659 case NOP_EXPR:
2660 /* The only valid NOP_EXPR is the empty statement. */
2661 return IS_EMPTY_STMT (t);
2662
2663 case BIND_EXPR:
2664 case COND_EXPR:
2665 /* These are only valid if they're void. */
2666 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2667
2668 case SWITCH_EXPR:
2669 case GOTO_EXPR:
2670 case RETURN_EXPR:
2671 case LABEL_EXPR:
2672 case CASE_LABEL_EXPR:
2673 case TRY_CATCH_EXPR:
2674 case TRY_FINALLY_EXPR:
2675 case EH_FILTER_EXPR:
2676 case CATCH_EXPR:
726a989a 2677 case ASM_EXPR:
726a989a
RB
2678 case STATEMENT_LIST:
2679 case OMP_PARALLEL:
2680 case OMP_FOR:
2681 case OMP_SECTIONS:
2682 case OMP_SECTION:
2683 case OMP_SINGLE:
2684 case OMP_MASTER:
2685 case OMP_ORDERED:
2686 case OMP_CRITICAL:
2687 case OMP_TASK:
2688 /* These are always void. */
2689 return true;
2690
2691 case CALL_EXPR:
2692 case MODIFY_EXPR:
2693 case PREDICT_EXPR:
2694 /* These are valid regardless of their type. */
2695 return true;
2696
2697 default:
2698 return false;
2699 }
2700}
2701
2702/* Return true if T is a variable. */
2703
2704bool
2705is_gimple_variable (tree t)
2706{
2707 return (TREE_CODE (t) == VAR_DECL
2708 || TREE_CODE (t) == PARM_DECL
2709 || TREE_CODE (t) == RESULT_DECL
2710 || TREE_CODE (t) == SSA_NAME);
2711}
2712
2713/* Return true if T is a GIMPLE identifier (something with an address). */
2714
2715bool
2716is_gimple_id (tree t)
2717{
2718 return (is_gimple_variable (t)
2719 || TREE_CODE (t) == FUNCTION_DECL
2720 || TREE_CODE (t) == LABEL_DECL
2721 || TREE_CODE (t) == CONST_DECL
2722 /* Allow string constants, since they are addressable. */
2723 || TREE_CODE (t) == STRING_CST);
2724}
2725
2726/* Return true if TYPE is a suitable type for a scalar register variable. */
2727
2728bool
2729is_gimple_reg_type (tree type)
2730{
4636b850 2731 return !AGGREGATE_TYPE_P (type);
726a989a
RB
2732}
2733
2734/* Return true if T is a non-aggregate register variable. */
2735
2736bool
2737is_gimple_reg (tree t)
2738{
2739 if (TREE_CODE (t) == SSA_NAME)
2740 t = SSA_NAME_VAR (t);
2741
726a989a
RB
2742 if (!is_gimple_variable (t))
2743 return false;
2744
2745 if (!is_gimple_reg_type (TREE_TYPE (t)))
2746 return false;
2747
2748 /* A volatile decl is not acceptable because we can't reuse it as
2749 needed. We need to copy it into a temp first. */
2750 if (TREE_THIS_VOLATILE (t))
2751 return false;
2752
2753 /* We define "registers" as things that can be renamed as needed,
2754 which with our infrastructure does not apply to memory. */
2755 if (needs_to_live_in_memory (t))
2756 return false;
2757
2758 /* Hard register variables are an interesting case. For those that
2759 are call-clobbered, we don't know where all the calls are, since
2760 we don't (want to) take into account which operations will turn
2761 into libcalls at the rtl level. For those that are call-saved,
2762 we don't currently model the fact that calls may in fact change
2763 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2764 level, and so miss variable changes that might imply. All around,
2765 it seems safest to not do too much optimization with these at the
2766 tree level at all. We'll have to rely on the rtl optimizers to
2767 clean this up, as there we've got all the appropriate bits exposed. */
2768 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2769 return false;
2770
4636b850
RG
2771 /* Complex and vector values must have been put into SSA-like form.
2772 That is, no assignments to the individual components. */
2773 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2774 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2775 return DECL_GIMPLE_REG_P (t);
2776
726a989a
RB
2777 return true;
2778}
2779
2780
726a989a
RB
2781/* Return true if T is a GIMPLE variable whose address is not needed. */
2782
2783bool
2784is_gimple_non_addressable (tree t)
2785{
2786 if (TREE_CODE (t) == SSA_NAME)
2787 t = SSA_NAME_VAR (t);
2788
2789 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2790}
2791
2792/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2793
2794bool
2795is_gimple_val (tree t)
2796{
2797 /* Make loads from volatiles and memory vars explicit. */
2798 if (is_gimple_variable (t)
2799 && is_gimple_reg_type (TREE_TYPE (t))
2800 && !is_gimple_reg (t))
2801 return false;
2802
726a989a
RB
2803 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2804}
2805
2806/* Similarly, but accept hard registers as inputs to asm statements. */
2807
2808bool
2809is_gimple_asm_val (tree t)
2810{
2811 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2812 return true;
2813
2814 return is_gimple_val (t);
2815}
2816
2817/* Return true if T is a GIMPLE minimal lvalue. */
2818
2819bool
2820is_gimple_min_lval (tree t)
2821{
ba4d8f9d
RG
2822 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2823 return false;
726a989a
RB
2824 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2825}
2826
2827/* Return true if T is a typecast operation. */
2828
2829bool
2830is_gimple_cast (tree t)
2831{
2832 return (CONVERT_EXPR_P (t)
2833 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2834}
2835
2836/* Return true if T is a valid function operand of a CALL_EXPR. */
2837
2838bool
2839is_gimple_call_addr (tree t)
2840{
2841 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2842}
2843
2844/* If T makes a function call, return the corresponding CALL_EXPR operand.
2845 Otherwise, return NULL_TREE. */
2846
2847tree
2848get_call_expr_in (tree t)
2849{
2850 if (TREE_CODE (t) == MODIFY_EXPR)
2851 t = TREE_OPERAND (t, 1);
2852 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2853 t = TREE_OPERAND (t, 0);
2854 if (TREE_CODE (t) == CALL_EXPR)
2855 return t;
2856 return NULL_TREE;
2857}
2858
2859
2860/* Given a memory reference expression T, return its base address.
2861 The base address of a memory reference expression is the main
2862 object being referenced. For instance, the base address for
2863 'array[i].fld[j]' is 'array'. You can think of this as stripping
2864 away the offset part from a memory address.
2865
2866 This function calls handled_component_p to strip away all the inner
2867 parts of the memory reference until it reaches the base object. */
2868
2869tree
2870get_base_address (tree t)
2871{
2872 while (handled_component_p (t))
2873 t = TREE_OPERAND (t, 0);
2874
2875 if (SSA_VAR_P (t)
2876 || TREE_CODE (t) == STRING_CST
2877 || TREE_CODE (t) == CONSTRUCTOR
2878 || INDIRECT_REF_P (t))
2879 return t;
2880 else
2881 return NULL_TREE;
2882}
2883
2884void
2885recalculate_side_effects (tree t)
2886{
2887 enum tree_code code = TREE_CODE (t);
2888 int len = TREE_OPERAND_LENGTH (t);
2889 int i;
2890
2891 switch (TREE_CODE_CLASS (code))
2892 {
2893 case tcc_expression:
2894 switch (code)
2895 {
2896 case INIT_EXPR:
2897 case MODIFY_EXPR:
2898 case VA_ARG_EXPR:
2899 case PREDECREMENT_EXPR:
2900 case PREINCREMENT_EXPR:
2901 case POSTDECREMENT_EXPR:
2902 case POSTINCREMENT_EXPR:
2903 /* All of these have side-effects, no matter what their
2904 operands are. */
2905 return;
2906
2907 default:
2908 break;
2909 }
2910 /* Fall through. */
2911
2912 case tcc_comparison: /* a comparison expression */
2913 case tcc_unary: /* a unary arithmetic expression */
2914 case tcc_binary: /* a binary arithmetic expression */
2915 case tcc_reference: /* a reference */
2916 case tcc_vl_exp: /* a function call */
2917 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2918 for (i = 0; i < len; ++i)
2919 {
2920 tree op = TREE_OPERAND (t, i);
2921 if (op && TREE_SIDE_EFFECTS (op))
2922 TREE_SIDE_EFFECTS (t) = 1;
2923 }
2924 break;
2925
13f95bdb
EB
2926 case tcc_constant:
2927 /* No side-effects. */
2928 return;
2929
726a989a 2930 default:
726a989a
RB
2931 gcc_unreachable ();
2932 }
2933}
2934
2935/* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2936 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2937 we failed to create one. */
2938
2939tree
2940canonicalize_cond_expr_cond (tree t)
2941{
2942 /* For (bool)x use x != 0. */
2943 if (TREE_CODE (t) == NOP_EXPR
2944 && TREE_TYPE (t) == boolean_type_node)
2945 {
2946 tree top0 = TREE_OPERAND (t, 0);
2947 t = build2 (NE_EXPR, TREE_TYPE (t),
2948 top0, build_int_cst (TREE_TYPE (top0), 0));
2949 }
2950 /* For !x use x == 0. */
2951 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2952 {
2953 tree top0 = TREE_OPERAND (t, 0);
2954 t = build2 (EQ_EXPR, TREE_TYPE (t),
2955 top0, build_int_cst (TREE_TYPE (top0), 0));
2956 }
2957 /* For cmp ? 1 : 0 use cmp. */
2958 else if (TREE_CODE (t) == COND_EXPR
2959 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2960 && integer_onep (TREE_OPERAND (t, 1))
2961 && integer_zerop (TREE_OPERAND (t, 2)))
2962 {
2963 tree top0 = TREE_OPERAND (t, 0);
2964 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2965 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2966 }
2967
2968 if (is_gimple_condexpr (t))
2969 return t;
2970
2971 return NULL_TREE;
2972}
2973
e6c99067
DN
2974/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2975 the positions marked by the set ARGS_TO_SKIP. */
2976
c6f7cfc1 2977gimple
5c0466b5 2978gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
c6f7cfc1
JH
2979{
2980 int i;
2981 tree fn = gimple_call_fn (stmt);
2982 int nargs = gimple_call_num_args (stmt);
2983 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
2984 gimple new_stmt;
2985
2986 for (i = 0; i < nargs; i++)
2987 if (!bitmap_bit_p (args_to_skip, i))
2988 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
2989
2990 new_stmt = gimple_build_call_vec (fn, vargs);
2991 VEC_free (tree, heap, vargs);
2992 if (gimple_call_lhs (stmt))
2993 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2994
5006671f
RG
2995 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2996 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2997
c6f7cfc1
JH
2998 gimple_set_block (new_stmt, gimple_block (stmt));
2999 if (gimple_has_location (stmt))
3000 gimple_set_location (new_stmt, gimple_location (stmt));
3001
3002 /* Carry all the flags to the new GIMPLE_CALL. */
3003 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3004 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
3005 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
3006 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
3007 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3008 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
5006671f
RG
3009
3010 gimple_set_modified (new_stmt, true);
3011
c6f7cfc1
JH
3012 return new_stmt;
3013}
3014
5006671f
RG
3015
3016/* Data structure used to count the number of dereferences to PTR
3017 inside an expression. */
3018struct count_ptr_d
3019{
3020 tree ptr;
3021 unsigned num_stores;
3022 unsigned num_loads;
3023};
3024
3025/* Helper for count_uses_and_derefs. Called by walk_tree to look for
3026 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
3027
3028static tree
3029count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3030{
3031 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3032 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3033
3034 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3035 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3036 the address of 'fld' as 'ptr + offsetof(fld)'. */
3037 if (TREE_CODE (*tp) == ADDR_EXPR)
3038 {
3039 *walk_subtrees = 0;
3040 return NULL_TREE;
3041 }
3042
3043 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
3044 {
3045 if (wi_p->is_lhs)
3046 count_p->num_stores++;
3047 else
3048 count_p->num_loads++;
3049 }
3050
3051 return NULL_TREE;
3052}
3053
3054/* Count the number of direct and indirect uses for pointer PTR in
3055 statement STMT. The number of direct uses is stored in
3056 *NUM_USES_P. Indirect references are counted separately depending
3057 on whether they are store or load operations. The counts are
3058 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3059
3060void
3061count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3062 unsigned *num_loads_p, unsigned *num_stores_p)
3063{
3064 ssa_op_iter i;
3065 tree use;
3066
3067 *num_uses_p = 0;
3068 *num_loads_p = 0;
3069 *num_stores_p = 0;
3070
3071 /* Find out the total number of uses of PTR in STMT. */
3072 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3073 if (use == ptr)
3074 (*num_uses_p)++;
3075
3076 /* Now count the number of indirect references to PTR. This is
3077 truly awful, but we don't have much choice. There are no parent
3078 pointers inside INDIRECT_REFs, so an expression like
3079 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3080 find all the indirect and direct uses of x_1 inside. The only
3081 shortcut we can take is the fact that GIMPLE only allows
3082 INDIRECT_REFs inside the expressions below. */
3083 if (is_gimple_assign (stmt)
3084 || gimple_code (stmt) == GIMPLE_RETURN
3085 || gimple_code (stmt) == GIMPLE_ASM
3086 || is_gimple_call (stmt))
3087 {
3088 struct walk_stmt_info wi;
3089 struct count_ptr_d count;
3090
3091 count.ptr = ptr;
3092 count.num_stores = 0;
3093 count.num_loads = 0;
3094
3095 memset (&wi, 0, sizeof (wi));
3096 wi.info = &count;
3097 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3098
3099 *num_stores_p = count.num_stores;
3100 *num_loads_p = count.num_loads;
3101 }
3102
3103 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3104}
3105
346ef3fa
RG
3106/* From a tree operand OP return the base of a load or store operation
3107 or NULL_TREE if OP is not a load or a store. */
3108
3109static tree
3110get_base_loadstore (tree op)
3111{
3112 while (handled_component_p (op))
3113 op = TREE_OPERAND (op, 0);
3114 if (DECL_P (op)
3115 || INDIRECT_REF_P (op)
3116 || TREE_CODE (op) == TARGET_MEM_REF)
3117 return op;
3118 return NULL_TREE;
3119}
3120
3121/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3122 VISIT_ADDR if non-NULL on loads, store and address-taken operands
3123 passing the STMT, the base of the operand and DATA to it. The base
3124 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3125 or the argument of an address expression.
3126 Returns the results of these callbacks or'ed. */
3127
3128bool
3129walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3130 bool (*visit_load)(gimple, tree, void *),
3131 bool (*visit_store)(gimple, tree, void *),
3132 bool (*visit_addr)(gimple, tree, void *))
3133{
3134 bool ret = false;
3135 unsigned i;
3136 if (gimple_assign_single_p (stmt))
3137 {
3138 tree lhs, rhs;
3139 if (visit_store)
3140 {
3141 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3142 if (lhs)
3143 ret |= visit_store (stmt, lhs, data);
3144 }
3145 rhs = gimple_assign_rhs1 (stmt);
ad8a1ac0
RG
3146 while (handled_component_p (rhs))
3147 rhs = TREE_OPERAND (rhs, 0);
346ef3fa
RG
3148 if (visit_addr)
3149 {
3150 if (TREE_CODE (rhs) == ADDR_EXPR)
3151 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3152 else if (TREE_CODE (rhs) == TARGET_MEM_REF
fff1894c 3153 && TMR_BASE (rhs) != NULL_TREE
346ef3fa
RG
3154 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3155 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3156 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3157 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3158 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3159 0), data);
fff1894c
AB
3160 lhs = gimple_assign_lhs (stmt);
3161 if (TREE_CODE (lhs) == TARGET_MEM_REF
3162 && TMR_BASE (lhs) != NULL_TREE
3163 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3164 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
346ef3fa
RG
3165 }
3166 if (visit_load)
3167 {
3168 rhs = get_base_loadstore (rhs);
3169 if (rhs)
3170 ret |= visit_load (stmt, rhs, data);
3171 }
3172 }
3173 else if (visit_addr
3174 && (is_gimple_assign (stmt)
4d7a65ea 3175 || gimple_code (stmt) == GIMPLE_COND))
346ef3fa
RG
3176 {
3177 for (i = 0; i < gimple_num_ops (stmt); ++i)
3178 if (gimple_op (stmt, i)
3179 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
3180 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
3181 }
3182 else if (is_gimple_call (stmt))
3183 {
3184 if (visit_store)
3185 {
3186 tree lhs = gimple_call_lhs (stmt);
3187 if (lhs)
3188 {
3189 lhs = get_base_loadstore (lhs);
3190 if (lhs)
3191 ret |= visit_store (stmt, lhs, data);
3192 }
3193 }
3194 if (visit_load || visit_addr)
3195 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3196 {
3197 tree rhs = gimple_call_arg (stmt, i);
3198 if (visit_addr
3199 && TREE_CODE (rhs) == ADDR_EXPR)
3200 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3201 else if (visit_load)
3202 {
3203 rhs = get_base_loadstore (rhs);
3204 if (rhs)
3205 ret |= visit_load (stmt, rhs, data);
3206 }
3207 }
3208 if (visit_addr
3209 && gimple_call_chain (stmt)
3210 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3211 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3212 data);
1d24fdd9
RG
3213 if (visit_addr
3214 && gimple_call_return_slot_opt_p (stmt)
3215 && gimple_call_lhs (stmt) != NULL_TREE
4d61856d 3216 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
1d24fdd9 3217 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
346ef3fa
RG
3218 }
3219 else if (gimple_code (stmt) == GIMPLE_ASM)
3220 {
3221 unsigned noutputs;
3222 const char *constraint;
3223 const char **oconstraints;
3224 bool allows_mem, allows_reg, is_inout;
3225 noutputs = gimple_asm_noutputs (stmt);
3226 oconstraints = XALLOCAVEC (const char *, noutputs);
3227 if (visit_store || visit_addr)
3228 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3229 {
3230 tree link = gimple_asm_output_op (stmt, i);
3231 tree op = get_base_loadstore (TREE_VALUE (link));
3232 if (op && visit_store)
3233 ret |= visit_store (stmt, op, data);
3234 if (visit_addr)
3235 {
3236 constraint = TREE_STRING_POINTER
3237 (TREE_VALUE (TREE_PURPOSE (link)));
3238 oconstraints[i] = constraint;
3239 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3240 &allows_reg, &is_inout);
3241 if (op && !allows_reg && allows_mem)
3242 ret |= visit_addr (stmt, op, data);
3243 }
3244 }
3245 if (visit_load || visit_addr)
3246 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3247 {
3248 tree link = gimple_asm_input_op (stmt, i);
3249 tree op = TREE_VALUE (link);
3250 if (visit_addr
3251 && TREE_CODE (op) == ADDR_EXPR)
3252 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3253 else if (visit_load || visit_addr)
3254 {
3255 op = get_base_loadstore (op);
3256 if (op)
3257 {
3258 if (visit_load)
3259 ret |= visit_load (stmt, op, data);
3260 if (visit_addr)
3261 {
3262 constraint = TREE_STRING_POINTER
3263 (TREE_VALUE (TREE_PURPOSE (link)));
3264 parse_input_constraint (&constraint, 0, 0, noutputs,
3265 0, oconstraints,
3266 &allows_mem, &allows_reg);
3267 if (!allows_reg && allows_mem)
3268 ret |= visit_addr (stmt, op, data);
3269 }
3270 }
3271 }
3272 }
3273 }
3274 else if (gimple_code (stmt) == GIMPLE_RETURN)
3275 {
3276 tree op = gimple_return_retval (stmt);
3277 if (op)
3278 {
3279 if (visit_addr
3280 && TREE_CODE (op) == ADDR_EXPR)
3281 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3282 else if (visit_load)
3283 {
3284 op = get_base_loadstore (op);
3285 if (op)
3286 ret |= visit_load (stmt, op, data);
3287 }
3288 }
3289 }
3290 else if (visit_addr
3291 && gimple_code (stmt) == GIMPLE_PHI)
3292 {
3293 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3294 {
3295 tree op = PHI_ARG_DEF (stmt, i);
3296 if (TREE_CODE (op) == ADDR_EXPR)
3297 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3298 }
3299 }
3300
3301 return ret;
3302}
3303
3304/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3305 should make a faster clone for this case. */
3306
3307bool
3308walk_stmt_load_store_ops (gimple stmt, void *data,
3309 bool (*visit_load)(gimple, tree, void *),
3310 bool (*visit_store)(gimple, tree, void *))
3311{
3312 return walk_stmt_load_store_addr_ops (stmt, data,
3313 visit_load, visit_store, NULL);
3314}
3315
ccacdf06
RG
3316/* Helper for gimple_ior_addresses_taken_1. */
3317
3318static bool
3319gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3320 tree addr, void *data)
3321{
3322 bitmap addresses_taken = (bitmap)data;
3323 while (handled_component_p (addr))
3324 addr = TREE_OPERAND (addr, 0);
3325 if (DECL_P (addr))
3326 {
3327 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3328 return true;
3329 }
3330 return false;
3331}
3332
3333/* Set the bit for the uid of all decls that have their address taken
3334 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3335 were any in this stmt. */
3336
3337bool
3338gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3339{
3340 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3341 gimple_ior_addresses_taken_1);
3342}
3343
4537ec0c
DN
3344
3345/* Return a printable name for symbol DECL. */
3346
3347const char *
3348gimple_decl_printable_name (tree decl, int verbosity)
3349{
3350 gcc_assert (decl && DECL_NAME (decl));
3351
3352 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3353 {
3354 const char *str, *mangled_str;
3355 int dmgl_opts = DMGL_NO_OPTS;
3356
3357 if (verbosity >= 2)
3358 {
3359 dmgl_opts = DMGL_VERBOSE
3360 | DMGL_TYPES
3361 | DMGL_ANSI
3362 | DMGL_GNU_V3
3363 | DMGL_RET_POSTFIX;
3364 if (TREE_CODE (decl) == FUNCTION_DECL)
3365 dmgl_opts |= DMGL_PARAMS;
3366 }
3367
3368 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3369 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
3370 return (str) ? str : mangled_str;
3371 }
3372
3373 return IDENTIFIER_POINTER (DECL_NAME (decl));
3374}
3375
3376
/* Fold a OBJ_TYPE_REF expression to the address of a function.
   KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF).  Adapted
   from cp_fold_obj_type_ref, but it tolerates types with no binfo
   data.  Returns NULL_TREE when KNOWN_TYPE has no binfo and the
   virtual call therefore cannot be resolved.  */

tree
gimple_fold_obj_type_ref (tree ref, tree known_type)
{
  HOST_WIDE_INT index;
  HOST_WIDE_INT i;
  tree v;
  tree fndecl;

  /* Without base-info there is no virtual function table to search.  */
  if (TYPE_BINFO (known_type) == NULL_TREE)
    return NULL_TREE;

  /* The OBJ_TYPE_REF token selects a slot in the list of virtuals;
     walk the chain until we reach that slot.  */
  v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
  index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
  i = 0;
  while (i != index)
    {
      /* On targets where vtable entries are function descriptors, each
	 virtual occupies TARGET_VTABLE_USES_DESCRIPTORS index slots;
	 otherwise each occupies exactly one.  */
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);

#ifdef ENABLE_CHECKING
  /* The method found must carry the same vtable index as the token we
     searched for.  */
  gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
				  DECL_VINDEX (fndecl)));
#endif

  /* Flag the method in the call graph as reached via a vtable.
     NOTE(review): the exact consumers of local.vtable_method are not
     visible here — see cgraph.h for its semantics.  */
  cgraph_node (fndecl)->local.vtable_method = true;

  return build_fold_addr_expr (fndecl);
}
3414
726a989a 3415#include "gt-gimple.h"