/* Gimple IR support functions.

   Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "toplev.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "demangle.h"

#define DEFGSCODE(SYM, NAME, STRUCT) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* All the tuples have their operand vector at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}


/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */

static enum gimple_statement_structure_enum
gss_for_code (enum gimple_code code)
{
  switch (code)
    {
    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_DEBUG:
    case GIMPLE_SWITCH: return GSS_WITH_OPS;
    case GIMPLE_ASM: return GSS_ASM;
    case GIMPLE_BIND: return GSS_BIND;
    case GIMPLE_CATCH: return GSS_CATCH;
    case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
    case GIMPLE_NOP: return GSS_BASE;
    case GIMPLE_PHI: return GSS_PHI;
    case GIMPLE_RESX: return GSS_RESX;
    case GIMPLE_TRY: return GSS_TRY;
    case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
    case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
    case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION: return GSS_OMP;
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
    case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
    case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
    case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
    case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
    case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
    case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
    case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
    case GIMPLE_PREDICT: return GSS_BASE;
    default: gcc_unreachable ();
    }
}


/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static size_t
gimple_size (enum gimple_code code)
{
  enum gimple_statement_structure_enum gss = gss_for_code (code);

  if (gss == GSS_WITH_OPS)
    return sizeof (struct gimple_statement_with_ops);
  else if (gss == GSS_WITH_MEM_OPS)
    return sizeof (struct gimple_statement_with_memory_ops);

  switch (code)
    {
    case GIMPLE_ASM:
      return sizeof (struct gimple_statement_asm);
    case GIMPLE_NOP:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_BIND:
      return sizeof (struct gimple_statement_bind);
    case GIMPLE_CATCH:
      return sizeof (struct gimple_statement_catch);
    case GIMPLE_EH_FILTER:
      return sizeof (struct gimple_statement_eh_filter);
    case GIMPLE_TRY:
      return sizeof (struct gimple_statement_try);
    case GIMPLE_RESX:
      return sizeof (struct gimple_statement_resx);
    case GIMPLE_OMP_CRITICAL:
      return sizeof (struct gimple_statement_omp_critical);
    case GIMPLE_OMP_FOR:
      return sizeof (struct gimple_statement_omp_for);
    case GIMPLE_OMP_PARALLEL:
      return sizeof (struct gimple_statement_omp_parallel);
    case GIMPLE_OMP_TASK:
      return sizeof (struct gimple_statement_omp_task);
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      return sizeof (struct gimple_statement_omp);
    case GIMPLE_OMP_RETURN:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_CONTINUE:
      return sizeof (struct gimple_statement_omp_continue);
    case GIMPLE_OMP_SECTIONS:
      return sizeof (struct gimple_statement_omp_sections);
    case GIMPLE_OMP_SECTIONS_SWITCH:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_SINGLE:
      return sizeof (struct gimple_statement_omp_single);
    case GIMPLE_OMP_ATOMIC_LOAD:
      return sizeof (struct gimple_statement_omp_atomic_load);
    case GIMPLE_OMP_ATOMIC_STORE:
      return sizeof (struct gimple_statement_omp_atomic_store);
    case GIMPLE_WITH_CLEANUP_EXPR:
      return sizeof (struct gimple_statement_wce);
    case GIMPLE_PREDICT:
      return sizeof (struct gimple_statement_base);
    default:
      break;
    }

  gcc_unreachable ();
}


/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
static gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
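
/* A sizing sketch for the allocator above (illustrative, not part of
   the original source): every tuple type embeds a one-element trailing
   operand array, so a GIMPLE_ASSIGN with 3 operands is allocated as

     size = gimple_size (GIMPLE_ASSIGN)   -- storage incl. ops[0]
            + sizeof (tree) * (3 - 1)     -- ops[1] and ops[2]

   which is exactly the NUM_OPS > 0 adjustment made above.  */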

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
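
/* Usage sketch for the call builders above (FNDECL, A, B and LHS are
   hypothetical trees, not defined in this file):

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);

   builds LHS = FNDECL (A, B).  The "+ 3" in gimple_build_call_1
   reserves operand slots for the LHS, the function address and the
   static chain; the arguments start at operand 3.  */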


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P and *OP2_P respectively.  */

void
extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
                       tree *op2_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary or binary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  return p;
}
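
/* Usage sketch (X, Y and Z are hypothetical trees):

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, y, z);

   builds X = Y + Z.  num_ops is 3 here: one slot for the LHS plus the
   two RHS operands, as computed above.  */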


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
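
/* Usage sketch (DST and SRC are hypothetical trees):

     gimple_seq seq = NULL;
     gimplify_assign (dst, src, &seq);

   leaves the gimplified form of DST = SRC at the end of SEQ and
   returns its final statement, as described above.  */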


/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
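
/* Usage sketch (A, B, T_LABEL and F_LABEL are hypothetical trees):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);

   represents if (A < B) goto T_LABEL; else goto F_LABEL;  */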


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}
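
/* Canonicalization examples for the function above:

     if (!a)   becomes   GIMPLE_COND <EQ_EXPR, a, 0>
     if (a)    becomes   GIMPLE_COND <NE_EXPR, a, 0>

   so downstream consumers only ever see comparison codes.  */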


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers)
{
  gimple p;
  int size = strlen (string);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   ... are trees for each input, output and clobbered register.  */

gimple
gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
                  unsigned nclobbers, ...)
{
  gimple p;
  unsigned i;
  va_list ap;

  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);

  va_start (ap, nclobbers);

  for (i = 0; i < ninputs; i++)
    gimple_asm_set_input_op (p, i, va_arg (ap, tree));

  for (i = 0; i < noutputs; i++)
    gimple_asm_set_output_op (p, i, va_arg (ap, tree));

  for (i = 0; i < nclobbers; i++)
    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));

  va_end (ap);

  return p;
}
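
/* Usage sketch (IN and OUT are hypothetical constraint/value TREE_LIST
   operands as built by the front ends):

     gimple a = gimple_build_asm ("mov %1, %0", 1, 1, 0, in, out);

   Note that the vararg order follows the loops above: all inputs
   first, then outputs, then clobbers.  */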
686
687/* Build a GIMPLE_CATCH statement.
688
689 TYPES are the catch types.
690 HANDLER is the exception handler. */
691
692gimple
693gimple_build_catch (tree types, gimple_seq handler)
694{
695 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
696 gimple_catch_set_types (p, types);
697 if (handler)
698 gimple_catch_set_handler (p, handler);
699
700 return p;
701}
702
703/* Build a GIMPLE_EH_FILTER statement.
704
705 TYPES are the filter's types.
706 FAILURE is the filter's failure action. */
707
708gimple
709gimple_build_eh_filter (tree types, gimple_seq failure)
710{
711 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
712 gimple_eh_filter_set_types (p, types);
713 if (failure)
714 gimple_eh_filter_set_failure (p, failure);
715
716 return p;
717}
718
719/* Build a GIMPLE_TRY statement.
720
721 EVAL is the expression to evaluate.
722 CLEANUP is the cleanup expression.
723 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
724 whether this is a try/catch or a try/finally respectively. */
725
726gimple
727gimple_build_try (gimple_seq eval, gimple_seq cleanup,
728 enum gimple_try_flags kind)
729{
730 gimple p;
731
732 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
733 p = gimple_alloc (GIMPLE_TRY, 0);
734 gimple_set_subcode (p, kind);
735 if (eval)
736 gimple_try_set_eval (p, eval);
737 if (cleanup)
738 gimple_try_set_cleanup (p, cleanup);
739
740 return p;
741}
742
743/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
744
745 CLEANUP is the cleanup expression. */
746
747gimple
748gimple_build_wce (gimple_seq cleanup)
749{
750 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
751 if (cleanup)
752 gimple_wce_set_cleanup (p, cleanup);
753
754 return p;
755}
756
757
758/* Build a GIMPLE_RESX statement.
759
760 REGION is the region number from which this resx causes control flow to
761 leave. */
762
763gimple
764gimple_build_resx (int region)
765{
766 gimple p = gimple_alloc (GIMPLE_RESX, 0);
767 gimple_resx_set_region (p, region);
768 return p;
769}
770
771
772/* The helper for constructing a gimple switch statement.
773 INDEX is the switch's index.
774 NLABELS is the number of labels in the switch excluding the default.
775 DEFAULT_LABEL is the default label for the switch statement. */
776
777static inline gimple
778gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
779{
780 /* nlabels + 1 default label + 1 index. */
bbbbb16a
ILT
781 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
782 nlabels + 1 + 1);
726a989a
RB
783 gimple_switch_set_index (p, index);
784 gimple_switch_set_default_label (p, default_label);
785 return p;
786}
787
788
789/* Build a GIMPLE_SWITCH statement.
790
791 INDEX is the switch's index.
792 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
793 ... are the labels excluding the default. */
794
795gimple
796gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
797{
798 va_list al;
799 unsigned i;
800 gimple p;
801
802 p = gimple_build_switch_1 (nlabels, index, default_label);
803
804 /* Store the rest of the labels. */
805 va_start (al, default_label);
806 for (i = 1; i <= nlabels; i++)
807 gimple_switch_set_label (p, i, va_arg (al, tree));
808 va_end (al);
809
810 return p;
811}
812
813
814/* Build a GIMPLE_SWITCH statement.
815
816 INDEX is the switch's index.
817 DEFAULT_LABEL is the default label
818 ARGS is a vector of labels excluding the default. */
819
820gimple
821gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
822{
823 unsigned i;
824 unsigned nlabels = VEC_length (tree, args);
825 gimple p = gimple_build_switch_1 (nlabels, index, default_label);
826
827 /* Put labels in labels[1 - (nlabels + 1)].
828 Default label is in labels[0]. */
829 for (i = 1; i <= nlabels; i++)
830 gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));
831
832 return p;
833}
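
/* Operand layout sketch for the switch builders above: a switch with
   NLABELS case labels uses NLABELS + 2 operands:

     op[0]                     index expression
     op[1]                     default label
     op[2] .. op[NLABELS + 1]  case labels

   so gimple_switch_set_label index 0 addresses the default label and
   indices 1..NLABELS the cases, matching the loops above.  */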


/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, and nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

/* Return which gimple structure is used by T.  The enums here are defined
   in gsstruct.def.  */

enum gimple_statement_structure_enum
gimple_statement_structure (gimple gs)
{
  return gss_for_code (gimple_code (gs));
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL is returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;
          return gsi_stmt (gsi);
        }
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
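
/* Usage sketch: a statement callback that counts GIMPLE_CALLs (the
   callback name and the counter wiring are hypothetical):

     static tree
     count_calls (gimple_stmt_iterator *gsi, bool *handled,
                  struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(int *) wi->info;
       *handled = true;
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     int n = 0;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls, NULL, &wi);

   Setting *HANDLED to true skips the operand walk for each
   statement; returning NULL_TREE keeps the walk going.  */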


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      tree op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);

      /* Although input "m" is not really an LHS, we need an lvalue.  */
      if (wi)
        wi->is_lhs = !wi->val_only;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
         COMPONENT_REF RHS.  */
      if (wi)
        wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
                       || !gimple_assign_single_p (stmt);

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (wi)
        wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
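
/* Recursion sketch for walk_gimple_stmt (illustrative): walking

     x = 1;
     try { y = 2; } finally { z = 3; }

   via walk_gimple_seq visits the GIMPLE_TRY tuple itself first and
   then, through the GIMPLE_TRY case above, the statements of both
   the eval and cleanup sequences.  */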


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body, either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return gimple_code (gs) == GIMPLE_ASSIGN
         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
            == GIMPLE_SINGLE_RHS
         && is_gimple_val (gimple_op (gs, 1));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
              == GIMPLE_SINGLE_RHS)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a singleton RHS, i.e.,
   there is no operator associated with the assignment itself.
   Unlike gimple_assign_copy_p, this predicate returns true for
   any RHS operand, including those that perform an operation
   and do not have the semantics of a copy, such as COND_EXPR.  */

bool
gimple_assign_single_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
             == GIMPLE_SINGLE_RHS);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Fold the expression computed by STMT.  If the expression can be
   folded, return the folded result, otherwise return NULL.  STMT is
   not modified.  */

tree
gimple_fold (const_gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      return fold_binary_loc (loc, gimple_cond_code (stmt),
                              boolean_type_node,
                              gimple_cond_lhs (stmt),
                              gimple_cond_rhs (stmt));

    case GIMPLE_ASSIGN:
      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
        {
        case GIMPLE_UNARY_RHS:
          return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt));
        case GIMPLE_BINARY_RHS:
          return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));
        case GIMPLE_SINGLE_RHS:
          return fold (gimple_assign_rhs1 (stmt));
        default:;
        }
      break;

    case GIMPLE_SWITCH:
      return gimple_switch_index (stmt);

    case GIMPLE_CALL:
      return NULL_TREE;

    default:
      break;
    }

  gcc_unreachable ();
}
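
/* Folding sketch: for a GIMPLE_COND testing 1 < 2, the switch above
   calls fold_binary_loc with LT_EXPR and boolean_type_node, which
   reduces the constant comparison to boolean_true_node.  A NULL
   result means "no simplification"; STMT itself is never modified.  */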


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
                                tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
         on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}
2028
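/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical, and update_stmt is assumed to be the usual
   operand-update helper from tree-flow.h): rewrite the assignment at
   GSI as LHS = OP1 + OP2 without building a tree expression first.  */

static void
example_set_rhs_to_sum (gimple_stmt_iterator *gsi, tree op1, tree op2)
{
  gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, op1, op2);
  update_stmt (gsi_stmt (*gsi));
}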
2029
2030/* Return the LHS of a statement that performs an assignment,
2031 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2032 for a call to a function that returns no value, or for a
2033 statement other than an assignment or a call. */
2034
2035tree
2036gimple_get_lhs (const_gimple stmt)
2037{
2038 enum gimple_code code = gimple_code (stmt);
2039
2040 if (code == GIMPLE_ASSIGN)
2041 return gimple_assign_lhs (stmt);
2042 else if (code == GIMPLE_CALL)
2043 return gimple_call_lhs (stmt);
2044 else
2045 return NULL_TREE;
2046}
2047
2048
2049/* Set the LHS of a statement that performs an assignment,
2050 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2051
2052void
2053gimple_set_lhs (gimple stmt, tree lhs)
2054{
2055 enum gimple_code code = gimple_code (stmt);
2056
2057 if (code == GIMPLE_ASSIGN)
2058 gimple_assign_set_lhs (stmt, lhs);
2059 else if (code == GIMPLE_CALL)
2060 gimple_call_set_lhs (stmt, lhs);
2061 else
2062 gcc_unreachable ();
2063}
2064
2065
2066/* Return a deep copy of statement STMT. All the operands from STMT
2067 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2068 and VUSE operand arrays are set to empty in the new copy. */
2069
2070gimple
2071gimple_copy (gimple stmt)
2072{
2073 enum gimple_code code = gimple_code (stmt);
2074 unsigned num_ops = gimple_num_ops (stmt);
2075 gimple copy = gimple_alloc (code, num_ops);
2076 unsigned i;
2077
2078 /* Shallow copy all the fields from STMT. */
2079 memcpy (copy, stmt, gimple_size (code));
2080
2081 /* If STMT has sub-statements, deep-copy them as well. */
2082 if (gimple_has_substatements (stmt))
2083 {
2084 gimple_seq new_seq;
2085 tree t;
2086
2087 switch (gimple_code (stmt))
2088 {
2089 case GIMPLE_BIND:
2090 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2091 gimple_bind_set_body (copy, new_seq);
2092 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2093 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2094 break;
2095
2096 case GIMPLE_CATCH:
2097 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2098 gimple_catch_set_handler (copy, new_seq);
2099 t = unshare_expr (gimple_catch_types (stmt));
2100 gimple_catch_set_types (copy, t);
2101 break;
2102
2103 case GIMPLE_EH_FILTER:
2104 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2105 gimple_eh_filter_set_failure (copy, new_seq);
2106 t = unshare_expr (gimple_eh_filter_types (stmt));
2107 gimple_eh_filter_set_types (copy, t);
2108 break;
2109
2110 case GIMPLE_TRY:
2111 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2112 gimple_try_set_eval (copy, new_seq);
2113 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2114 gimple_try_set_cleanup (copy, new_seq);
2115 break;
2116
2117 case GIMPLE_OMP_FOR:
2118 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2119 gimple_omp_for_set_pre_body (copy, new_seq);
2120 t = unshare_expr (gimple_omp_for_clauses (stmt));
2121 gimple_omp_for_set_clauses (copy, t);
2122 copy->gimple_omp_for.iter
2123 = GGC_NEWVEC (struct gimple_omp_for_iter,
2124 gimple_omp_for_collapse (stmt));
2125 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2126 {
2127 gimple_omp_for_set_cond (copy, i,
2128 gimple_omp_for_cond (stmt, i));
2129 gimple_omp_for_set_index (copy, i,
2130 gimple_omp_for_index (stmt, i));
2131 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2132 gimple_omp_for_set_initial (copy, i, t);
2133 t = unshare_expr (gimple_omp_for_final (stmt, i));
2134 gimple_omp_for_set_final (copy, i, t);
2135 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2136 gimple_omp_for_set_incr (copy, i, t);
2137 }
2138 goto copy_omp_body;
2139
2140 case GIMPLE_OMP_PARALLEL:
2141 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2142 gimple_omp_parallel_set_clauses (copy, t);
2143 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2144 gimple_omp_parallel_set_child_fn (copy, t);
2145 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2146 gimple_omp_parallel_set_data_arg (copy, t);
2147 goto copy_omp_body;
2148
2149 case GIMPLE_OMP_TASK:
2150 t = unshare_expr (gimple_omp_task_clauses (stmt));
2151 gimple_omp_task_set_clauses (copy, t);
2152 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2153 gimple_omp_task_set_child_fn (copy, t);
2154 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2155 gimple_omp_task_set_data_arg (copy, t);
2156 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2157 gimple_omp_task_set_copy_fn (copy, t);
2158 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2159 gimple_omp_task_set_arg_size (copy, t);
2160 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2161 gimple_omp_task_set_arg_align (copy, t);
2162 goto copy_omp_body;
2163
2164 case GIMPLE_OMP_CRITICAL:
2165 t = unshare_expr (gimple_omp_critical_name (stmt));
2166 gimple_omp_critical_set_name (copy, t);
2167 goto copy_omp_body;
2168
2169 case GIMPLE_OMP_SECTIONS:
2170 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2171 gimple_omp_sections_set_clauses (copy, t);
2172 t = unshare_expr (gimple_omp_sections_control (stmt));
2173 gimple_omp_sections_set_control (copy, t);
2174 /* FALLTHRU */
2175
2176 case GIMPLE_OMP_SINGLE:
2177 case GIMPLE_OMP_SECTION:
2178 case GIMPLE_OMP_MASTER:
2179 case GIMPLE_OMP_ORDERED:
2180 copy_omp_body:
2181 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2182 gimple_omp_set_body (copy, new_seq);
2183 break;
2184
2185 case GIMPLE_WITH_CLEANUP_EXPR:
2186 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2187 gimple_wce_set_cleanup (copy, new_seq);
2188 break;
2189
2190 default:
2191 gcc_unreachable ();
2192 }
2193 }
2194
2195 /* Make copy of operands. */
2196 if (num_ops > 0)
2197 {
2198 for (i = 0; i < num_ops; i++)
2199 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2200
2201 /* Clear out SSA operand vectors on COPY. */
2202 if (gimple_has_ops (stmt))
2203 {
2204 gimple_set_def_ops (copy, NULL);
2205 gimple_set_use_ops (copy, NULL);
2206 }
2207
2208 if (gimple_has_mem_ops (stmt))
2209 {
2210 gimple_set_vdef (copy, gimple_vdef (stmt));
2211 gimple_set_vuse (copy, gimple_vuse (stmt));
2212 }
2213
2214 /* SSA operands need to be updated. */
2215 gimple_set_modified (copy, true);
2216 }
2217
2218 return copy;
2219}
2220
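/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical): because gimple_copy unshares the operands and
   clears the SSA operand caches, the copy can be inserted elsewhere and
   will be re-scanned by the operand machinery.  */

static void
example_duplicate_stmt (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_after (gsi, copy, GSI_SAME_STMT);
}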
2221
2222/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2223 a MODIFIED field. */
2224
2225void
2226gimple_set_modified (gimple s, bool modifiedp)
2227{
2228 if (gimple_has_ops (s))
2229 {
2230 s->gsbase.modified = (unsigned) modifiedp;
2231
2232 if (modifiedp
2233 && cfun->gimple_df
2234 && is_gimple_call (s)
2235 && gimple_call_noreturn_p (s))
2236 VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
2237 }
2238}
2239
2240
2241/* Return true if statement S has side-effects. We consider a
2242 statement to have side effects if:
2243
2244 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2245 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2246
2247bool
2248gimple_has_side_effects (const_gimple s)
2249{
2250 unsigned i;
2251
2252 if (is_gimple_debug (s))
2253 return false;
2254
2255 /* We don't have to scan the arguments to check for
2256 volatile arguments, though, at present, we still
2257 do a scan to check for TREE_SIDE_EFFECTS. */
2258 if (gimple_has_volatile_ops (s))
2259 return true;
2260
2261 if (is_gimple_call (s))
2262 {
2263 unsigned nargs = gimple_call_num_args (s);
2264
2265 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2266 return true;
2267 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2268 /* An infinite loop is considered a side effect. */
2269 return true;
2270
2271 if (gimple_call_lhs (s)
2272 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2273 {
2274 gcc_assert (gimple_has_volatile_ops (s));
2275 return true;
2276 }
2277
2278 if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
2279 return true;
2280
2281 for (i = 0; i < nargs; i++)
2282 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2283 {
2284 gcc_assert (gimple_has_volatile_ops (s));
2285 return true;
2286 }
2287
2288 return false;
2289 }
2290 else
2291 {
2292 for (i = 0; i < gimple_num_ops (s); i++)
2293 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2294 {
2295 gcc_assert (gimple_has_volatile_ops (s));
2296 return true;
2297 }
2298 }
2299
2300 return false;
2301}
2302
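/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical): dead-code elimination typically gates
   statement removal on this predicate.  */

static bool
example_safe_to_remove_p (gimple stmt)
{
  /* Often combined with gimple_could_trap_p, defined further below.  */
  return !gimple_has_side_effects (stmt);
}
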
2303/* Return true if the RHS of statement S has side effects.
2304 We may use it to determine if it is admissible to replace
2305 an assignment or call with a copy of a previously-computed
2306 value. In such cases, side effects due to the LHS are
2307 preserved. */
2308
2309bool
2310gimple_rhs_has_side_effects (const_gimple s)
2311{
2312 unsigned i;
2313
2314 if (is_gimple_call (s))
2315 {
2316 unsigned nargs = gimple_call_num_args (s);
2317
2318 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2319 return true;
2320
2321 /* We cannot use gimple_has_volatile_ops here,
2322 because we must ignore a volatile LHS. */
2323 if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
2324 || TREE_THIS_VOLATILE (gimple_call_fn (s)))
2325 {
2326 gcc_assert (gimple_has_volatile_ops (s));
2327 return true;
2328 }
2329
2330 for (i = 0; i < nargs; i++)
2331 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2332 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2333 return true;
2334
2335 return false;
2336 }
2337 else if (is_gimple_assign (s))
2338 {
2339 /* Skip the first operand, the LHS. */
2340 for (i = 1; i < gimple_num_ops (s); i++)
2341 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2342 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2343 {
2344 gcc_assert (gimple_has_volatile_ops (s));
2345 return true;
2346 }
2347 }
2348 else if (is_gimple_debug (s))
2349 return false;
2350 else
2351 {
2352 /* For statements without an LHS, examine all arguments. */
2353 for (i = 0; i < gimple_num_ops (s); i++)
2354 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2355 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2356 {
2357 gcc_assert (gimple_has_volatile_ops (s));
2358 return true;
2359 }
2360 }
2361
2362 return false;
2363}
2364
2365
2366/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2367 Return true if S can trap. If INCLUDE_LHS is true and S is a
2368 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2369 Otherwise, only the RHS of the assignment is checked. */
2370
2371static bool
2372gimple_could_trap_p_1 (gimple s, bool include_lhs)
2373{
2374 unsigned i, start;
2375 tree t, div = NULL_TREE;
2376 enum tree_code op;
2377
2378 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2379
2380 for (i = start; i < gimple_num_ops (s); i++)
2381 if (tree_could_trap_p (gimple_op (s, i)))
2382 return true;
2383
2384 switch (gimple_code (s))
2385 {
2386 case GIMPLE_ASM:
2387 return gimple_asm_volatile_p (s);
2388
2389 case GIMPLE_CALL:
2390 t = gimple_call_fndecl (s);
2391 /* Assume that calls to weak functions may trap. */
2392 if (!t || !DECL_P (t) || DECL_WEAK (t))
2393 return true;
2394 return false;
2395
2396 case GIMPLE_ASSIGN:
2397 t = gimple_expr_type (s);
2398 op = gimple_assign_rhs_code (s);
2399 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2400 div = gimple_assign_rhs2 (s);
2401 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2402 (INTEGRAL_TYPE_P (t)
2403 && TYPE_OVERFLOW_TRAPS (t)),
2404 div));
2405
2406 default:
2407 break;
2408 }
2409
2410 return false;
2411
2412}
2413
2414
2415/* Return true if statement S can trap. */
2416
2417bool
2418gimple_could_trap_p (gimple s)
2419{
2420 return gimple_could_trap_p_1 (s, true);
2421}
2422
2423
2424/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2425
2426bool
2427gimple_assign_rhs_could_trap_p (gimple s)
2428{
2429 gcc_assert (is_gimple_assign (s));
2430 return gimple_could_trap_p_1 (s, false);
2431}
2432
2433
2434/* Print debugging information for gimple stmts generated. */
2435
2436void
2437dump_gimple_statistics (void)
2438{
2439#ifdef GATHER_STATISTICS
2440 int i, total_tuples = 0, total_bytes = 0;
2441
2442 fprintf (stderr, "\nGIMPLE statements\n");
2443 fprintf (stderr, "Kind Stmts Bytes\n");
2444 fprintf (stderr, "---------------------------------------\n");
2445 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2446 {
2447 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2448 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2449 total_tuples += gimple_alloc_counts[i];
2450 total_bytes += gimple_alloc_sizes[i];
2451 }
2452 fprintf (stderr, "---------------------------------------\n");
2453 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2454 fprintf (stderr, "---------------------------------------\n");
2455#else
2456 fprintf (stderr, "No gimple statistics\n");
2457#endif
2458}
2459
2460
2461/* Return the number of operands needed on the RHS of a GIMPLE
2462 assignment for an expression with tree code CODE. */
2463
2464unsigned
2465get_gimple_rhs_num_ops (enum tree_code code)
2466{
2467 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2468
2469 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2470 return 1;
2471 else if (rhs_class == GIMPLE_BINARY_RHS)
2472 return 2;
2473 else
2474 gcc_unreachable ();
2475}
2476
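/* For example, under the classification table below: PLUS_EXPR is
   GIMPLE_BINARY_RHS and needs two operand slots, NEGATE_EXPR is
   GIMPLE_UNARY_RHS and needs one, and an SSA_NAME or a constant is
   GIMPLE_SINGLE_RHS with a single slot.  */
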
2477#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2478 (unsigned char) \
2479 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2480 : ((TYPE) == tcc_binary \
2481 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2482 : ((TYPE) == tcc_constant \
2483 || (TYPE) == tcc_declaration \
2484 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2485 : ((SYM) == TRUTH_AND_EXPR \
2486 || (SYM) == TRUTH_OR_EXPR \
2487 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2488 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2489 : ((SYM) == COND_EXPR \
2490 || (SYM) == CONSTRUCTOR \
2491 || (SYM) == OBJ_TYPE_REF \
2492 || (SYM) == ASSERT_EXPR \
2493 || (SYM) == ADDR_EXPR \
2494 || (SYM) == WITH_SIZE_EXPR \
2495 || (SYM) == EXC_PTR_EXPR \
2496 || (SYM) == SSA_NAME \
2497 || (SYM) == FILTER_EXPR \
2498 || (SYM) == POLYNOMIAL_CHREC \
2499 || (SYM) == DOT_PROD_EXPR \
2500 || (SYM) == VEC_COND_EXPR \
2501 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2502 : GIMPLE_INVALID_RHS),
2503#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2504
2505const unsigned char gimple_rhs_class_table[] = {
2506#include "all-tree.def"
2507};
2508
2509#undef DEFTREECODE
2510#undef END_OF_BASE_TREE_CODES
2511
2512/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2513
2514/* Validation of GIMPLE expressions. */
2515
2516/* Return true if OP is an acceptable tree node to be used as a GIMPLE
2517 operand. */
2518
2519bool
2520is_gimple_operand (const_tree op)
2521{
2522 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2523}
2524
2525/* Returns true iff T is a valid RHS for an assignment to a renamed
2526 user -- or front-end generated artificial -- variable. */
2527
2528bool
2529is_gimple_reg_rhs (tree t)
2530{
2531 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2532}
2533
2534/* Returns true iff T is a valid RHS for an assignment to an un-renamed
2535 LHS, or for a call argument. */
2536
2537bool
2538is_gimple_mem_rhs (tree t)
2539{
2540 /* If we're dealing with a renamable type, either source or dest must be
2541 a renamed variable. */
2542 if (is_gimple_reg_type (TREE_TYPE (t)))
2543 return is_gimple_val (t);
2544 else
2545 return is_gimple_val (t) || is_gimple_lvalue (t);
2546}
2547
2548/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2549
2550bool
2551is_gimple_lvalue (tree t)
2552{
2553 return (is_gimple_addressable (t)
2554 || TREE_CODE (t) == WITH_SIZE_EXPR
2555 /* These are complex lvalues, but don't have addresses, so they
2556 go here. */
2557 || TREE_CODE (t) == BIT_FIELD_REF);
2558}
2559
2560/* Return true if T is a GIMPLE condition. */
2561
2562bool
2563is_gimple_condexpr (tree t)
2564{
2565 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2566 && !tree_could_trap_p (t)
2567 && is_gimple_val (TREE_OPERAND (t, 0))
2568 && is_gimple_val (TREE_OPERAND (t, 1))));
2569}
2570
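/* For example, 'a_1 == b_2' with GIMPLE value operands is a valid
   condition, while a comparison with non-GIMPLE operands, or one that
   could trap, must first be gimplified into separate assignments.  */
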
2571/* Return true if T is something whose address can be taken. */
2572
2573bool
2574is_gimple_addressable (tree t)
2575{
2576 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2577}
2578
2579/* Return true if T is a valid gimple constant. */
2580
2581bool
2582is_gimple_constant (const_tree t)
2583{
2584 switch (TREE_CODE (t))
2585 {
2586 case INTEGER_CST:
2587 case REAL_CST:
2588 case FIXED_CST:
2589 case STRING_CST:
2590 case COMPLEX_CST:
2591 case VECTOR_CST:
2592 return true;
2593
2594 /* Vector constant constructors are gimple invariant. */
2595 case CONSTRUCTOR:
2596 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2597 return TREE_CONSTANT (t);
2598 else
2599 return false;
2600
2601 default:
2602 return false;
2603 }
2604}
2605
2606/* Return true if T is a gimple address. */
2607
2608bool
2609is_gimple_address (const_tree t)
2610{
2611 tree op;
2612
2613 if (TREE_CODE (t) != ADDR_EXPR)
2614 return false;
2615
2616 op = TREE_OPERAND (t, 0);
2617 while (handled_component_p (op))
2618 {
2619 if ((TREE_CODE (op) == ARRAY_REF
2620 || TREE_CODE (op) == ARRAY_RANGE_REF)
2621 && !is_gimple_val (TREE_OPERAND (op, 1)))
2622 return false;
2623
2624 op = TREE_OPERAND (op, 0);
2625 }
2626
2627 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2628 return true;
2629
2630 switch (TREE_CODE (op))
2631 {
2632 case PARM_DECL:
2633 case RESULT_DECL:
2634 case LABEL_DECL:
2635 case FUNCTION_DECL:
2636 case VAR_DECL:
2637 case CONST_DECL:
2638 return true;
2639
2640 default:
2641 return false;
2642 }
2643}
2644
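/* For example, '&a.f' and '&a[i_1]' (with i_1 a GIMPLE value) satisfy
   is_gimple_address, while '&a[i + 1]' does not until the index is
   gimplified into a separate value.  */
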
2645/* Strip out all handled components that produce invariant
2646 offsets. */
2647
2648static const_tree
2649strip_invariant_refs (const_tree op)
2650{
2651 while (handled_component_p (op))
2652 {
2653 switch (TREE_CODE (op))
2654 {
2655 case ARRAY_REF:
2656 case ARRAY_RANGE_REF:
2657 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2658 || TREE_OPERAND (op, 2) != NULL_TREE
2659 || TREE_OPERAND (op, 3) != NULL_TREE)
2660 return NULL;
2661 break;
2662
2663 case COMPONENT_REF:
2664 if (TREE_OPERAND (op, 2) != NULL_TREE)
2665 return NULL;
2666 break;
2667
2668 default:;
2669 }
2670 op = TREE_OPERAND (op, 0);
2671 }
2672
2673 return op;
2674}
2675
2676/* Return true if T is a gimple invariant address. */
2677
2678bool
2679is_gimple_invariant_address (const_tree t)
2680{
2681 const_tree op;
2682
2683 if (TREE_CODE (t) != ADDR_EXPR)
2684 return false;
2685
2686 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2687
2688 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2689}
2690
2691/* Return true if T is a gimple invariant address at IPA level
2692 (so addresses of variables on stack are not allowed). */
2693
2694bool
2695is_gimple_ip_invariant_address (const_tree t)
2696{
2697 const_tree op;
2698
2699 if (TREE_CODE (t) != ADDR_EXPR)
2700 return false;
2701
2702 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2703
2704 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2705}
2706
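/* For example, the address of a static or extern variable is invariant
   at both levels, while the address of a local variable is invariant
   within its function but, living on the stack, is rejected by the
   IPA-level predicate above.  */
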
2707/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2708 form of function invariant. */
2709
2710bool
2711is_gimple_min_invariant (const_tree t)
2712{
2713 if (TREE_CODE (t) == ADDR_EXPR)
2714 return is_gimple_invariant_address (t);
2715
2716 return is_gimple_constant (t);
2717}
2718
2719/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2720 form of gimple minimal invariant. */
2721
2722bool
2723is_gimple_ip_invariant (const_tree t)
2724{
2725 if (TREE_CODE (t) == ADDR_EXPR)
2726 return is_gimple_ip_invariant_address (t);
2727
2728 return is_gimple_constant (t);
2729}
2730
2731/* Return true if T looks like a valid GIMPLE statement. */
2732
2733bool
2734is_gimple_stmt (tree t)
2735{
2736 const enum tree_code code = TREE_CODE (t);
2737
2738 switch (code)
2739 {
2740 case NOP_EXPR:
2741 /* The only valid NOP_EXPR is the empty statement. */
2742 return IS_EMPTY_STMT (t);
2743
2744 case BIND_EXPR:
2745 case COND_EXPR:
2746 /* These are only valid if they're void. */
2747 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2748
2749 case SWITCH_EXPR:
2750 case GOTO_EXPR:
2751 case RETURN_EXPR:
2752 case LABEL_EXPR:
2753 case CASE_LABEL_EXPR:
2754 case TRY_CATCH_EXPR:
2755 case TRY_FINALLY_EXPR:
2756 case EH_FILTER_EXPR:
2757 case CATCH_EXPR:
2758 case ASM_EXPR:
2759 case RESX_EXPR:
2760 case STATEMENT_LIST:
2761 case OMP_PARALLEL:
2762 case OMP_FOR:
2763 case OMP_SECTIONS:
2764 case OMP_SECTION:
2765 case OMP_SINGLE:
2766 case OMP_MASTER:
2767 case OMP_ORDERED:
2768 case OMP_CRITICAL:
2769 case OMP_TASK:
2770 /* These are always void. */
2771 return true;
2772
2773 case CALL_EXPR:
2774 case MODIFY_EXPR:
2775 case PREDICT_EXPR:
2776 /* These are valid regardless of their type. */
2777 return true;
2778
2779 default:
2780 return false;
2781 }
2782}
2783
2784/* Return true if T is a variable. */
2785
2786bool
2787is_gimple_variable (tree t)
2788{
2789 return (TREE_CODE (t) == VAR_DECL
2790 || TREE_CODE (t) == PARM_DECL
2791 || TREE_CODE (t) == RESULT_DECL
2792 || TREE_CODE (t) == SSA_NAME);
2793}
2794
2795/* Return true if T is a GIMPLE identifier (something with an address). */
2796
2797bool
2798is_gimple_id (tree t)
2799{
2800 return (is_gimple_variable (t)
2801 || TREE_CODE (t) == FUNCTION_DECL
2802 || TREE_CODE (t) == LABEL_DECL
2803 || TREE_CODE (t) == CONST_DECL
2804 /* Allow string constants, since they are addressable. */
2805 || TREE_CODE (t) == STRING_CST);
2806}
2807
2808/* Return true if TYPE is a suitable type for a scalar register variable. */
2809
2810bool
2811is_gimple_reg_type (tree type)
2812{
2813 return !AGGREGATE_TYPE_P (type);
2814}
2815
2816/* Return true if T is a non-aggregate register variable. */
2817
2818bool
2819is_gimple_reg (tree t)
2820{
2821 if (TREE_CODE (t) == SSA_NAME)
2822 t = SSA_NAME_VAR (t);
2823
2824 if (!is_gimple_variable (t))
2825 return false;
2826
2827 if (!is_gimple_reg_type (TREE_TYPE (t)))
2828 return false;
2829
2830 /* A volatile decl is not acceptable because we can't reuse it as
2831 needed. We need to copy it into a temp first. */
2832 if (TREE_THIS_VOLATILE (t))
2833 return false;
2834
2835 /* We define "registers" as things that can be renamed as needed,
2836 which with our infrastructure does not apply to memory. */
2837 if (needs_to_live_in_memory (t))
2838 return false;
2839
2840 /* Hard register variables are an interesting case. For those that
2841 are call-clobbered, we don't know where all the calls are, since
2842 we don't (want to) take into account which operations will turn
2843 into libcalls at the rtl level. For those that are call-saved,
2844 we don't currently model the fact that calls may in fact change
2845 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2846 level, and so miss variable changes that they might imply. All around,
2847 it seems safest to not do too much optimization with these at the
2848 tree level at all. We'll have to rely on the rtl optimizers to
2849 clean this up, as there we've got all the appropriate bits exposed. */
2850 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2851 return false;
2852
2853 /* Complex and vector values must have been put into SSA-like form.
2854 That is, no assignments to the individual components. */
2855 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2856 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2857 return DECL_GIMPLE_REG_P (t);
2858
2859 return true;
2860}
2861
2862
2863/* Return true if T is a GIMPLE variable whose address is not needed. */
2864
2865bool
2866is_gimple_non_addressable (tree t)
2867{
2868 if (TREE_CODE (t) == SSA_NAME)
2869 t = SSA_NAME_VAR (t);
2870
2871 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2872}
2873
2874/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2875
2876bool
2877is_gimple_val (tree t)
2878{
2879 /* Make loads from volatiles and memory vars explicit. */
2880 if (is_gimple_variable (t)
2881 && is_gimple_reg_type (TREE_TYPE (t))
2882 && !is_gimple_reg (t))
2883 return false;
2884
2885 /* FIXME make these decls. That can happen only when we expose the
2886 entire landing-pad construct at the tree level. */
2887 if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
2888 return true;
2889
2890 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2891}
2892
2893/* Similarly, but accept hard registers as inputs to asm statements. */
2894
2895bool
2896is_gimple_asm_val (tree t)
2897{
2898 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2899 return true;
2900
2901 return is_gimple_val (t);
2902}
2903
2904/* Return true if T is a GIMPLE minimal lvalue. */
2905
2906bool
2907is_gimple_min_lval (tree t)
2908{
2909 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2910 return false;
2911 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2912}
2913
2914/* Return true if T is a typecast operation. */
2915
2916bool
2917is_gimple_cast (tree t)
2918{
2919 return (CONVERT_EXPR_P (t)
2920 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2921}
2922
2923/* Return true if T is a valid function operand of a CALL_EXPR. */
2924
2925bool
2926is_gimple_call_addr (tree t)
2927{
2928 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2929}
2930
2931/* If T makes a function call, return the corresponding CALL_EXPR operand.
2932 Otherwise, return NULL_TREE. */
2933
2934tree
2935get_call_expr_in (tree t)
2936{
2937 if (TREE_CODE (t) == MODIFY_EXPR)
2938 t = TREE_OPERAND (t, 1);
2939 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2940 t = TREE_OPERAND (t, 0);
2941 if (TREE_CODE (t) == CALL_EXPR)
2942 return t;
2943 return NULL_TREE;
2944}
2945
2946
2947/* Given a memory reference expression T, return its base address.
2948 The base address of a memory reference expression is the main
2949 object being referenced. For instance, the base address for
2950 'array[i].fld[j]' is 'array'. You can think of this as stripping
2951 away the offset part from a memory address.
2952
2953 This function calls handled_component_p to strip away all the inner
2954 parts of the memory reference until it reaches the base object. */
2955
2956tree
2957get_base_address (tree t)
2958{
2959 while (handled_component_p (t))
2960 t = TREE_OPERAND (t, 0);
2961
2962 if (SSA_VAR_P (t)
2963 || TREE_CODE (t) == STRING_CST
2964 || TREE_CODE (t) == CONSTRUCTOR
2965 || INDIRECT_REF_P (t))
2966 return t;
2967 else
2968 return NULL_TREE;
2969}
2970
2971void
2972recalculate_side_effects (tree t)
2973{
2974 enum tree_code code = TREE_CODE (t);
2975 int len = TREE_OPERAND_LENGTH (t);
2976 int i;
2977
2978 switch (TREE_CODE_CLASS (code))
2979 {
2980 case tcc_expression:
2981 switch (code)
2982 {
2983 case INIT_EXPR:
2984 case MODIFY_EXPR:
2985 case VA_ARG_EXPR:
2986 case PREDECREMENT_EXPR:
2987 case PREINCREMENT_EXPR:
2988 case POSTDECREMENT_EXPR:
2989 case POSTINCREMENT_EXPR:
2990 /* All of these have side-effects, no matter what their
2991 operands are. */
2992 return;
2993
2994 default:
2995 break;
2996 }
2997 /* Fall through. */
2998
2999 case tcc_comparison: /* a comparison expression */
3000 case tcc_unary: /* a unary arithmetic expression */
3001 case tcc_binary: /* a binary arithmetic expression */
3002 case tcc_reference: /* a reference */
3003 case tcc_vl_exp: /* a function call */
3004 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3005 for (i = 0; i < len; ++i)
3006 {
3007 tree op = TREE_OPERAND (t, i);
3008 if (op && TREE_SIDE_EFFECTS (op))
3009 TREE_SIDE_EFFECTS (t) = 1;
3010 }
3011 break;
3012
3013 case tcc_constant:
3014 /* No side-effects. */
3015 return;
3016
3017 default:
3018 gcc_unreachable ();
3019 }
3020}
3021
3022/* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3023 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3024 we failed to create one. */
3025
3026tree
3027canonicalize_cond_expr_cond (tree t)
3028{
3029 /* For (bool)x use x != 0. */
3030 if (TREE_CODE (t) == NOP_EXPR
3031 && TREE_TYPE (t) == boolean_type_node)
3032 {
3033 tree top0 = TREE_OPERAND (t, 0);
3034 t = build2 (NE_EXPR, TREE_TYPE (t),
3035 top0, build_int_cst (TREE_TYPE (top0), 0));
3036 }
3037 /* For !x use x == 0. */
3038 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3039 {
3040 tree top0 = TREE_OPERAND (t, 0);
3041 t = build2 (EQ_EXPR, TREE_TYPE (t),
3042 top0, build_int_cst (TREE_TYPE (top0), 0));
3043 }
3044 /* For cmp ? 1 : 0 use cmp. */
3045 else if (TREE_CODE (t) == COND_EXPR
3046 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3047 && integer_onep (TREE_OPERAND (t, 1))
3048 && integer_zerop (TREE_OPERAND (t, 2)))
3049 {
3050 tree top0 = TREE_OPERAND (t, 0);
3051 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3052 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3053 }
3054
3055 if (is_gimple_condexpr (t))
3056 return t;
3057
3058 return NULL_TREE;
3059}
3060
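/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical, and gimple_cond_set_condition_from_tree is
   assumed to be the usual GIMPLE_COND setter): passes that simplify
   conditions canonicalize the tree before storing it back.  */

static void
example_replace_cond (gimple cond_stmt, tree new_cond)
{
  tree canon = canonicalize_cond_expr_cond (new_cond);
  if (canon != NULL_TREE)
    gimple_cond_set_condition_from_tree (cond_stmt, canon);
}
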
3061/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3062 the positions marked by the set ARGS_TO_SKIP. */
3063
3064gimple
3065gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3066{
3067 int i;
3068 tree fn = gimple_call_fn (stmt);
3069 int nargs = gimple_call_num_args (stmt);
3070 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3071 gimple new_stmt;
3072
3073 for (i = 0; i < nargs; i++)
3074 if (!bitmap_bit_p (args_to_skip, i))
3075 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3076
3077 new_stmt = gimple_build_call_vec (fn, vargs);
3078 VEC_free (tree, heap, vargs);
3079 if (gimple_call_lhs (stmt))
3080 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3081
3082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3083 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3084
3085 gimple_set_block (new_stmt, gimple_block (stmt));
3086 if (gimple_has_location (stmt))
3087 gimple_set_location (new_stmt, gimple_location (stmt));
3088
3089 /* Carry all the flags to the new GIMPLE_CALL. */
3090 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3091 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
3092 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
3093 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
3094 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3095 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
3096
3097 gimple_set_modified (new_stmt, true);
3098
3099 return new_stmt;
3100}
3101
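/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical): drop the first argument of the call at GSI,
   e.g. after discovering it is unused in the callee.  */

static void
example_drop_first_arg (gimple_stmt_iterator *gsi)
{
  bitmap skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (skip, 0);
  new_call = gimple_call_copy_skip_args (gsi_stmt (*gsi), skip);
  gsi_replace (gsi, new_call, false);
  BITMAP_FREE (skip);
}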
3102
3103/* Data structure used to count the number of dereferences to PTR
3104 inside an expression. */
3105struct count_ptr_d
3106{
3107 tree ptr;
3108 unsigned num_stores;
3109 unsigned num_loads;
3110};
3111
3112/* Helper for count_uses_and_derefs. Called by walk_tree to look for
3113 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
3114
3115static tree
3116count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3117{
3118 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3119 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3120
3121 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3122 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3123 the address of 'fld' as 'ptr + offsetof(fld)'. */
3124 if (TREE_CODE (*tp) == ADDR_EXPR)
3125 {
3126 *walk_subtrees = 0;
3127 return NULL_TREE;
3128 }
3129
3130 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
3131 {
3132 if (wi_p->is_lhs)
3133 count_p->num_stores++;
3134 else
3135 count_p->num_loads++;
3136 }
3137
3138 return NULL_TREE;
3139}
3140
3141/* Count the number of direct and indirect uses for pointer PTR in
3142 statement STMT. The number of direct uses is stored in
3143 *NUM_USES_P. Indirect references are counted separately depending
3144 on whether they are store or load operations. The counts are
3145 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3146
3147void
3148count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3149 unsigned *num_loads_p, unsigned *num_stores_p)
3150{
3151 ssa_op_iter i;
3152 tree use;
3153
3154 *num_uses_p = 0;
3155 *num_loads_p = 0;
3156 *num_stores_p = 0;
3157
3158 /* Find out the total number of uses of PTR in STMT. */
3159 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3160 if (use == ptr)
3161 (*num_uses_p)++;
3162
3163 /* Now count the number of indirect references to PTR. This is
3164 truly awful, but we don't have much choice. There are no parent
3165 pointers inside INDIRECT_REFs, so an expression like
3166 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3167 find all the indirect and direct uses of x_1 inside. The only
3168 shortcut we can take is the fact that GIMPLE only allows
3169 INDIRECT_REFs inside the expressions below. */
3170 if (is_gimple_assign (stmt)
3171 || gimple_code (stmt) == GIMPLE_RETURN
3172 || gimple_code (stmt) == GIMPLE_ASM
3173 || is_gimple_call (stmt))
3174 {
3175 struct walk_stmt_info wi;
3176 struct count_ptr_d count;
3177
3178 count.ptr = ptr;
3179 count.num_stores = 0;
3180 count.num_loads = 0;
3181
3182 memset (&wi, 0, sizeof (wi));
3183 wi.info = &count;
3184 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3185
3186 *num_stores_p = count.num_stores;
3187 *num_loads_p = count.num_loads;
3188 }
3189
3190 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3191}
3192
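/* A minimal usage sketch (not part of the original file; the example_*
   helper is hypothetical): check that STMT never stores through PTR.  */

static bool
example_ptr_only_loaded_p (tree ptr, gimple stmt)
{
  unsigned uses, loads, stores;

  count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
  return stores == 0;
}
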
3193/* From a tree operand OP return the base of a load or store operation
3194 or NULL_TREE if OP is not a load or a store. */
3195
3196static tree
3197get_base_loadstore (tree op)
3198{
3199 while (handled_component_p (op))
3200 op = TREE_OPERAND (op, 0);
3201 if (DECL_P (op)
3202 || INDIRECT_REF_P (op)
3203 || TREE_CODE (op) == TARGET_MEM_REF)
3204 return op;
3205 return NULL_TREE;
3206}
3207
3208/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3209 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
3210 passing the STMT, the base of the operand and DATA to it. The base
3211 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3212 or the argument of an address expression.
3213 Returns the results of these callbacks or'ed. */
3214
3215bool
3216walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3217 bool (*visit_load)(gimple, tree, void *),
3218 bool (*visit_store)(gimple, tree, void *),
3219 bool (*visit_addr)(gimple, tree, void *))
3220{
3221 bool ret = false;
3222 unsigned i;
3223 if (gimple_assign_single_p (stmt))
3224 {
3225 tree lhs, rhs;
3226 if (visit_store)
3227 {
3228 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3229 if (lhs)
3230 ret |= visit_store (stmt, lhs, data);
3231 }
3232 rhs = gimple_assign_rhs1 (stmt);
3233 while (handled_component_p (rhs))
3234 rhs = TREE_OPERAND (rhs, 0);
3235 if (visit_addr)
3236 {
3237 if (TREE_CODE (rhs) == ADDR_EXPR)
3238 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3239 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3240 && TMR_BASE (rhs) != NULL_TREE
3241 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3242 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3243 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3244 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3245 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3246 0), data);
3247 lhs = gimple_assign_lhs (stmt);
3248 if (TREE_CODE (lhs) == TARGET_MEM_REF
3249 && TMR_BASE (lhs) != NULL_TREE
3250 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3251 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3252 }
3253 if (visit_load)
3254 {
3255 rhs = get_base_loadstore (rhs);
3256 if (rhs)
3257 ret |= visit_load (stmt, rhs, data);
3258 }
3259 }
3260 else if (visit_addr
3261 && (is_gimple_assign (stmt)
3262 || gimple_code (stmt) == GIMPLE_COND))
3263 {
3264 for (i = 0; i < gimple_num_ops (stmt); ++i)
3265 if (gimple_op (stmt, i)
3266 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
3267 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
3268 }
3269 else if (is_gimple_call (stmt))
3270 {
3271 if (visit_store)
3272 {
3273 tree lhs = gimple_call_lhs (stmt);
3274 if (lhs)
3275 {
3276 lhs = get_base_loadstore (lhs);
3277 if (lhs)
3278 ret |= visit_store (stmt, lhs, data);
3279 }
3280 }
3281 if (visit_load || visit_addr)
3282 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3283 {
3284 tree rhs = gimple_call_arg (stmt, i);
3285 if (visit_addr
3286 && TREE_CODE (rhs) == ADDR_EXPR)
3287 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3288 else if (visit_load)
3289 {
3290 rhs = get_base_loadstore (rhs);
3291 if (rhs)
3292 ret |= visit_load (stmt, rhs, data);
3293 }
3294 }
3295 if (visit_addr
3296 && gimple_call_chain (stmt)
3297 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3298 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3299 data);
3300 if (visit_addr
3301 && gimple_call_return_slot_opt_p (stmt)
3302 && gimple_call_lhs (stmt) != NULL_TREE
3303 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3304 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3305 }
3306 else if (gimple_code (stmt) == GIMPLE_ASM)
3307 {
3308 unsigned noutputs;
3309 const char *constraint;
3310 const char **oconstraints;
3311 bool allows_mem, allows_reg, is_inout;
3312 noutputs = gimple_asm_noutputs (stmt);
3313 oconstraints = XALLOCAVEC (const char *, noutputs);
3314 if (visit_store || visit_addr)
3315 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3316 {
3317 tree link = gimple_asm_output_op (stmt, i);
3318 tree op = get_base_loadstore (TREE_VALUE (link));
3319 if (op && visit_store)
3320 ret |= visit_store (stmt, op, data);
3321 if (visit_addr)
3322 {
3323 constraint = TREE_STRING_POINTER
3324 (TREE_VALUE (TREE_PURPOSE (link)));
3325 oconstraints[i] = constraint;
3326 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3327 &allows_reg, &is_inout);
3328 if (op && !allows_reg && allows_mem)
3329 ret |= visit_addr (stmt, op, data);
3330 }
3331 }
3332 if (visit_load || visit_addr)
3333 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3334 {
3335 tree link = gimple_asm_input_op (stmt, i);
3336 tree op = TREE_VALUE (link);
3337 if (visit_addr
3338 && TREE_CODE (op) == ADDR_EXPR)
3339 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3340 else if (visit_load || visit_addr)
3341 {
3342 op = get_base_loadstore (op);
3343 if (op)
3344 {
3345 if (visit_load)
3346 ret |= visit_load (stmt, op, data);
3347 if (visit_addr)
3348 {
3349 constraint = TREE_STRING_POINTER
3350 (TREE_VALUE (TREE_PURPOSE (link)));
3351 parse_input_constraint (&constraint, 0, 0, noutputs,
3352 0, oconstraints,
3353 &allows_mem, &allows_reg);
3354 if (!allows_reg && allows_mem)
3355 ret |= visit_addr (stmt, op, data);
3356 }
3357 }
3358 }
3359 }
3360 }
3361 else if (gimple_code (stmt) == GIMPLE_RETURN)
3362 {
3363 tree op = gimple_return_retval (stmt);
3364 if (op)
3365 {
3366 if (visit_addr
3367 && TREE_CODE (op) == ADDR_EXPR)
3368 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3369 else if (visit_load)
3370 {
3371 op = get_base_loadstore (op);
3372 if (op)
3373 ret |= visit_load (stmt, op, data);
3374 }
3375 }
3376 }
3377 else if (visit_addr
3378 && gimple_code (stmt) == GIMPLE_PHI)
3379 {
3380 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3381 {
3382 tree op = PHI_ARG_DEF (stmt, i);
3383 if (TREE_CODE (op) == ADDR_EXPR)
3384 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3385 }
3386 }
3387
3388 return ret;
3389}
3390
3391/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3392 should make a faster clone for this case. */
3393
3394bool
3395walk_stmt_load_store_ops (gimple stmt, void *data,
3396 bool (*visit_load)(gimple, tree, void *),
3397 bool (*visit_store)(gimple, tree, void *))
3398{
3399 return walk_stmt_load_store_addr_ops (stmt, data,
3400 visit_load, visit_store, NULL);
3401}
3402
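/* A minimal usage sketch (not part of the original file; the example_*
   names are hypothetical): a store callback counting store bases,
   walked as
     unsigned n = 0;
     walk_stmt_load_store_ops (stmt, &n, NULL, example_count_store);  */

static bool
example_count_store (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                     void *data)
{
  ++*(unsigned *) data;
  return false;  /* Keep walking; a true return value is or'ed into the
                    walker's result.  */
}
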
3403/* Helper for gimple_ior_addresses_taken. */
3404
3405static bool
3406gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3407 tree addr, void *data)
3408{
3409 bitmap addresses_taken = (bitmap)data;
3410 while (handled_component_p (addr))
3411 addr = TREE_OPERAND (addr, 0);
3412 if (DECL_P (addr))
3413 {
3414 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3415 return true;
3416 }
3417 return false;
3418}
3419
3420/* Set the bit for the uid of all decls that have their address taken
3421 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3422 were any in this stmt. */
3423
3424bool
3425gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3426{
3427 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3428 gimple_ior_addresses_taken_1);
3429}
3430
3431
3432/* Return a printable name for symbol DECL. */
3433
3434const char *
3435gimple_decl_printable_name (tree decl, int verbosity)
3436{
3437 gcc_assert (decl && DECL_NAME (decl));
3438
3439 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3440 {
3441 const char *str, *mangled_str;
3442 int dmgl_opts = DMGL_NO_OPTS;
3443
3444 if (verbosity >= 2)
3445 {
3446 dmgl_opts = DMGL_VERBOSE
3447 | DMGL_TYPES
3448 | DMGL_ANSI
3449 | DMGL_GNU_V3
3450 | DMGL_RET_POSTFIX;
3451 if (TREE_CODE (decl) == FUNCTION_DECL)
3452 dmgl_opts |= DMGL_PARAMS;
3453 }
3454
3455 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3456 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
3457 return (str) ? str : mangled_str;
3458 }
3459
3460 return IDENTIFIER_POINTER (DECL_NAME (decl));
3461}
3462
3463
3464/* Fold a OBJ_TYPE_REF expression to the address of a function.
3465 KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). Adapted
3466 from cp_fold_obj_type_ref, but it tolerates types with no binfo
3467 data. */
3468
3469tree
3470gimple_fold_obj_type_ref (tree ref, tree known_type)
3471{
3472 HOST_WIDE_INT index;
3473 HOST_WIDE_INT i;
3474 tree v;
3475 tree fndecl;
3476
3477 if (TYPE_BINFO (known_type) == NULL_TREE)
3478 return NULL_TREE;
3479
3480 v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
3481 index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
3482 i = 0;
3483 while (i != index)
3484 {
3485 i += (TARGET_VTABLE_USES_DESCRIPTORS
3486 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
3487 v = TREE_CHAIN (v);
3488 }
3489
3490 fndecl = TREE_VALUE (v);
3491
3492#ifdef ENABLE_CHECKING
3493 gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
3494 DECL_VINDEX (fndecl)));
3495#endif
3496
3497 cgraph_node (fndecl)->local.vtable_method = true;
3498
3499 return build_fold_addr_expr (fndecl);
3500}
3501
3502#include "gt-gimple.h"