/* gcc/gimple.c - Gimple IR support functions.  (Blame-viewer page header
   removed; the annotations that followed were extraction noise.)  */
726a989a
RB
1/* Gimple IR support functions.
2
d1e082c2 3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
726a989a
RB
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
d7f09764 26#include "target.h"
726a989a
RB
27#include "tree.h"
28#include "ggc.h"
726a989a
RB
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "gimple.h"
32#include "diagnostic.h"
ff2a63a7 33#include "tree-flow.h"
726a989a
RB
34#include "value-prof.h"
35#include "flags.h"
d7f09764 36#include "alias.h"
4537ec0c 37#include "demangle.h"
0f443ad0 38#include "langhooks.h"
726a989a 39
/* Global canonical type table.  Hash table of canonical types keyed by
   the trees themselves; entries survive GC only while marked
   (GTY if_marked).  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
/* Cache mapping a type to its canonical-type hash value, so the hash
   need not be recomputed.  Entries use tree_int_map marking.  */
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
d7f09764 45
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
    (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Byte size of each tuple structure, indexed by GSS code; used by
   gimple_size.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

/* Printable name of each GIMPLE code, indexed by gimple_code.  */
#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Map from GIMPLE code to the GSS structure implementing it.  */
#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
74
726a989a
RB
/* Gimple stats.  Allocation counters per kind, filled only when
   GATHER_STATISTICS is enabled (see gimple_alloc_stat).  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
  "assignments",
  "phi nodes",
  "conditionals",
  "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
92
/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}
104
726a989a
RB
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  /* Map the code to its GSS structure and look up that structure's size.  */
  return gsstruct_code_size[gss_for_code (code)];
}
113
726a989a
RB
114/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
115 operands. */
116
d7f09764 117gimple
726a989a
RB
118gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
119{
120 size_t size;
121 gimple stmt;
122
123 size = gimple_size (code);
124 if (num_ops > 0)
125 size += sizeof (tree) * (num_ops - 1);
126
7aa6d18a
SB
127 if (GATHER_STATISTICS)
128 {
129 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
130 gimple_alloc_counts[(int) kind]++;
131 gimple_alloc_sizes[(int) kind] += size;
132 }
726a989a 133
a9429e29 134 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
726a989a
RB
135 gimple_set_code (stmt, code);
136 gimple_set_num_ops (stmt, num_ops);
137
138 /* Do not call gimple_set_modified here as it has other side
139 effects and this tuple is still not completely built. */
140 stmt->gsbase.modified = 1;
355a7673 141 gimple_init_singleton (stmt);
726a989a
RB
142
143 return stmt;
144}
145
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}
156
157
158
159/* Build a tuple with operands. CODE is the statement to build (which
160 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
b8698a0f 161 for the new tuple. NUM_OPS is the number of operands to allocate. */
726a989a
RB
162
163#define gimple_build_with_ops(c, s, n) \
164 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
165
166static gimple
b5b8b0ac 167gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
726a989a
RB
168 unsigned num_ops MEM_STAT_DECL)
169{
170 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
171 gimple_set_subcode (s, subcode);
172
173 return s;
174}
175
176
177/* Build a GIMPLE_RETURN statement returning RETVAL. */
178
179gimple
180gimple_build_return (tree retval)
181{
bbbbb16a 182 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
726a989a
RB
183 if (retval)
184 gimple_return_set_retval (s, retval);
185 return s;
186}
187
d086d311
RG
188/* Reset alias information on call S. */
189
190void
191gimple_call_reset_alias_info (gimple s)
192{
193 if (gimple_call_flags (s) & ECF_CONST)
194 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
195 else
196 pt_solution_reset (gimple_call_use_set (s));
197 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
198 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
199 else
200 pt_solution_reset (gimple_call_clobber_set (s));
201}
202
21860814
JJ
203/* Helper for gimple_build_call, gimple_build_call_valist,
204 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
205 components of a GIMPLE_CALL statement to function FN with NARGS
206 arguments. */
726a989a
RB
207
208static inline gimple
209gimple_build_call_1 (tree fn, unsigned nargs)
210{
bbbbb16a 211 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
7c9577be
RG
212 if (TREE_CODE (fn) == FUNCTION_DECL)
213 fn = build_fold_addr_expr (fn);
726a989a 214 gimple_set_op (s, 1, fn);
f20ca725 215 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
d086d311 216 gimple_call_reset_alias_info (s);
726a989a
RB
217 return s;
218}
219
220
221/* Build a GIMPLE_CALL statement to function FN with the arguments
222 specified in vector ARGS. */
223
224gimple
9771b263 225gimple_build_call_vec (tree fn, vec<tree> args)
726a989a
RB
226{
227 unsigned i;
9771b263 228 unsigned nargs = args.length ();
726a989a
RB
229 gimple call = gimple_build_call_1 (fn, nargs);
230
231 for (i = 0; i < nargs; i++)
9771b263 232 gimple_call_set_arg (call, i, args[i]);
726a989a
RB
233
234 return call;
235}
236
237
238/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
239 arguments. The ... are the arguments. */
240
241gimple
242gimple_build_call (tree fn, unsigned nargs, ...)
243{
244 va_list ap;
245 gimple call;
246 unsigned i;
247
248 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
249
250 call = gimple_build_call_1 (fn, nargs);
251
252 va_start (ap, nargs);
253 for (i = 0; i < nargs; i++)
254 gimple_call_set_arg (call, i, va_arg (ap, tree));
255 va_end (ap);
256
257 return call;
258}
259
260
21860814
JJ
261/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
262 arguments. AP contains the arguments. */
263
264gimple
265gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
266{
267 gimple call;
268 unsigned i;
269
270 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
271
272 call = gimple_build_call_1 (fn, nargs);
273
274 for (i = 0; i < nargs; i++)
275 gimple_call_set_arg (call, i, va_arg (ap, tree));
276
277 return call;
278}
279
280
25583c4f
RS
281/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
282 Build the basic components of a GIMPLE_CALL statement to internal
283 function FN with NARGS arguments. */
284
285static inline gimple
286gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
287{
288 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
289 s->gsbase.subcode |= GF_CALL_INTERNAL;
290 gimple_call_set_internal_fn (s, fn);
291 gimple_call_reset_alias_info (s);
292 return s;
293}
294
295
296/* Build a GIMPLE_CALL statement to internal function FN. NARGS is
297 the number of arguments. The ... are the arguments. */
298
299gimple
300gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
301{
302 va_list ap;
303 gimple call;
304 unsigned i;
305
306 call = gimple_build_call_internal_1 (fn, nargs);
307 va_start (ap, nargs);
308 for (i = 0; i < nargs; i++)
309 gimple_call_set_arg (call, i, va_arg (ap, tree));
310 va_end (ap);
311
312 return call;
313}
314
315
316/* Build a GIMPLE_CALL statement to internal function FN with the arguments
317 specified in vector ARGS. */
318
319gimple
9771b263 320gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
25583c4f
RS
321{
322 unsigned i, nargs;
323 gimple call;
324
9771b263 325 nargs = args.length ();
25583c4f
RS
326 call = gimple_build_call_internal_1 (fn, nargs);
327 for (i = 0; i < nargs; i++)
9771b263 328 gimple_call_set_arg (call, i, args[i]);
25583c4f
RS
329
330 return call;
331}
332
333
726a989a
RB
334/* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
335 assumed to be in GIMPLE form already. Minimal checking is done of
336 this fact. */
337
338gimple
339gimple_build_call_from_tree (tree t)
340{
341 unsigned i, nargs;
342 gimple call;
343 tree fndecl = get_callee_fndecl (t);
344
345 gcc_assert (TREE_CODE (t) == CALL_EXPR);
346
347 nargs = call_expr_nargs (t);
348 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
349
350 for (i = 0; i < nargs; i++)
351 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
352
353 gimple_set_block (call, TREE_BLOCK (t));
354
355 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
356 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
357 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
726a989a 358 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
63d2a353
MM
359 if (fndecl
360 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13e49da9
TV
361 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
362 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
63d2a353
MM
363 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
364 else
365 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
726a989a 366 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
9bb1a81b 367 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
d665b6e5 368 gimple_set_no_warning (call, TREE_NO_WARNING (t));
726a989a
RB
369
370 return call;
371}
372
373
374/* Extract the operands and code for expression EXPR into *SUBCODE_P,
0354c0c7 375 *OP1_P, *OP2_P and *OP3_P respectively. */
726a989a
RB
376
377void
0354c0c7
BS
378extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
379 tree *op2_p, tree *op3_p)
726a989a 380{
82d6e6fc 381 enum gimple_rhs_class grhs_class;
726a989a
RB
382
383 *subcode_p = TREE_CODE (expr);
82d6e6fc 384 grhs_class = get_gimple_rhs_class (*subcode_p);
726a989a 385
0354c0c7 386 if (grhs_class == GIMPLE_TERNARY_RHS)
726a989a
RB
387 {
388 *op1_p = TREE_OPERAND (expr, 0);
389 *op2_p = TREE_OPERAND (expr, 1);
0354c0c7
BS
390 *op3_p = TREE_OPERAND (expr, 2);
391 }
392 else if (grhs_class == GIMPLE_BINARY_RHS)
393 {
394 *op1_p = TREE_OPERAND (expr, 0);
395 *op2_p = TREE_OPERAND (expr, 1);
396 *op3_p = NULL_TREE;
726a989a 397 }
82d6e6fc 398 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
399 {
400 *op1_p = TREE_OPERAND (expr, 0);
401 *op2_p = NULL_TREE;
0354c0c7 402 *op3_p = NULL_TREE;
726a989a 403 }
82d6e6fc 404 else if (grhs_class == GIMPLE_SINGLE_RHS)
726a989a
RB
405 {
406 *op1_p = expr;
407 *op2_p = NULL_TREE;
0354c0c7 408 *op3_p = NULL_TREE;
726a989a
RB
409 }
410 else
411 gcc_unreachable ();
412}
413
414
415/* Build a GIMPLE_ASSIGN statement.
416
417 LHS of the assignment.
418 RHS of the assignment which can be unary or binary. */
419
420gimple
421gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
422{
423 enum tree_code subcode;
0354c0c7 424 tree op1, op2, op3;
726a989a 425
0354c0c7 426 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
73804b12
RG
427 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
428 PASS_MEM_STAT);
726a989a
RB
429}
430
431
432/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
433 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
434 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
435
436gimple
73804b12
RG
437gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
438 tree op2, tree op3 MEM_STAT_DECL)
726a989a
RB
439{
440 unsigned num_ops;
441 gimple p;
442
443 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
444 code). */
445 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
b8698a0f 446
b5b8b0ac 447 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
726a989a
RB
448 PASS_MEM_STAT);
449 gimple_assign_set_lhs (p, lhs);
450 gimple_assign_set_rhs1 (p, op1);
451 if (op2)
452 {
453 gcc_assert (num_ops > 2);
454 gimple_assign_set_rhs2 (p, op2);
455 }
456
0354c0c7
BS
457 if (op3)
458 {
459 gcc_assert (num_ops > 3);
460 gimple_assign_set_rhs3 (p, op3);
461 }
462
726a989a
RB
463 return p;
464}
465
73804b12
RG
466gimple
467gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
468 tree op2 MEM_STAT_DECL)
469{
470 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
471 PASS_MEM_STAT);
472}
473
726a989a
RB
474
475/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
476
477 DST/SRC are the destination and source respectively. You can pass
478 ungimplified trees in DST or SRC, in which case they will be
479 converted to a gimple operand if necessary.
480
481 This function returns the newly created GIMPLE_ASSIGN tuple. */
482
5fd8300b 483gimple
726a989a 484gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
b8698a0f 485{
726a989a
RB
486 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
487 gimplify_and_add (t, seq_p);
488 ggc_free (t);
489 return gimple_seq_last_stmt (*seq_p);
490}
491
492
493/* Build a GIMPLE_COND statement.
494
495 PRED is the condition used to compare LHS and the RHS.
496 T_LABEL is the label to jump to if the condition is true.
497 F_LABEL is the label to jump to otherwise. */
498
499gimple
500gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
501 tree t_label, tree f_label)
502{
503 gimple p;
504
505 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
506 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
507 gimple_cond_set_lhs (p, lhs);
508 gimple_cond_set_rhs (p, rhs);
509 gimple_cond_set_true_label (p, t_label);
510 gimple_cond_set_false_label (p, f_label);
511 return p;
512}
513
514
515/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
516
517void
518gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
519 tree *lhs_p, tree *rhs_p)
520{
521 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
522 || TREE_CODE (cond) == TRUTH_NOT_EXPR
523 || is_gimple_min_invariant (cond)
524 || SSA_VAR_P (cond));
525
526 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
527
528 /* Canonicalize conditionals of the form 'if (!VAL)'. */
529 if (*code_p == TRUTH_NOT_EXPR)
530 {
531 *code_p = EQ_EXPR;
532 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
e8160c9a 533 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
726a989a
RB
534 }
535 /* Canonicalize conditionals of the form 'if (VAL)' */
536 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
537 {
538 *code_p = NE_EXPR;
539 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
e8160c9a 540 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
726a989a
RB
541 }
542}
543
544
545/* Build a GIMPLE_COND statement from the conditional expression tree
546 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
547
548gimple
549gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
550{
551 enum tree_code code;
552 tree lhs, rhs;
553
554 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
555 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
556}
557
558/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
559 boolean expression tree COND. */
560
561void
562gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
563{
564 enum tree_code code;
565 tree lhs, rhs;
566
567 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
568 gimple_cond_set_condition (stmt, code, lhs, rhs);
569}
570
571/* Build a GIMPLE_LABEL statement for LABEL. */
572
573gimple
574gimple_build_label (tree label)
575{
bbbbb16a 576 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
726a989a
RB
577 gimple_label_set_label (p, label);
578 return p;
579}
580
581/* Build a GIMPLE_GOTO statement to label DEST. */
582
583gimple
584gimple_build_goto (tree dest)
585{
bbbbb16a 586 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
726a989a
RB
587 gimple_goto_set_dest (p, dest);
588 return p;
589}
590
591
/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  /* A NOP carries no operands; allocate the bare tuple.  */
  return gimple_alloc (GIMPLE_NOP, 0);
}
599
600
601/* Build a GIMPLE_BIND statement.
602 VARS are the variables in BODY.
603 BLOCK is the containing block. */
604
605gimple
606gimple_build_bind (tree vars, gimple_seq body, tree block)
607{
608 gimple p = gimple_alloc (GIMPLE_BIND, 0);
609 gimple_bind_set_vars (p, vars);
610 if (body)
611 gimple_bind_set_body (p, body);
612 if (block)
613 gimple_bind_set_block (p, block);
614 return p;
615}
616
617/* Helper function to set the simple fields of a asm stmt.
618
619 STRING is a pointer to a string that is the asm blocks assembly code.
620 NINPUT is the number of register inputs.
621 NOUTPUT is the number of register outputs.
622 NCLOBBERS is the number of clobbered registers.
623 */
624
625static inline gimple
b8698a0f 626gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
1c384bf1 627 unsigned nclobbers, unsigned nlabels)
726a989a
RB
628{
629 gimple p;
630 int size = strlen (string);
631
1c384bf1
RH
632 /* ASMs with labels cannot have outputs. This should have been
633 enforced by the front end. */
634 gcc_assert (nlabels == 0 || noutputs == 0);
635
bbbbb16a 636 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
1c384bf1 637 ninputs + noutputs + nclobbers + nlabels);
726a989a
RB
638
639 p->gimple_asm.ni = ninputs;
640 p->gimple_asm.no = noutputs;
641 p->gimple_asm.nc = nclobbers;
1c384bf1 642 p->gimple_asm.nl = nlabels;
726a989a
RB
643 p->gimple_asm.string = ggc_alloc_string (string, size);
644
7aa6d18a
SB
645 if (GATHER_STATISTICS)
646 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
b8698a0f 647
726a989a
RB
648 return p;
649}
650
651/* Build a GIMPLE_ASM statement.
652
653 STRING is the assembly code.
654 NINPUT is the number of register inputs.
655 NOUTPUT is the number of register outputs.
656 NCLOBBERS is the number of clobbered registers.
657 INPUTS is a vector of the input register parameters.
658 OUTPUTS is a vector of the output register parameters.
1c384bf1
RH
659 CLOBBERS is a vector of the clobbered register parameters.
660 LABELS is a vector of destination labels. */
726a989a
RB
661
662gimple
9771b263
DN
663gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
664 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
665 vec<tree, va_gc> *labels)
726a989a
RB
666{
667 gimple p;
668 unsigned i;
669
670 p = gimple_build_asm_1 (string,
9771b263
DN
671 vec_safe_length (inputs),
672 vec_safe_length (outputs),
673 vec_safe_length (clobbers),
674 vec_safe_length (labels));
b8698a0f 675
9771b263
DN
676 for (i = 0; i < vec_safe_length (inputs); i++)
677 gimple_asm_set_input_op (p, i, (*inputs)[i]);
726a989a 678
9771b263
DN
679 for (i = 0; i < vec_safe_length (outputs); i++)
680 gimple_asm_set_output_op (p, i, (*outputs)[i]);
726a989a 681
9771b263
DN
682 for (i = 0; i < vec_safe_length (clobbers); i++)
683 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
b8698a0f 684
9771b263
DN
685 for (i = 0; i < vec_safe_length (labels); i++)
686 gimple_asm_set_label_op (p, i, (*labels)[i]);
b8698a0f 687
726a989a
RB
688 return p;
689}
690
691/* Build a GIMPLE_CATCH statement.
692
693 TYPES are the catch types.
694 HANDLER is the exception handler. */
695
696gimple
697gimple_build_catch (tree types, gimple_seq handler)
698{
699 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
700 gimple_catch_set_types (p, types);
701 if (handler)
702 gimple_catch_set_handler (p, handler);
703
704 return p;
705}
706
707/* Build a GIMPLE_EH_FILTER statement.
708
709 TYPES are the filter's types.
710 FAILURE is the filter's failure action. */
711
712gimple
713gimple_build_eh_filter (tree types, gimple_seq failure)
714{
715 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
716 gimple_eh_filter_set_types (p, types);
717 if (failure)
718 gimple_eh_filter_set_failure (p, failure);
719
720 return p;
721}
722
1d65f45c
RH
723/* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
724
725gimple
726gimple_build_eh_must_not_throw (tree decl)
727{
786f715d 728 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
1d65f45c
RH
729
730 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
731 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
d7f09764 732 gimple_eh_must_not_throw_set_fndecl (p, decl);
1d65f45c
RH
733
734 return p;
735}
736
0a35513e
AH
737/* Build a GIMPLE_EH_ELSE statement. */
738
739gimple
740gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
741{
742 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
743 gimple_eh_else_set_n_body (p, n_body);
744 gimple_eh_else_set_e_body (p, e_body);
745 return p;
746}
747
726a989a
RB
748/* Build a GIMPLE_TRY statement.
749
750 EVAL is the expression to evaluate.
751 CLEANUP is the cleanup expression.
752 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
753 whether this is a try/catch or a try/finally respectively. */
754
755gimple
756gimple_build_try (gimple_seq eval, gimple_seq cleanup,
757 enum gimple_try_flags kind)
758{
759 gimple p;
760
761 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
762 p = gimple_alloc (GIMPLE_TRY, 0);
763 gimple_set_subcode (p, kind);
764 if (eval)
765 gimple_try_set_eval (p, eval);
766 if (cleanup)
767 gimple_try_set_cleanup (p, cleanup);
768
769 return p;
770}
771
772/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
773
774 CLEANUP is the cleanup expression. */
775
776gimple
777gimple_build_wce (gimple_seq cleanup)
778{
779 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
780 if (cleanup)
781 gimple_wce_set_cleanup (p, cleanup);
782
783 return p;
784}
785
786
1d65f45c 787/* Build a GIMPLE_RESX statement. */
726a989a
RB
788
789gimple
790gimple_build_resx (int region)
791{
1d65f45c
RH
792 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
793 p->gimple_eh_ctrl.region = region;
726a989a
RB
794 return p;
795}
796
797
798/* The helper for constructing a gimple switch statement.
799 INDEX is the switch's index.
800 NLABELS is the number of labels in the switch excluding the default.
801 DEFAULT_LABEL is the default label for the switch statement. */
802
b8698a0f 803gimple
1d65f45c 804gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
726a989a
RB
805{
806 /* nlabels + 1 default label + 1 index. */
fd8d363e 807 gcc_checking_assert (default_label);
bbbbb16a 808 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
fd8d363e 809 1 + 1 + nlabels);
726a989a 810 gimple_switch_set_index (p, index);
fd8d363e 811 gimple_switch_set_default_label (p, default_label);
726a989a
RB
812 return p;
813}
814
726a989a
RB
815/* Build a GIMPLE_SWITCH statement.
816
817 INDEX is the switch's index.
818 DEFAULT_LABEL is the default label
819 ARGS is a vector of labels excluding the default. */
820
821gimple
9771b263 822gimple_build_switch (tree index, tree default_label, vec<tree> args)
726a989a 823{
9771b263 824 unsigned i, nlabels = args.length ();
fd8d363e 825
1d65f45c 826 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
726a989a 827
1d65f45c 828 /* Copy the labels from the vector to the switch statement. */
1d65f45c 829 for (i = 0; i < nlabels; i++)
9771b263 830 gimple_switch_set_label (p, i + 1, args[i]);
726a989a
RB
831
832 return p;
833}
834
1d65f45c
RH
835/* Build a GIMPLE_EH_DISPATCH statement. */
836
837gimple
838gimple_build_eh_dispatch (int region)
839{
840 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
841 p->gimple_eh_ctrl.region = region;
842 return p;
843}
726a989a 844
b5b8b0ac
AO
845/* Build a new GIMPLE_DEBUG_BIND statement.
846
847 VAR is bound to VALUE; block and location are taken from STMT. */
848
849gimple
850gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
851{
852 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
853 (unsigned)GIMPLE_DEBUG_BIND, 2
854 PASS_MEM_STAT);
855
856 gimple_debug_bind_set_var (p, var);
857 gimple_debug_bind_set_value (p, value);
858 if (stmt)
5368224f 859 gimple_set_location (p, gimple_location (stmt));
b5b8b0ac
AO
860
861 return p;
862}
863
864
ddb555ed
JJ
865/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
866
867 VAR is bound to VALUE; block and location are taken from STMT. */
868
869gimple
870gimple_build_debug_source_bind_stat (tree var, tree value,
871 gimple stmt MEM_STAT_DECL)
872{
873 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
874 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
875 PASS_MEM_STAT);
876
877 gimple_debug_source_bind_set_var (p, var);
878 gimple_debug_source_bind_set_value (p, value);
879 if (stmt)
5368224f 880 gimple_set_location (p, gimple_location (stmt));
ddb555ed
JJ
881
882 return p;
883}
884
885
726a989a
RB
886/* Build a GIMPLE_OMP_CRITICAL statement.
887
888 BODY is the sequence of statements for which only one thread can execute.
889 NAME is optional identifier for this critical block. */
890
b8698a0f 891gimple
726a989a
RB
892gimple_build_omp_critical (gimple_seq body, tree name)
893{
894 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
895 gimple_omp_critical_set_name (p, name);
896 if (body)
897 gimple_omp_set_body (p, body);
898
899 return p;
900}
901
902/* Build a GIMPLE_OMP_FOR statement.
903
904 BODY is sequence of statements inside the for loop.
74bf76ed 905 KIND is the `for' variant.
b8698a0f 906 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
726a989a
RB
907 lastprivate, reductions, ordered, schedule, and nowait.
908 COLLAPSE is the collapse count.
909 PRE_BODY is the sequence of statements that are loop invariant. */
910
911gimple
74bf76ed 912gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
726a989a
RB
913 gimple_seq pre_body)
914{
915 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
916 if (body)
917 gimple_omp_set_body (p, body);
918 gimple_omp_for_set_clauses (p, clauses);
74bf76ed 919 gimple_omp_for_set_kind (p, kind);
726a989a 920 p->gimple_omp_for.collapse = collapse;
a9429e29
LB
921 p->gimple_omp_for.iter
922 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
726a989a
RB
923 if (pre_body)
924 gimple_omp_for_set_pre_body (p, pre_body);
925
926 return p;
927}
928
929
930/* Build a GIMPLE_OMP_PARALLEL statement.
931
932 BODY is sequence of statements which are executed in parallel.
933 CLAUSES, are the OMP parallel construct's clauses.
934 CHILD_FN is the function created for the parallel threads to execute.
935 DATA_ARG are the shared data argument(s). */
936
b8698a0f
L
937gimple
938gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
726a989a
RB
939 tree data_arg)
940{
941 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
942 if (body)
943 gimple_omp_set_body (p, body);
944 gimple_omp_parallel_set_clauses (p, clauses);
945 gimple_omp_parallel_set_child_fn (p, child_fn);
946 gimple_omp_parallel_set_data_arg (p, data_arg);
947
948 return p;
949}
950
951
952/* Build a GIMPLE_OMP_TASK statement.
953
954 BODY is sequence of statements which are executed by the explicit task.
955 CLAUSES, are the OMP parallel construct's clauses.
956 CHILD_FN is the function created for the parallel threads to execute.
957 DATA_ARG are the shared data argument(s).
958 COPY_FN is the optional function for firstprivate initialization.
959 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
960
b8698a0f 961gimple
726a989a
RB
962gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
963 tree data_arg, tree copy_fn, tree arg_size,
964 tree arg_align)
965{
966 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
967 if (body)
968 gimple_omp_set_body (p, body);
969 gimple_omp_task_set_clauses (p, clauses);
970 gimple_omp_task_set_child_fn (p, child_fn);
971 gimple_omp_task_set_data_arg (p, data_arg);
972 gimple_omp_task_set_copy_fn (p, copy_fn);
973 gimple_omp_task_set_arg_size (p, arg_size);
974 gimple_omp_task_set_arg_align (p, arg_align);
975
976 return p;
977}
978
979
980/* Build a GIMPLE_OMP_SECTION statement for a sections statement.
981
982 BODY is the sequence of statements in the section. */
983
984gimple
985gimple_build_omp_section (gimple_seq body)
986{
987 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
988 if (body)
989 gimple_omp_set_body (p, body);
990
991 return p;
992}
993
994
995/* Build a GIMPLE_OMP_MASTER statement.
996
997 BODY is the sequence of statements to be executed by just the master. */
998
b8698a0f 999gimple
726a989a
RB
1000gimple_build_omp_master (gimple_seq body)
1001{
1002 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1003 if (body)
1004 gimple_omp_set_body (p, body);
1005
1006 return p;
1007}
1008
1009
acf0174b
JJ
1010/* Build a GIMPLE_OMP_TASKGROUP statement.
1011
1012 BODY is the sequence of statements to be executed by the taskgroup
1013 construct. */
1014
1015gimple
1016gimple_build_omp_taskgroup (gimple_seq body)
1017{
1018 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
1019 if (body)
1020 gimple_omp_set_body (p, body);
1021
1022 return p;
1023}
1024
1025
726a989a
RB
1026/* Build a GIMPLE_OMP_CONTINUE statement.
1027
1028 CONTROL_DEF is the definition of the control variable.
1029 CONTROL_USE is the use of the control variable. */
1030
b8698a0f 1031gimple
726a989a
RB
1032gimple_build_omp_continue (tree control_def, tree control_use)
1033{
1034 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1035 gimple_omp_continue_set_control_def (p, control_def);
1036 gimple_omp_continue_set_control_use (p, control_use);
1037 return p;
1038}
1039
1040/* Build a GIMPLE_OMP_ORDERED statement.
1041
1042 BODY is the sequence of statements inside a loop that will executed in
1043 sequence. */
1044
b8698a0f 1045gimple
726a989a
RB
1046gimple_build_omp_ordered (gimple_seq body)
1047{
1048 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1049 if (body)
1050 gimple_omp_set_body (p, body);
1051
1052 return p;
1053}
1054
1055
1056/* Build a GIMPLE_OMP_RETURN statement.
1057 WAIT_P is true if this is a non-waiting return. */
1058
b8698a0f 1059gimple
726a989a
RB
1060gimple_build_omp_return (bool wait_p)
1061{
1062 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1063 if (wait_p)
1064 gimple_omp_return_set_nowait (p);
1065
1066 return p;
1067}
1068
1069
1070/* Build a GIMPLE_OMP_SECTIONS statement.
1071
1072 BODY is a sequence of section statements.
1073 CLAUSES are any of the OMP sections contsruct's clauses: private,
1074 firstprivate, lastprivate, reduction, and nowait. */
1075
b8698a0f 1076gimple
726a989a
RB
1077gimple_build_omp_sections (gimple_seq body, tree clauses)
1078{
1079 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1080 if (body)
1081 gimple_omp_set_body (p, body);
1082 gimple_omp_sections_set_clauses (p, clauses);
1083
1084 return p;
1085}
1086
1087
1088/* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1089
1090gimple
1091gimple_build_omp_sections_switch (void)
1092{
1093 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1094}
1095
1096
1097/* Build a GIMPLE_OMP_SINGLE statement.
1098
1099 BODY is the sequence of statements that will be executed once.
1100 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1101 copyprivate, nowait. */
1102
b8698a0f 1103gimple
726a989a
RB
1104gimple_build_omp_single (gimple_seq body, tree clauses)
1105{
1106 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1107 if (body)
1108 gimple_omp_set_body (p, body);
1109 gimple_omp_single_set_clauses (p, clauses);
1110
1111 return p;
1112}
1113
1114
acf0174b
JJ
1115/* Build a GIMPLE_OMP_TARGET statement.
1116
1117 BODY is the sequence of statements that will be executed.
1118 CLAUSES are any of the OMP target construct's clauses. */
1119
1120gimple
1121gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1122{
1123 gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
1124 if (body)
1125 gimple_omp_set_body (p, body);
1126 gimple_omp_target_set_clauses (p, clauses);
1127 gimple_omp_target_set_kind (p, kind);
1128
1129 return p;
1130}
1131
1132
1133/* Build a GIMPLE_OMP_TEAMS statement.
1134
1135 BODY is the sequence of statements that will be executed.
1136 CLAUSES are any of the OMP teams construct's clauses. */
1137
1138gimple
1139gimple_build_omp_teams (gimple_seq body, tree clauses)
1140{
1141 gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
1142 if (body)
1143 gimple_omp_set_body (p, body);
1144 gimple_omp_teams_set_clauses (p, clauses);
1145
1146 return p;
1147}
1148
1149
726a989a
RB
1150/* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1151
1152gimple
1153gimple_build_omp_atomic_load (tree lhs, tree rhs)
1154{
1155 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1156 gimple_omp_atomic_load_set_lhs (p, lhs);
1157 gimple_omp_atomic_load_set_rhs (p, rhs);
1158 return p;
1159}
1160
1161/* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1162
1163 VAL is the value we are storing. */
1164
1165gimple
1166gimple_build_omp_atomic_store (tree val)
1167{
1168 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1169 gimple_omp_atomic_store_set_val (p, val);
1170 return p;
1171}
1172
0a35513e
AH
1173/* Build a GIMPLE_TRANSACTION statement. */
1174
1175gimple
1176gimple_build_transaction (gimple_seq body, tree label)
1177{
1178 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1179 gimple_transaction_set_body (p, body);
1180 gimple_transaction_set_label (p, label);
1181 return p;
1182}
1183
726a989a
RB
1184/* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1185 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1186
1187gimple
1188gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1189{
1190 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1191 /* Ensure all the predictors fit into the lower bits of the subcode. */
e0c68ce9 1192 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
726a989a
RB
1193 gimple_predict_set_predictor (p, predictor);
1194 gimple_predict_set_outcome (p, outcome);
1195 return p;
1196}
1197
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  Reports the expected
   code/subcode pair against the one actually found in GS, along with the
   source location of the failed check.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* A zero subcode has no tree-code name; print an empty string then.  */
  const char *subcode_name
    = gs->gsbase.subcode > 0 ? tree_code_name[gs->gsbase.subcode] : "";

  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  subcode_name,
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1216
1217
726a989a
RB
1218/* Link gimple statement GS to the end of the sequence *SEQ_P. If
1219 *SEQ_P is NULL, a new sequence is allocated. */
1220
1221void
1222gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1223{
1224 gimple_stmt_iterator si;
726a989a
RB
1225 if (gs == NULL)
1226 return;
1227
726a989a
RB
1228 si = gsi_last (*seq_p);
1229 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1230}
1231
1232
1233/* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1234 NULL, a new sequence is allocated. */
1235
1236void
1237gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1238{
1239 gimple_stmt_iterator si;
726a989a
RB
1240 if (src == NULL)
1241 return;
1242
726a989a
RB
1243 si = gsi_last (*dst_p);
1244 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1245}
1246
1247
1248/* Helper function of empty_body_p. Return true if STMT is an empty
1249 statement. */
1250
1251static bool
1252empty_stmt_p (gimple stmt)
1253{
1254 if (gimple_code (stmt) == GIMPLE_NOP)
1255 return true;
1256 if (gimple_code (stmt) == GIMPLE_BIND)
1257 return empty_body_p (gimple_bind_body (stmt));
1258 return false;
1259}
1260
1261
1262/* Return true if BODY contains nothing but empty statements. */
1263
1264bool
1265empty_body_p (gimple_seq body)
1266{
1267 gimple_stmt_iterator i;
1268
726a989a
RB
1269 if (gimple_seq_empty_p (body))
1270 return true;
1271 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
b5b8b0ac
AO
1272 if (!empty_stmt_p (gsi_stmt (i))
1273 && !is_gimple_debug (gsi_stmt (i)))
726a989a
RB
1274 return false;
1275
1276 return true;
1277}
1278
1279
1280/* Perform a deep copy of sequence SRC and return the result. */
1281
1282gimple_seq
1283gimple_seq_copy (gimple_seq src)
1284{
1285 gimple_stmt_iterator gsi;
355a7673 1286 gimple_seq new_seq = NULL;
726a989a
RB
1287 gimple stmt;
1288
1289 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1290 {
1291 stmt = gimple_copy (gsi_stmt (gsi));
82d6e6fc 1292 gimple_seq_add_stmt (&new_seq, stmt);
726a989a
RB
1293 }
1294
82d6e6fc 1295 return new_seq;
726a989a
RB
1296}
1297
1298
355a7673 1299/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
726a989a 1300 on each one. WI is as in walk_gimple_stmt.
b8698a0f 1301
0a35513e
AH
1302 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
1303 value is stored in WI->CALLBACK_RESULT. Also, the statement that
1304 produced the value is returned if this statement has not been
1305 removed by a callback (wi->removed_stmt). If the statement has
1306 been removed, NULL is returned.
726a989a
RB
1307
1308 Otherwise, all the statements are walked and NULL returned. */
1309
1310gimple
355a7673
MM
1311walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
1312 walk_tree_fn callback_op, struct walk_stmt_info *wi)
726a989a
RB
1313{
1314 gimple_stmt_iterator gsi;
1315
355a7673 1316 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
726a989a
RB
1317 {
1318 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1319 if (ret)
1320 {
1321 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1322 to hold it. */
1323 gcc_assert (wi);
1324 wi->callback_result = ret;
0a35513e
AH
1325
1326 return wi->removed_stmt ? NULL : gsi_stmt (gsi);
726a989a 1327 }
0a35513e
AH
1328
1329 if (!wi->removed_stmt)
1330 gsi_next (&gsi);
726a989a
RB
1331 }
1332
1333 if (wi)
1334 wi->callback_result = NULL_TREE;
1335
1336 return NULL;
1337}
1338
1339
355a7673
MM
1340/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
1341 changed by the callbacks. */
1342
1343gimple
1344walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1345 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1346{
1347 gimple_seq seq2 = seq;
1348 gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
1349 gcc_assert (seq2 == seq);
1350 return ret;
1351}
1352
1353
726a989a
RB
1354/* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1355
1356static tree
1357walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1358 struct walk_stmt_info *wi)
1359{
1c384bf1 1360 tree ret, op;
726a989a
RB
1361 unsigned noutputs;
1362 const char **oconstraints;
1c384bf1 1363 unsigned i, n;
726a989a
RB
1364 const char *constraint;
1365 bool allows_mem, allows_reg, is_inout;
1366
1367 noutputs = gimple_asm_noutputs (stmt);
1368 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1369
1370 if (wi)
1371 wi->is_lhs = true;
1372
1373 for (i = 0; i < noutputs; i++)
1374 {
1c384bf1 1375 op = gimple_asm_output_op (stmt, i);
726a989a
RB
1376 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1377 oconstraints[i] = constraint;
1378 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1379 &is_inout);
1380 if (wi)
1381 wi->val_only = (allows_reg || !allows_mem);
1382 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1383 if (ret)
1384 return ret;
1385 }
1386
1c384bf1
RH
1387 n = gimple_asm_ninputs (stmt);
1388 for (i = 0; i < n; i++)
726a989a 1389 {
1c384bf1 1390 op = gimple_asm_input_op (stmt, i);
726a989a
RB
1391 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1392 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1393 oconstraints, &allows_mem, &allows_reg);
1394 if (wi)
1c384bf1
RH
1395 {
1396 wi->val_only = (allows_reg || !allows_mem);
1397 /* Although input "m" is not really a LHS, we need a lvalue. */
1398 wi->is_lhs = !wi->val_only;
1399 }
726a989a
RB
1400 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1401 if (ret)
1402 return ret;
1403 }
1404
1405 if (wi)
1406 {
1407 wi->is_lhs = false;
1408 wi->val_only = true;
1409 }
1410
1c384bf1
RH
1411 n = gimple_asm_nlabels (stmt);
1412 for (i = 0; i < n; i++)
1413 {
1414 op = gimple_asm_label_op (stmt, i);
1415 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1416 if (ret)
1417 return ret;
1418 }
1419
726a989a
RB
1420 return NULL_TREE;
1421}
1422
1423
1424/* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1425 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1426
1427 CALLBACK_OP is called on each operand of STMT via walk_tree.
1428 Additional parameters to walk_tree must be stored in WI. For each operand
1429 OP, walk_tree is called as:
1430
1431 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1432
1433 If CALLBACK_OP returns non-NULL for an operand, the remaining
1434 operands are not scanned.
1435
1436 The return value is that returned by the last call to walk_tree, or
1437 NULL_TREE if no CALLBACK_OP is specified. */
1438
6a4d4e8a 1439tree
726a989a
RB
1440walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1441 struct walk_stmt_info *wi)
1442{
1443 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1444 unsigned i;
1445 tree ret = NULL_TREE;
1446
1447 switch (gimple_code (stmt))
1448 {
1449 case GIMPLE_ASSIGN:
cb3d597d
EB
1450 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1451 is a register variable, we may use a COMPONENT_REF on the RHS. */
726a989a 1452 if (wi)
cb3d597d
EB
1453 {
1454 tree lhs = gimple_assign_lhs (stmt);
1455 wi->val_only
1456 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
b9af73fc 1457 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
cb3d597d 1458 }
726a989a
RB
1459
1460 for (i = 1; i < gimple_num_ops (stmt); i++)
1461 {
1462 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1463 pset);
1464 if (ret)
1465 return ret;
1466 }
1467
1468 /* Walk the LHS. If the RHS is appropriate for a memory, we
1469 may use a COMPONENT_REF on the LHS. */
1470 if (wi)
1471 {
216820a4
RG
1472 /* If the RHS is of a non-renamable type or is a register variable,
1473 we may use a COMPONENT_REF on the LHS. */
b9af73fc 1474 tree rhs1 = gimple_assign_rhs1 (stmt);
216820a4
RG
1475 wi->val_only
1476 = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
1477 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
726a989a
RB
1478 wi->is_lhs = true;
1479 }
1480
1481 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1482 if (ret)
1483 return ret;
1484
1485 if (wi)
1486 {
1487 wi->val_only = true;
1488 wi->is_lhs = false;
1489 }
1490 break;
1491
1492 case GIMPLE_CALL:
1493 if (wi)
523968bf
RG
1494 {
1495 wi->is_lhs = false;
1496 wi->val_only = true;
1497 }
726a989a
RB
1498
1499 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1500 if (ret)
1501 return ret;
1502
1503 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1504 if (ret)
1505 return ret;
1506
1507 for (i = 0; i < gimple_call_num_args (stmt); i++)
1508 {
523968bf 1509 if (wi)
4d931f41
EB
1510 wi->val_only
1511 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
726a989a
RB
1512 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1513 pset);
1514 if (ret)
1515 return ret;
1516 }
1517
523968bf
RG
1518 if (gimple_call_lhs (stmt))
1519 {
1520 if (wi)
1521 {
1522 wi->is_lhs = true;
4d931f41
EB
1523 wi->val_only
1524 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
523968bf 1525 }
726a989a 1526
523968bf
RG
1527 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1528 if (ret)
1529 return ret;
1530 }
726a989a
RB
1531
1532 if (wi)
523968bf
RG
1533 {
1534 wi->is_lhs = false;
1535 wi->val_only = true;
1536 }
726a989a
RB
1537 break;
1538
1539 case GIMPLE_CATCH:
1540 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1541 pset);
1542 if (ret)
1543 return ret;
1544 break;
1545
1546 case GIMPLE_EH_FILTER:
1547 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1548 pset);
1549 if (ret)
1550 return ret;
1551 break;
1552
726a989a
RB
1553 case GIMPLE_ASM:
1554 ret = walk_gimple_asm (stmt, callback_op, wi);
1555 if (ret)
1556 return ret;
1557 break;
1558
1559 case GIMPLE_OMP_CONTINUE:
1560 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1561 callback_op, wi, pset);
1562 if (ret)
1563 return ret;
1564
1565 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1566 callback_op, wi, pset);
1567 if (ret)
1568 return ret;
1569 break;
1570
1571 case GIMPLE_OMP_CRITICAL:
1572 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1573 pset);
1574 if (ret)
1575 return ret;
1576 break;
1577
1578 case GIMPLE_OMP_FOR:
1579 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1580 pset);
1581 if (ret)
1582 return ret;
1583 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1584 {
1585 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1586 wi, pset);
1587 if (ret)
1588 return ret;
1589 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1590 wi, pset);
1591 if (ret)
1592 return ret;
1593 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1594 wi, pset);
1595 if (ret)
1596 return ret;
1597 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1598 wi, pset);
1599 }
1600 if (ret)
1601 return ret;
1602 break;
1603
1604 case GIMPLE_OMP_PARALLEL:
1605 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1606 wi, pset);
1607 if (ret)
1608 return ret;
1609 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1610 wi, pset);
1611 if (ret)
1612 return ret;
1613 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1614 wi, pset);
1615 if (ret)
1616 return ret;
1617 break;
1618
1619 case GIMPLE_OMP_TASK:
1620 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1621 wi, pset);
1622 if (ret)
1623 return ret;
1624 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1625 wi, pset);
1626 if (ret)
1627 return ret;
1628 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1629 wi, pset);
1630 if (ret)
1631 return ret;
1632 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1633 wi, pset);
1634 if (ret)
1635 return ret;
1636 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1637 wi, pset);
1638 if (ret)
1639 return ret;
1640 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1641 wi, pset);
1642 if (ret)
1643 return ret;
1644 break;
1645
1646 case GIMPLE_OMP_SECTIONS:
1647 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1648 wi, pset);
1649 if (ret)
1650 return ret;
1651
1652 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1653 wi, pset);
1654 if (ret)
1655 return ret;
1656
1657 break;
1658
1659 case GIMPLE_OMP_SINGLE:
1660 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1661 pset);
1662 if (ret)
1663 return ret;
1664 break;
1665
acf0174b
JJ
1666 case GIMPLE_OMP_TARGET:
1667 ret = walk_tree (gimple_omp_target_clauses_ptr (stmt), callback_op, wi,
1668 pset);
1669 if (ret)
1670 return ret;
1671 break;
1672
1673 case GIMPLE_OMP_TEAMS:
1674 ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
1675 pset);
1676 if (ret)
1677 return ret;
1678 break;
1679
726a989a
RB
1680 case GIMPLE_OMP_ATOMIC_LOAD:
1681 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1682 pset);
1683 if (ret)
1684 return ret;
1685
1686 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1687 pset);
1688 if (ret)
1689 return ret;
1690 break;
1691
1692 case GIMPLE_OMP_ATOMIC_STORE:
1693 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1694 wi, pset);
1695 if (ret)
1696 return ret;
1697 break;
1698
0a35513e
AH
1699 case GIMPLE_TRANSACTION:
1700 ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
1701 wi, pset);
1702 if (ret)
1703 return ret;
1704 break;
1705
acf0174b
JJ
1706 case GIMPLE_OMP_RETURN:
1707 ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
1708 pset);
1709 if (ret)
1710 return ret;
1711 break;
1712
726a989a
RB
1713 /* Tuples that do not have operands. */
1714 case GIMPLE_NOP:
1715 case GIMPLE_RESX:
726a989a
RB
1716 case GIMPLE_PREDICT:
1717 break;
1718
1719 default:
1720 {
1721 enum gimple_statement_structure_enum gss;
1722 gss = gimple_statement_structure (stmt);
1723 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1724 for (i = 0; i < gimple_num_ops (stmt); i++)
1725 {
1726 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1727 if (ret)
1728 return ret;
1729 }
1730 }
1731 break;
1732 }
1733
1734 return NULL_TREE;
1735}
1736
1737
1738/* Walk the current statement in GSI (optionally using traversal state
1739 stored in WI). If WI is NULL, no state is kept during traversal.
1740 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1741 that it has handled all the operands of the statement, its return
1742 value is returned. Otherwise, the return value from CALLBACK_STMT
1743 is discarded and its operands are scanned.
1744
1745 If CALLBACK_STMT is NULL or it didn't handle the operands,
1746 CALLBACK_OP is called on each operand of the statement via
1747 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1748 operand, the remaining operands are not scanned. In this case, the
1749 return value from CALLBACK_OP is returned.
1750
1751 In any other case, NULL_TREE is returned. */
1752
1753tree
1754walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1755 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1756{
1757 gimple ret;
1758 tree tree_ret;
1759 gimple stmt = gsi_stmt (*gsi);
1760
1761 if (wi)
0a35513e
AH
1762 {
1763 wi->gsi = *gsi;
1764 wi->removed_stmt = false;
726a989a 1765
0a35513e
AH
1766 if (wi->want_locations && gimple_has_location (stmt))
1767 input_location = gimple_location (stmt);
1768 }
726a989a
RB
1769
1770 ret = NULL;
1771
1772 /* Invoke the statement callback. Return if the callback handled
1773 all of STMT operands by itself. */
1774 if (callback_stmt)
1775 {
1776 bool handled_ops = false;
1777 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1778 if (handled_ops)
1779 return tree_ret;
1780
1781 /* If CALLBACK_STMT did not handle operands, it should not have
1782 a value to return. */
1783 gcc_assert (tree_ret == NULL);
1784
0a35513e
AH
1785 if (wi && wi->removed_stmt)
1786 return NULL;
1787
726a989a
RB
1788 /* Re-read stmt in case the callback changed it. */
1789 stmt = gsi_stmt (*gsi);
1790 }
1791
1792 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1793 if (callback_op)
1794 {
1795 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1796 if (tree_ret)
1797 return tree_ret;
1798 }
1799
1800 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1801 switch (gimple_code (stmt))
1802 {
1803 case GIMPLE_BIND:
355a7673
MM
1804 ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
1805 callback_op, wi);
726a989a
RB
1806 if (ret)
1807 return wi->callback_result;
1808 break;
1809
1810 case GIMPLE_CATCH:
355a7673
MM
1811 ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
1812 callback_op, wi);
726a989a
RB
1813 if (ret)
1814 return wi->callback_result;
1815 break;
1816
1817 case GIMPLE_EH_FILTER:
355a7673 1818 ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
726a989a
RB
1819 callback_op, wi);
1820 if (ret)
1821 return wi->callback_result;
1822 break;
1823
0a35513e 1824 case GIMPLE_EH_ELSE:
355a7673 1825 ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
0a35513e
AH
1826 callback_stmt, callback_op, wi);
1827 if (ret)
1828 return wi->callback_result;
355a7673 1829 ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
0a35513e
AH
1830 callback_stmt, callback_op, wi);
1831 if (ret)
1832 return wi->callback_result;
1833 break;
1834
726a989a 1835 case GIMPLE_TRY:
355a7673 1836 ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
726a989a
RB
1837 wi);
1838 if (ret)
1839 return wi->callback_result;
1840
355a7673 1841 ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
726a989a
RB
1842 callback_op, wi);
1843 if (ret)
1844 return wi->callback_result;
1845 break;
1846
1847 case GIMPLE_OMP_FOR:
355a7673 1848 ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
726a989a
RB
1849 callback_op, wi);
1850 if (ret)
1851 return wi->callback_result;
1852
1853 /* FALL THROUGH. */
1854 case GIMPLE_OMP_CRITICAL:
1855 case GIMPLE_OMP_MASTER:
acf0174b 1856 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
1857 case GIMPLE_OMP_ORDERED:
1858 case GIMPLE_OMP_SECTION:
1859 case GIMPLE_OMP_PARALLEL:
1860 case GIMPLE_OMP_TASK:
1861 case GIMPLE_OMP_SECTIONS:
1862 case GIMPLE_OMP_SINGLE:
acf0174b
JJ
1863 case GIMPLE_OMP_TARGET:
1864 case GIMPLE_OMP_TEAMS:
355a7673 1865 ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
0a35513e 1866 callback_op, wi);
726a989a
RB
1867 if (ret)
1868 return wi->callback_result;
1869 break;
1870
1871 case GIMPLE_WITH_CLEANUP_EXPR:
355a7673 1872 ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
726a989a
RB
1873 callback_op, wi);
1874 if (ret)
1875 return wi->callback_result;
1876 break;
1877
0a35513e 1878 case GIMPLE_TRANSACTION:
355a7673 1879 ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
0a35513e
AH
1880 callback_stmt, callback_op, wi);
1881 if (ret)
1882 return wi->callback_result;
1883 break;
1884
726a989a
RB
1885 default:
1886 gcc_assert (!gimple_has_substatements (stmt));
1887 break;
1888 }
1889
1890 return NULL;
1891}
1892
1893
1894/* Set sequence SEQ to be the GIMPLE body for function FN. */
1895
1896void
1897gimple_set_body (tree fndecl, gimple_seq seq)
1898{
1899 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1900 if (fn == NULL)
1901 {
1902 /* If FNDECL still does not have a function structure associated
1903 with it, then it does not make sense for it to receive a
1904 GIMPLE body. */
1905 gcc_assert (seq == NULL);
1906 }
1907 else
1908 fn->gimple_body = seq;
1909}
1910
1911
abbd64b9
JS
1912/* Return the body of GIMPLE statements for function FN. After the
1913 CFG pass, the function body doesn't exist anymore because it has
1914 been split up into basic blocks. In this case, it returns
1915 NULL. */
726a989a
RB
1916
1917gimple_seq
1918gimple_body (tree fndecl)
1919{
1920 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1921 return fn ? fn->gimple_body : NULL;
1922}
1923
39ecc018
JH
1924/* Return true when FNDECL has Gimple body either in unlowered
1925 or CFG form. */
1926bool
1927gimple_has_body_p (tree fndecl)
1928{
1929 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1930 return (gimple_body (fndecl) || (fn && fn->cfg));
1931}
726a989a 1932
25583c4f
RS
1933/* Return true if calls C1 and C2 are known to go to the same function. */
1934
1935bool
1936gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1937{
1938 if (gimple_call_internal_p (c1))
1939 return (gimple_call_internal_p (c2)
1940 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1941 else
1942 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1943 || (gimple_call_fndecl (c1)
1944 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1945}
1946
726a989a
RB
1947/* Detect flags from a GIMPLE_CALL. This is just like
1948 call_expr_flags, but for gimple tuples. */
1949
1950int
1951gimple_call_flags (const_gimple stmt)
1952{
1953 int flags;
1954 tree decl = gimple_call_fndecl (stmt);
726a989a
RB
1955
1956 if (decl)
1957 flags = flags_from_decl_or_type (decl);
25583c4f
RS
1958 else if (gimple_call_internal_p (stmt))
1959 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
726a989a 1960 else
97e03fa1 1961 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
726a989a 1962
9bb1a81b
JM
1963 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1964 flags |= ECF_NOTHROW;
1965
726a989a
RB
1966 return flags;
1967}
1968
25583c4f
RS
1969/* Return the "fn spec" string for call STMT. */
1970
1971static tree
1972gimple_call_fnspec (const_gimple stmt)
1973{
1974 tree type, attr;
1975
1976 type = gimple_call_fntype (stmt);
1977 if (!type)
1978 return NULL_TREE;
1979
1980 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1981 if (!attr)
1982 return NULL_TREE;
1983
1984 return TREE_VALUE (TREE_VALUE (attr));
1985}
1986
0b7b376d
RG
1987/* Detects argument flags for argument number ARG on call STMT. */
1988
1989int
1990gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1991{
25583c4f 1992 tree attr = gimple_call_fnspec (stmt);
0b7b376d 1993
25583c4f 1994 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
0b7b376d
RG
1995 return 0;
1996
1997 switch (TREE_STRING_POINTER (attr)[1 + arg])
1998 {
1999 case 'x':
2000 case 'X':
2001 return EAF_UNUSED;
2002
2003 case 'R':
2004 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2005
2006 case 'r':
2007 return EAF_NOCLOBBER | EAF_NOESCAPE;
2008
2009 case 'W':
2010 return EAF_DIRECT | EAF_NOESCAPE;
2011
2012 case 'w':
2013 return EAF_NOESCAPE;
2014
2015 case '.':
2016 default:
2017 return 0;
2018 }
2019}
2020
2021/* Detects return flags for the call STMT. */
2022
2023int
2024gimple_call_return_flags (const_gimple stmt)
2025{
25583c4f 2026 tree attr;
0b7b376d
RG
2027
2028 if (gimple_call_flags (stmt) & ECF_MALLOC)
2029 return ERF_NOALIAS;
2030
25583c4f
RS
2031 attr = gimple_call_fnspec (stmt);
2032 if (!attr || TREE_STRING_LENGTH (attr) < 1)
0b7b376d
RG
2033 return 0;
2034
2035 switch (TREE_STRING_POINTER (attr)[0])
2036 {
2037 case '1':
2038 case '2':
2039 case '3':
2040 case '4':
2041 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2042
2043 case 'm':
2044 return ERF_NOALIAS;
2045
2046 case '.':
2047 default:
2048 return 0;
2049 }
2050}
726a989a 2051
3dbe9454 2052
726a989a
RB
2053/* Return true if GS is a copy assignment. */
2054
2055bool
2056gimple_assign_copy_p (gimple gs)
2057{
3dbe9454
RG
2058 return (gimple_assign_single_p (gs)
2059 && is_gimple_val (gimple_op (gs, 1)));
726a989a
RB
2060}
2061
2062
2063/* Return true if GS is a SSA_NAME copy assignment. */
2064
2065bool
2066gimple_assign_ssa_name_copy_p (gimple gs)
2067{
3dbe9454 2068 return (gimple_assign_single_p (gs)
726a989a
RB
2069 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2070 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2071}
2072
2073
726a989a
RB
2074/* Return true if GS is an assignment with a unary RHS, but the
2075 operator has no effect on the assigned value. The logic is adapted
2076 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2077 instances in which STRIP_NOPS was previously applied to the RHS of
2078 an assignment.
2079
2080 NOTE: In the use cases that led to the creation of this function
2081 and of gimple_assign_single_p, it is typical to test for either
2082 condition and to proceed in the same manner. In each case, the
2083 assigned value is represented by the single RHS operand of the
2084 assignment. I suspect there may be cases where gimple_assign_copy_p,
2085 gimple_assign_single_p, or equivalent logic is used where a similar
2086 treatment of unary NOPs is appropriate. */
b8698a0f 2087
726a989a
RB
2088bool
2089gimple_assign_unary_nop_p (gimple gs)
2090{
3dbe9454 2091 return (is_gimple_assign (gs)
1a87cf0c 2092 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
726a989a
RB
2093 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2094 && gimple_assign_rhs1 (gs) != error_mark_node
2095 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2096 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2097}
2098
2099/* Set BB to be the basic block holding G. */
2100
2101void
2102gimple_set_bb (gimple stmt, basic_block bb)
2103{
2104 stmt->gsbase.bb = bb;
2105
2106 /* If the statement is a label, add the label to block-to-labels map
2107 so that we can speed up edge creation for GIMPLE_GOTOs. */
2108 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2109 {
2110 tree t;
2111 int uid;
2112
2113 t = gimple_label_label (stmt);
2114 uid = LABEL_DECL_UID (t);
2115 if (uid == -1)
2116 {
9771b263 2117 unsigned old_len = vec_safe_length (label_to_block_map);
726a989a
RB
2118 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2119 if (old_len <= (unsigned) uid)
2120 {
5006671f 2121 unsigned new_len = 3 * uid / 2 + 1;
726a989a 2122
9771b263 2123 vec_safe_grow_cleared (label_to_block_map, new_len);
726a989a
RB
2124 }
2125 }
2126
9771b263 2127 (*label_to_block_map)[uid] = bb;
726a989a
RB
2128 }
2129}
2130
2131
726a989a
RB
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Decompose EXPR into a subcode plus up to three flat operands and
     install them on the statement at *GSI.  */
  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
2158
2159
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      /* Copy all fields of the old statement, then mark the copy as a
	 singleton so it does not appear linked into any sequence.  */
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Install the new subcode and operands; trailing operand slots are
     only written when CODE actually uses them.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
2196
2197
2198/* Return the LHS of a statement that performs an assignment,
2199 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2200 for a call to a function that returns no value, or for a
2201 statement other than an assignment or a call. */
2202
2203tree
2204gimple_get_lhs (const_gimple stmt)
2205{
e0c68ce9 2206 enum gimple_code code = gimple_code (stmt);
726a989a
RB
2207
2208 if (code == GIMPLE_ASSIGN)
2209 return gimple_assign_lhs (stmt);
2210 else if (code == GIMPLE_CALL)
2211 return gimple_call_lhs (stmt);
2212 else
2213 return NULL_TREE;
2214}
2215
2216
2217/* Set the LHS of a statement that performs an assignment,
2218 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2219
2220void
2221gimple_set_lhs (gimple stmt, tree lhs)
2222{
e0c68ce9 2223 enum gimple_code code = gimple_code (stmt);
726a989a
RB
2224
2225 if (code == GIMPLE_ASSIGN)
2226 gimple_assign_set_lhs (stmt, lhs);
2227 else if (code == GIMPLE_CALL)
2228 gimple_call_set_lhs (stmt, lhs);
2229 else
c3284718 2230 gcc_unreachable ();
726a989a
RB
2231}
2232
2233
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  /* Detach the copy from STMT's sequence links.  */
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  /* The BLOCK is shared, not unshared.  */
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  /* The shallow memcpy above shared the iteration-variable array;
	     give the copy its own allocation before filling it in.  */
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
		(gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* Shared tail for all OMP statements: deep-copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
2401
2402
726a989a
RB
2403/* Return true if statement S has side-effects. We consider a
2404 statement to have side effects if:
2405
2406 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2407 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2408
2409bool
2410gimple_has_side_effects (const_gimple s)
2411{
b5b8b0ac
AO
2412 if (is_gimple_debug (s))
2413 return false;
2414
726a989a
RB
2415 /* We don't have to scan the arguments to check for
2416 volatile arguments, though, at present, we still
2417 do a scan to check for TREE_SIDE_EFFECTS. */
2418 if (gimple_has_volatile_ops (s))
2419 return true;
2420
179184e3
RG
2421 if (gimple_code (s) == GIMPLE_ASM
2422 && gimple_asm_volatile_p (s))
2423 return true;
2424
726a989a
RB
2425 if (is_gimple_call (s))
2426 {
723afc44 2427 int flags = gimple_call_flags (s);
726a989a 2428
723afc44
RG
2429 /* An infinite loop is considered a side effect. */
2430 if (!(flags & (ECF_CONST | ECF_PURE))
2431 || (flags & ECF_LOOPING_CONST_OR_PURE))
726a989a
RB
2432 return true;
2433
726a989a
RB
2434 return false;
2435 }
726a989a
RB
2436
2437 return false;
2438}
2439
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* Operand 0 of an assignment is its LHS; skip it unless stores
	 are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* For binary operations pass the divisor so division by a
	 nonzero constant can be proven trap-free.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
2488
726a989a
RB
/* Return true if statement S can trap.  Both memory operands and
   stores on the LHS are taken into account.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}
2496
726a989a
RB
/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  The LHS (the
   store) is deliberately not considered.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
2505
2506
2507/* Print debugging information for gimple stmts generated. */
2508
2509void
2510dump_gimple_statistics (void)
2511{
726a989a
RB
2512 int i, total_tuples = 0, total_bytes = 0;
2513
7aa6d18a
SB
2514 if (! GATHER_STATISTICS)
2515 {
2516 fprintf (stderr, "No gimple statistics\n");
2517 return;
2518 }
2519
726a989a
RB
2520 fprintf (stderr, "\nGIMPLE statements\n");
2521 fprintf (stderr, "Kind Stmts Bytes\n");
2522 fprintf (stderr, "---------------------------------------\n");
2523 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2524 {
2525 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2526 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2527 total_tuples += gimple_alloc_counts[i];
2528 total_bytes += gimple_alloc_sizes[i];
2529 }
2530 fprintf (stderr, "---------------------------------------\n");
2531 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2532 fprintf (stderr, "---------------------------------------\n");
726a989a
RB
2533}
2534
2535
726a989a
RB
2536/* Return the number of operands needed on the RHS of a GIMPLE
2537 assignment for an expression with tree code CODE. */
2538
2539unsigned
2540get_gimple_rhs_num_ops (enum tree_code code)
2541{
2542 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2543
2544 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2545 return 1;
2546 else if (rhs_class == GIMPLE_BINARY_RHS)
2547 return 2;
0354c0c7
BS
2548 else if (rhs_class == GIMPLE_TERNARY_RHS)
2549 return 3;
726a989a
RB
2550 else
2551 gcc_unreachable ();
2552}
2553
/* Expansion used while including all-tree.def below: map each tree code
   to the GIMPLE RHS class (single, unary, binary, ternary or invalid)
   that an assignment with that code on its RHS belongs to.  Note that
   comments must stay outside the macro body.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
 (unsigned char) \
 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
 : ((TYPE) == tcc_binary \
 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
 : ((TYPE) == tcc_constant \
 || (TYPE) == tcc_declaration \
 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
 : ((SYM) == TRUTH_AND_EXPR \
 || (SYM) == TRUTH_OR_EXPR \
 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
 : ((SYM) == COND_EXPR \
 || (SYM) == WIDEN_MULT_PLUS_EXPR \
 || (SYM) == WIDEN_MULT_MINUS_EXPR \
 || (SYM) == DOT_PROD_EXPR \
 || (SYM) == REALIGN_LOAD_EXPR \
 || (SYM) == VEC_COND_EXPR \
 || (SYM) == VEC_PERM_EXPR \
 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
 : ((SYM) == CONSTRUCTOR \
 || (SYM) == OBJ_TYPE_REF \
 || (SYM) == ASSERT_EXPR \
 || (SYM) == ADDR_EXPR \
 || (SYM) == WITH_SIZE_EXPR \
 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
 : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

/* Table indexed by tree code giving the RHS class of each code,
   built by expanding DEFTREECODE over every tree code.  */
const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2589
2590/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2591
2592/* Validation of GIMPLE expressions. */
2593
726a989a
RB
2594/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2595
2596bool
2597is_gimple_lvalue (tree t)
2598{
2599 return (is_gimple_addressable (t)
2600 || TREE_CODE (t) == WITH_SIZE_EXPR
2601 /* These are complex lvalues, but don't have addresses, so they
2602 go here. */
2603 || TREE_CODE (t) == BIT_FIELD_REF);
2604}
2605
2606/* Return true if T is a GIMPLE condition. */
2607
2608bool
2609is_gimple_condexpr (tree t)
2610{
2611 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
f9613c9a 2612 && !tree_could_throw_p (t)
726a989a
RB
2613 && is_gimple_val (TREE_OPERAND (t, 0))
2614 && is_gimple_val (TREE_OPERAND (t, 1))));
2615}
2616
2617/* Return true if T is something whose address can be taken. */
2618
2619bool
2620is_gimple_addressable (tree t)
2621{
70f34814
RG
2622 return (is_gimple_id (t) || handled_component_p (t)
2623 || TREE_CODE (t) == MEM_REF);
726a989a
RB
2624}
2625
2626/* Return true if T is a valid gimple constant. */
2627
2628bool
2629is_gimple_constant (const_tree t)
2630{
2631 switch (TREE_CODE (t))
2632 {
2633 case INTEGER_CST:
2634 case REAL_CST:
2635 case FIXED_CST:
2636 case STRING_CST:
2637 case COMPLEX_CST:
2638 case VECTOR_CST:
2639 return true;
2640
726a989a
RB
2641 default:
2642 return false;
2643 }
2644}
2645
2646/* Return true if T is a gimple address. */
2647
2648bool
2649is_gimple_address (const_tree t)
2650{
2651 tree op;
2652
2653 if (TREE_CODE (t) != ADDR_EXPR)
2654 return false;
2655
2656 op = TREE_OPERAND (t, 0);
2657 while (handled_component_p (op))
2658 {
2659 if ((TREE_CODE (op) == ARRAY_REF
2660 || TREE_CODE (op) == ARRAY_RANGE_REF)
2661 && !is_gimple_val (TREE_OPERAND (op, 1)))
2662 return false;
2663
2664 op = TREE_OPERAND (op, 0);
2665 }
2666
70f34814 2667 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
726a989a
RB
2668 return true;
2669
2670 switch (TREE_CODE (op))
2671 {
2672 case PARM_DECL:
2673 case RESULT_DECL:
2674 case LABEL_DECL:
2675 case FUNCTION_DECL:
2676 case VAR_DECL:
2677 case CONST_DECL:
2678 return true;
2679
2680 default:
2681 return false;
2682 }
2683}
2684
00fc2333
JH
2685/* Return true if T is a gimple invariant address. */
2686
2687bool
2688is_gimple_invariant_address (const_tree t)
2689{
2690 const_tree op;
2691
2692 if (TREE_CODE (t) != ADDR_EXPR)
2693 return false;
2694
2695 op = strip_invariant_refs (TREE_OPERAND (t, 0));
70f34814
RG
2696 if (!op)
2697 return false;
00fc2333 2698
70f34814
RG
2699 if (TREE_CODE (op) == MEM_REF)
2700 {
2701 const_tree op0 = TREE_OPERAND (op, 0);
2702 return (TREE_CODE (op0) == ADDR_EXPR
2703 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2704 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2705 }
2706
2707 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
00fc2333
JH
2708}
2709
2710/* Return true if T is a gimple invariant address at IPA level
2711 (so addresses of variables on stack are not allowed). */
2712
2713bool
2714is_gimple_ip_invariant_address (const_tree t)
2715{
2716 const_tree op;
2717
2718 if (TREE_CODE (t) != ADDR_EXPR)
2719 return false;
2720
2721 op = strip_invariant_refs (TREE_OPERAND (t, 0));
39cc8c3d
MJ
2722 if (!op)
2723 return false;
2724
2725 if (TREE_CODE (op) == MEM_REF)
2726 {
2727 const_tree op0 = TREE_OPERAND (op, 0);
2728 return (TREE_CODE (op0) == ADDR_EXPR
2729 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2730 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2731 }
00fc2333 2732
39cc8c3d 2733 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
726a989a
RB
2734}
2735
2736/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2737 form of function invariant. */
2738
2739bool
2740is_gimple_min_invariant (const_tree t)
2741{
2742 if (TREE_CODE (t) == ADDR_EXPR)
2743 return is_gimple_invariant_address (t);
2744
2745 return is_gimple_constant (t);
2746}
2747
00fc2333
JH
2748/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2749 form of gimple minimal invariant. */
2750
2751bool
2752is_gimple_ip_invariant (const_tree t)
2753{
2754 if (TREE_CODE (t) == ADDR_EXPR)
2755 return is_gimple_ip_invariant_address (t);
2756
2757 return is_gimple_constant (t);
2758}
2759
726a989a
RB
2760/* Return true if T is a variable. */
2761
2762bool
2763is_gimple_variable (tree t)
2764{
2765 return (TREE_CODE (t) == VAR_DECL
2766 || TREE_CODE (t) == PARM_DECL
2767 || TREE_CODE (t) == RESULT_DECL
2768 || TREE_CODE (t) == SSA_NAME);
2769}
2770
2771/* Return true if T is a GIMPLE identifier (something with an address). */
2772
2773bool
2774is_gimple_id (tree t)
2775{
2776 return (is_gimple_variable (t)
2777 || TREE_CODE (t) == FUNCTION_DECL
2778 || TREE_CODE (t) == LABEL_DECL
2779 || TREE_CODE (t) == CONST_DECL
2780 /* Allow string constants, since they are addressable. */
2781 || TREE_CODE (t) == STRING_CST);
2782}
2783
726a989a
RB
/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  /* Virtual operands never qualify as registers.  */
  if (virtual_operand_p (t))
    return false;

  /* Any non-virtual SSA name is by construction a register.  */
  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
2832
2833
726a989a
RB
2834/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2835
2836bool
2837is_gimple_val (tree t)
2838{
2839 /* Make loads from volatiles and memory vars explicit. */
2840 if (is_gimple_variable (t)
2841 && is_gimple_reg_type (TREE_TYPE (t))
2842 && !is_gimple_reg (t))
2843 return false;
2844
726a989a
RB
2845 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2846}
2847
2848/* Similarly, but accept hard registers as inputs to asm statements. */
2849
2850bool
2851is_gimple_asm_val (tree t)
2852{
2853 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2854 return true;
2855
2856 return is_gimple_val (t);
2857}
2858
2859/* Return true if T is a GIMPLE minimal lvalue. */
2860
2861bool
2862is_gimple_min_lval (tree t)
2863{
ba4d8f9d
RG
2864 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2865 return false;
70f34814 2866 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
726a989a
RB
2867}
2868
726a989a
RB
2869/* Return true if T is a valid function operand of a CALL_EXPR. */
2870
2871bool
2872is_gimple_call_addr (tree t)
2873{
2874 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2875}
2876
70f34814
RG
2877/* Return true if T is a valid address operand of a MEM_REF. */
2878
2879bool
2880is_gimple_mem_ref_addr (tree t)
2881{
2882 return (is_gimple_reg (t)
2883 || TREE_CODE (t) == INTEGER_CST
2884 || (TREE_CODE (t) == ADDR_EXPR
2885 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2886 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2887}
2888
726a989a
RB
2889
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  /* Strip a MEM[&x] or TMR[&x] wrapper down to the decl itself.  */
  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  /* ??? Either the alias oracle or all callers need to properly deal
     with WITH_SIZE_EXPRs before we can look through those.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    return NULL_TREE;

  return t;
}
2917
/* Recompute TREE_SIDE_EFFECTS for T from its operands and volatility.
   Statement-like codes that always have side effects are left alone.  */

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from the node's own volatility, then OR in any
	 side effects from the operands.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
2968
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
	  || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only hand back the result if it is actually a valid GIMPLE
     condition; otherwise signal failure to the caller.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
3010
e6c99067
DN
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  vec<tree> vargs;
  vargs.create (nargs);
  gimple new_stmt;

  /* Collect the arguments that survive the filtering.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  /* Internal calls carry an internal fn code instead of a callee tree.  */
  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry over the virtual operands from the original call.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  /* SSA operands need to be (re)computed for the new statement.  */
  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
3048
5006671f 3049
d7f09764 3050
d025732d
EB
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Fields match when the normalized byte offsets agree and the
	 residual bit offsets within a byte agree as well.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
3101
825b27de
RG
3102/* Returning a hash value for gimple type TYPE combined with VAL.
3103
3104 The hash value returned is equal for types considered compatible
3105 by gimple_canonical_types_compatible_p. */
3106
3107static hashval_t
3108iterative_hash_canonical_type (tree type, hashval_t val)
3109{
3110 hashval_t v;
3111 void **slot;
3112 struct tree_int_map *mp, m;
3113
3114 m.base.from = type;
3115 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
3116 && *slot)
d0340959 3117 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
825b27de
RG
3118
3119 /* Combine a few common features of types so that types are grouped into
3120 smaller sets; when searching for existing matching types to merge,
3121 only existing types having the same features as the new type will be
3122 checked. */
3123 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
825b27de 3124 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
61332f77
RG
3125 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
3126 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
825b27de
RG
3127
3128 /* Incorporate common features of numerical types. */
3129 if (INTEGRAL_TYPE_P (type)
3130 || SCALAR_FLOAT_TYPE_P (type)
61332f77 3131 || FIXED_POINT_TYPE_P (type)
61332f77
RG
3132 || TREE_CODE (type) == OFFSET_TYPE
3133 || POINTER_TYPE_P (type))
825b27de
RG
3134 {
3135 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
825b27de
RG
3136 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3137 }
3138
a5e0cd1d
MG
3139 if (VECTOR_TYPE_P (type))
3140 {
3141 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
3142 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3143 }
3144
3145 if (TREE_CODE (type) == COMPLEX_TYPE)
3146 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3147
825b27de
RG
3148 /* For pointer and reference types, fold in information about the type
3149 pointed to but do not recurse to the pointed-to type. */
3150 if (POINTER_TYPE_P (type))
3151 {
3152 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
61332f77
RG
3153 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
3154 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
825b27de
RG
3155 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3156 }
3157
2e745103 3158 /* For integer types hash only the string flag. */
825b27de 3159 if (TREE_CODE (type) == INTEGER_TYPE)
3ac8781c 3160 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
825b27de 3161
2e745103
EB
3162 /* For array types hash the domain bounds and the string flag. */
3163 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
825b27de
RG
3164 {
3165 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
2e745103
EB
3166 /* OMP lowering can introduce error_mark_node in place of
3167 random local decls in types. */
3168 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3169 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
3170 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3171 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
825b27de
RG
3172 }
3173
3174 /* Recurse for aggregates with a single element type. */
3175 if (TREE_CODE (type) == ARRAY_TYPE
3176 || TREE_CODE (type) == COMPLEX_TYPE
3177 || TREE_CODE (type) == VECTOR_TYPE)
3178 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3179
3180 /* Incorporate function return and argument types. */
3181 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3182 {
3183 unsigned na;
3184 tree p;
3185
3186 /* For method types also incorporate their parent class. */
3187 if (TREE_CODE (type) == METHOD_TYPE)
3188 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
3189
6a20ce76 3190 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
825b27de
RG
3191
3192 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3193 {
6a20ce76 3194 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
825b27de
RG
3195 na++;
3196 }
3197
3198 v = iterative_hash_hashval_t (na, v);
3199 }
3200
aa47290b 3201 if (RECORD_OR_UNION_TYPE_P (type))
825b27de
RG
3202 {
3203 unsigned nf;
3204 tree f;
3205
3206 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
e7cfe241
RG
3207 if (TREE_CODE (f) == FIELD_DECL)
3208 {
3209 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
3210 nf++;
3211 }
825b27de
RG
3212
3213 v = iterative_hash_hashval_t (nf, v);
3214 }
3215
3216 /* Cache the just computed hash value. */
3217 mp = ggc_alloc_cleared_tree_int_map ();
3218 mp->base.from = type;
3219 mp->to = v;
3220 *slot = (void *) mp;
3221
3222 return iterative_hash_hashval_t (v, val);
3223}
3224
a844a60b
RG
3225static hashval_t
3226gimple_canonical_type_hash (const void *p)
3227{
825b27de
RG
3228 if (canonical_type_hash_cache == NULL)
3229 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
3230 tree_int_map_eq, NULL);
3231
3232 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
a844a60b
RG
3233}
3234
d7f09764 3235
93b2a207 3236
4490cae6 3237
825b27de
RG
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Address-taken-ness must agree as it is hashed as well.  */
  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  Two
		 PLACEHOLDER_EXPR bounds are considered equal without
		 looking at their contents.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  /* Walk both argument lists in lockstep, comparing each
	     argument type canonically.  */
	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  /* Differing argument counts mean differing types.  */
	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
3451
3452
4490cae6
RG
3453/* Returns nonzero if P1 and P2 are equal. */
3454
3455static int
3456gimple_canonical_type_eq (const void *p1, const void *p2)
3457{
3458 const_tree t1 = (const_tree) p1;
3459 const_tree t2 = (const_tree) p2;
825b27de
RG
3460 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
3461 CONST_CAST_TREE (t2));
4490cae6
RG
3462}
3463
3464/* Register type T in the global type table gimple_types.
3465 If another type T', compatible with T, already existed in
3466 gimple_types then return T', otherwise return T. This is used by
96d91dcf
RG
3467 LTO to merge identical types read from different TUs.
3468
3469 ??? This merging does not exactly match how the tree.c middle-end
3470 functions will assign TYPE_CANONICAL when new types are created
3471 during optimization (which at least happens for pointer and array
3472 types). */
4490cae6
RG
3473
3474tree
3475gimple_register_canonical_type (tree t)
3476{
3477 void **slot;
3478
3479 gcc_assert (TYPE_P (t));
3480
61332f77
RG
3481 if (TYPE_CANONICAL (t))
3482 return TYPE_CANONICAL (t);
3483
4490cae6 3484 if (gimple_canonical_types == NULL)
a844a60b 3485 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4490cae6
RG
3486 gimple_canonical_type_eq, 0);
3487
3488 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
3489 if (*slot
3490 && *(tree *)slot != t)
3491 {
3492 tree new_type = (tree) *((tree *) slot);
3493
3494 TYPE_CANONICAL (t) = new_type;
3495 t = new_type;
3496 }
3497 else
3498 {
3499 TYPE_CANONICAL (t) = t;
4a2ac96f
RG
3500 *slot = (void *) t;
3501 }
d7f09764
DN
3502
3503 return t;
3504}
3505
3506
/* Show statistics on references to the global type table gimple_types.
   PFX is a prefix printed in front of every line, identifying the
   caller (e.g. the LTO phase) in the dump.  Output goes to stderr.  */

void
print_gimple_types_stats (const char *pfx)
{
  /* Either table may never have been created; report that explicitly
     instead of dereferencing a NULL htab.  */
  if (gimple_canonical_types)
    fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
	     "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "[%s] GIMPLE canonical type table is empty\n", pfx);
  if (canonical_type_hash_cache)
    fprintf (stderr, "[%s] GIMPLE canonical type hash table: size %ld, "
	     "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "[%s] GIMPLE canonical type hash table is empty\n", pfx);
}
3533
0d0bfe17
RG
3534/* Free the gimple type hashtables used for LTO type merging. */
3535
3536void
3537free_gimple_type_tables (void)
3538{
4490cae6
RG
3539 if (gimple_canonical_types)
3540 {
3541 htab_delete (gimple_canonical_types);
3542 gimple_canonical_types = NULL;
3543 }
a844a60b
RG
3544 if (canonical_type_hash_cache)
3545 {
3546 htab_delete (canonical_type_hash_cache);
3547 canonical_type_hash_cache = NULL;
3548 }
0d0bfe17
RG
3549}
3550
d7f09764
DN
3551
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   First the standard C type nodes are matched exactly; then the
   fixed-point type nodes (via the GIMPLE_FIXED_* macros); finally,
   for remaining integral types, a node with matching mode and
   precision is looked up, falling back to building a nonstandard
   integer type.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  /* Compare against the main variant so qualified variants are
     handled uniformly.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  /* __int128 nodes only exist on targets that support them.  */
  if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
    return unsignedp
           ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Map each fixed-point type node to its other-signedness twin.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; synthesize one of the right precision.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
3718
3719
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}
3727
3728
/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
3736
3737
3738/* Return the typed-based alias set for T, which may be an expression
3739 or a type. Return -1 if we don't do anything special. */
3740
3741alias_set_type
3742gimple_get_alias_set (tree t)
3743{
3744 tree u;
3745
3746 /* Permit type-punning when accessing a union, provided the access
3747 is directly through the union. For example, this code does not
3748 permit taking the address of a union member and then storing
3749 through it. Even the type-punning allowed here is a GCC
3750 extension, albeit a common and useful one; the C standard says
3751 that such accesses have implementation-defined behavior. */
3752 for (u = t;
3753 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3754 u = TREE_OPERAND (u, 0))
3755 if (TREE_CODE (u) == COMPONENT_REF
3756 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3757 return 0;
3758
3759 /* That's all the expressions we handle specially. */
3760 if (!TYPE_P (t))
3761 return -1;
3762
3763 /* For convenience, follow the C standard when dealing with
3764 character types. Any object may be accessed via an lvalue that
3765 has character type. */
3766 if (t == char_type_node
3767 || t == signed_char_type_node
3768 || t == unsigned_char_type_node)
3769 return 0;
3770
3771 /* Allow aliasing between signed and unsigned variants of the same
3772 type. We treat the signed variant as canonical. */
3773 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3774 {
3775 tree t1 = gimple_signed_type (t);
3776
3777 /* t1 == t can happen for boolean nodes which are always unsigned. */
3778 if (t1 != t)
3779 return get_alias_set (t1);
3780 }
d7f09764
DN
3781
3782 return -1;
3783}
3784
3785
346ef3fa
RG
3786/* From a tree operand OP return the base of a load or store operation
3787 or NULL_TREE if OP is not a load or a store. */
3788
3789static tree
3790get_base_loadstore (tree op)
3791{
3792 while (handled_component_p (op))
3793 op = TREE_OPERAND (op, 0);
3794 if (DECL_P (op)
3795 || INDIRECT_REF_P (op)
70f34814 3796 || TREE_CODE (op) == MEM_REF
346ef3fa
RG
3797 || TREE_CODE (op) == TARGET_MEM_REF)
3798 return op;
3799 return NULL_TREE;
3800}
3801
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* A single assignment: the LHS may be a store, the RHS may be a
	 load and/or contain taken addresses.  */
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      /* Strip component references to expose ADDR_EXPRs and memory
	 reference bases directly.  */
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      /* Addresses may also be taken inside constructor
		 elements.  */
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), data);
	    }
	  /* A TARGET_MEM_REF on the LHS can also embed a taken
	     address in its base.  */
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Non-single assignments and conditions: scan all operands for
	 taken addresses.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), data);
	    }
	}
    }
  else if (is_gimple_call (stmt))
    {
      /* Calls: the LHS may be a store, each argument may be a load or
	 a taken address, and the static chain may be a taken address.  */
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      /* With the return-slot optimization the callee writes directly
	 into the LHS object, so its address is effectively taken.  */
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      /* Inline asm: outputs may be stores (and address-taken when the
	 constraint only allows memory), inputs may be loads or taken
	 addresses depending on their constraints.  */
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		/* Output constraints must be collected for parsing the
		   input constraints below.  */
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments may contain taken addresses.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      /* A computed goto takes the address of its destination label.  */
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
    }

  return ret;
}
4018
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
4030
ccacdf06
RG
4031/* Helper for gimple_ior_addresses_taken_1. */
4032
4033static bool
4034gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
4035 tree addr, void *data)
4036{
4037 bitmap addresses_taken = (bitmap)data;
2ea9dc64
RG
4038 addr = get_base_address (addr);
4039 if (addr
4040 && DECL_P (addr))
ccacdf06
RG
4041 {
4042 bitmap_set_bit (addresses_taken, DECL_UID (addr));
4043 return true;
4044 }
4045 return false;
4046}
4047
4048/* Set the bit for the uid of all decls that have their address taken
4049 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
4050 were any in this stmt. */
4051
4052bool
4053gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
4054{
4055 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
4056 gimple_ior_addresses_taken_1);
4057}
4058
4537ec0c
DN
4059
4060/* Return a printable name for symbol DECL. */
4061
4062const char *
4063gimple_decl_printable_name (tree decl, int verbosity)
4064{
98b2dfbb
RG
4065 if (!DECL_NAME (decl))
4066 return NULL;
4537ec0c
DN
4067
4068 if (DECL_ASSEMBLER_NAME_SET_P (decl))
4069 {
4070 const char *str, *mangled_str;
4071 int dmgl_opts = DMGL_NO_OPTS;
4072
4073 if (verbosity >= 2)
4074 {
4075 dmgl_opts = DMGL_VERBOSE
4537ec0c
DN
4076 | DMGL_ANSI
4077 | DMGL_GNU_V3
4078 | DMGL_RET_POSTFIX;
4079 if (TREE_CODE (decl) == FUNCTION_DECL)
4080 dmgl_opts |= DMGL_PARAMS;
4081 }
4082
4083 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4084 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
4085 return (str) ? str : mangled_str;
4086 }
4087
4088 return IDENTIFIER_POINTER (DECL_NAME (decl));
4089}
4090
25ae5027
DS
4091/* Return TRUE iff stmt is a call to a built-in function. */
4092
4093bool
4094is_gimple_builtin_call (gimple stmt)
4095{
4096 tree callee;
4097
4098 if (is_gimple_call (stmt)
4099 && (callee = gimple_call_fndecl (stmt))
4100 && is_builtin_fn (callee)
4101 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
4102 return true;
4103
4104 return false;
4105}
4106
3626621a
RB
4107/* Return true when STMTs arguments match those of FNDECL. */
4108
4109static bool
4110validate_call (gimple stmt, tree fndecl)
4111{
4112 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4113 unsigned nargs = gimple_call_num_args (stmt);
4114 for (unsigned i = 0; i < nargs; ++i)
4115 {
4116 /* Variadic args follow. */
4117 if (!targs)
4118 return true;
4119 tree arg = gimple_call_arg (stmt, i);
4120 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
4121 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
4122 ;
4123 else if (POINTER_TYPE_P (TREE_TYPE (arg))
4124 && POINTER_TYPE_P (TREE_VALUE (targs)))
4125 ;
4126 else if (TREE_CODE (TREE_TYPE (arg))
4127 != TREE_CODE (TREE_VALUE (targs)))
4128 return false;
4129 targs = TREE_CHAIN (targs);
4130 }
4131 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
4132 return false;
4133 return true;
4134}
4135
4136/* Return true when STMT is builtins call to CLASS. */
4137
4138bool
4139gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
4140{
4141 tree fndecl;
4142 if (is_gimple_call (stmt)
4143 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4144 && DECL_BUILT_IN_CLASS (fndecl) == klass)
4145 return validate_call (stmt, fndecl);
4146 return false;
4147}
4148
4149/* Return true when STMT is builtins call to CODE of CLASS. */
c54c785d
JH
4150
4151bool
4152gimple_call_builtin_p (gimple stmt, enum built_in_function code)
4153{
4154 tree fndecl;
3626621a
RB
4155 if (is_gimple_call (stmt)
4156 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4157 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4158 && DECL_FUNCTION_CODE (fndecl) == code)
4159 return validate_call (stmt, fndecl);
4160 return false;
c54c785d
JH
4161}
4162
edcdea5b
NF
4163/* Return true if STMT clobbers memory. STMT is required to be a
4164 GIMPLE_ASM. */
4165
4166bool
4167gimple_asm_clobbers_memory_p (const_gimple stmt)
4168{
4169 unsigned i;
4170
4171 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
4172 {
4173 tree op = gimple_asm_clobber_op (stmt, i);
4174 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
4175 return true;
4176 }
4177
4178 return false;
4179}
475b8f37
DN
4180
4181
7a300452
AM
/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
	If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
	From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
	A type conversion is useless if the operations for the target type
	is a subset of the operations for the source type.  For example
	casts to void* are useless, casts from void* are not (void* can't
	be dereferenced or offsetted, but copied, hence its set of operations
	is a strict subset of that of all other data pointer types).  Casts
	to const T* are useless (can't be written to), casts from const T*
	to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	  != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
	return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  /* Identical main variants convert trivially.  */
  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions unless we
     deal with aggregate types in which case we defer to later checks.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
      && !AGGREGATE_TYPE_P (inner_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
	 of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
	   != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
	  && TYPE_PRECISION (outer_type) != 1)
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;

      /* We do not care for const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.
     NOTE(review): the guard below compares TYPE_PRECISION rather than
     TYPE_VECTOR_SUBPARTS as this comment suggests -- confirm which is
     intended before relying on the comment.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      /* Walk both parameter lists in lockstep; every pair must be
	 compatible (on main variants, qualifiers do not matter).  */
      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types involving to be structurally
     compared types.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
4424
4425/* Return true if a conversion from either type of TYPE1 and TYPE2
4426 to the other is not required. Otherwise return false. */
4427
4428bool
4429types_compatible_p (tree type1, tree type2)
4430{
4431 return (type1 == type2
4432 || (useless_type_conversion_p (type1, type2)
4433 && useless_type_conversion_p (type2, type1)));
4434}
4435
80560f95
AM
4436/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
4437
4438void
4439dump_decl_set (FILE *file, bitmap set)
4440{
4441 if (set)
4442 {
4443 bitmap_iterator bi;
4444 unsigned i;
4445
4446 fprintf (file, "{ ");
4447
4448 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
4449 {
4450 fprintf (file, "D.%u", i);
4451 fprintf (file, " ");
4452 }
4453
4454 fprintf (file, "}");
4455 }
4456 else
4457 fprintf (file, "NIL");
4458}
7a300452 4459
1df9f5a9
AM
4460/* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
4461 coalescing together, false otherwise.
4462
4463 This must stay consistent with var_map_base_init in tree-ssa-live.c. */
4464
4465bool
4466gimple_can_coalesce_p (tree name1, tree name2)
4467{
4468 /* First check the SSA_NAME's associated DECL. We only want to
4469 coalesce if they have the same DECL or both have no associated DECL. */
4470 tree var1 = SSA_NAME_VAR (name1);
4471 tree var2 = SSA_NAME_VAR (name2);
4472 var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
4473 var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
4474 if (var1 != var2)
4475 return false;
4476
4477 /* Now check the types. If the types are the same, then we should
4478 try to coalesce V1 and V2. */
4479 tree t1 = TREE_TYPE (name1);
4480 tree t2 = TREE_TYPE (name2);
4481 if (t1 == t2)
4482 return true;
4483
4484 /* If the types are not the same, check for a canonical type match. This
4485 (for example) allows coalescing when the types are fundamentally the
4486 same, but just have different names.
4487
4488 Note pointer types with different address spaces may have the same
4489 canonical type. Those are rejected for coalescing by the
4490 types_compatible_p check. */
4491 if (TYPE_CANONICAL (t1)
4492 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)
4493 && types_compatible_p (t1, t2))
4494 return true;
4495
4496 return false;
4497}
3d9c733e
AM
4498
4499/* Return true when CALL is a call stmt that definitely doesn't
4500 free any memory or makes it unavailable otherwise. */
4501bool
4502nonfreeing_call_p (gimple call)
4503{
4504 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
4505 && gimple_call_flags (call) & ECF_LEAF)
4506 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
4507 {
4508 /* Just in case these become ECF_LEAF in the future. */
4509 case BUILT_IN_FREE:
4510 case BUILT_IN_TM_FREE:
4511 case BUILT_IN_REALLOC:
4512 case BUILT_IN_STACK_RESTORE:
4513 return false;
4514 default:
4515 return true;
4516 }
4517
4518 return false;
4519}
4520
726a989a 4521#include "gt-gimple.h"