/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

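/* For example (an illustrative sketch, not part of the real interface):
   given a statement STMT, the operand vector can be located through
   these tables as

     tree *ops = (tree *) ((char *) stmt
			   + gimple_ops_offset_[gimple_statement_structure (stmt)]);

   which is essentially what gimple_ops in gimple.h does.  */
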
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}

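/* For instance (illustrative only): GIMPLE_ASSIGN maps to a structure
   whose trailing 1-element tree array already provides the first operand
   slot, so a three-operand assignment is allocated as

     gimple_size (GIMPLE_ASSIGN) + 2 * sizeof (tree)

   i.e. the base structure plus two extra operand slots.  */
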
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

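/* Usage sketch (illustrative only; FNDECL, A, B, LHS and SEQ are
   hypothetical): building 'lhs = foo (a, b)' and appending it to a
   sequence:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);
     gimple_seq_add_stmt (&seq, call);

   gimple_call_set_lhs is declared in gimple.h.  */
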
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which can be a single operand or a
   unary, binary or ternary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1, 2 or 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
  			          PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

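/* Usage sketch (illustrative only; LHS, A and B are hypothetical gimple
   values of compatible types): 'lhs = a + b' can be built with

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);

   while a plain copy 'lhs = a' is gimple_build_assign (lhs, a).  Both
   entry points are macros in gimple.h wrapping the *_stat functions
   defined above.  */
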
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

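/* For instance (illustrative only): 'if (x < 10) goto l1; else goto l2;'
   could be built as

     gimple cond = gimple_build_cond (LT_EXPR, x,
				      build_int_cst (TREE_TYPE (x), 10),
				      l1, l2);

   Once a CFG exists the outcome is encoded by edges instead, so the
   labels may also be NULL_TREE.  */
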
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

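/* Usage sketch (illustrative only): a memory-clobbering asm such as
   'asm volatile ("" : : : "memory")' could be built roughly as

     VEC(tree,gc) *clobbers = NULL;
     VEC_safe_push (tree, gc, clobbers,
		    build_tree_list (NULL_TREE, build_string (6, "memory")));
     gimple stmt = gimple_build_asm_vec ("", NULL, NULL, clobbers, NULL);
     gimple_asm_set_volatile (stmt, true);

   gimple_asm_set_volatile is declared in gimple.h.  */
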
/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

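/* For instance (illustrative only; DEFAULT_CASE, CASE1 and CASE2 are
   hypothetical CASE_LABEL_EXPRs, e.g. made with build_case_label or
   equivalent):

     gimple s = gimple_build_switch (2, index, default_case, case1, case2);

   builds a switch over INDEX with two case labels plus a default.  */
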
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
    = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

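/* Typical usage (illustrative only; LAB, RETVAL are hypothetical): the
   constructors above combine with gimple_seq_add_stmt to grow a sequence
   incrementally, e.g.

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_label (lab));
     gimple_seq_add_stmt (&seq, gimple_build_return (retval));

   starting from NULL as noted in the constructor comments above.  */
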

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}

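/* Example callback (an illustrative sketch only; find_call_stmt is a
   hypothetical name): stopping the walk at the first GIMPLE_CALL of a
   sequence.  Returning any non-NULL tree from the callback stops the walk.

     static tree
     find_call_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 {
	   *handled_ops_p = true;
	   return integer_zero_node;
	 }
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple first_call = walk_gimple_seq (seq, find_call_stmt, NULL, &wi);  */
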

/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
	                       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
          /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	}
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
	                     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
	                     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
		             callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

726a989a
RB
1901/* Detect flags from a GIMPLE_CALL. This is just like
1902 call_expr_flags, but for gimple tuples. */
1903
1904int
1905gimple_call_flags (const_gimple stmt)
1906{
1907 int flags;
1908 tree decl = gimple_call_fndecl (stmt);
726a989a
RB
1909
1910 if (decl)
1911 flags = flags_from_decl_or_type (decl);
25583c4f
RS
1912 else if (gimple_call_internal_p (stmt))
1913 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
726a989a 1914 else
97e03fa1 1915 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
726a989a 1916
9bb1a81b
JM
1917 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1918 flags |= ECF_NOTHROW;
1919
726a989a
RB
1920 return flags;
1921}
1922
25583c4f
RS
1923/* Return the "fn spec" string for call STMT. */
1924
1925static tree
1926gimple_call_fnspec (const_gimple stmt)
1927{
1928 tree type, attr;
1929
1930 type = gimple_call_fntype (stmt);
1931 if (!type)
1932 return NULL_TREE;
1933
1934 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1935 if (!attr)
1936 return NULL_TREE;
1937
1938 return TREE_VALUE (TREE_VALUE (attr));
1939}
1940
0b7b376d
RG
1941/* Detect argument flags for argument number ARG on call STMT. */
1942
1943int
1944gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1945{
25583c4f 1946 tree attr = gimple_call_fnspec (stmt);
0b7b376d 1947
25583c4f 1948 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
0b7b376d
RG
1949 return 0;
1950
1951 switch (TREE_STRING_POINTER (attr)[1 + arg])
1952 {
1953 case 'x':
1954 case 'X':
1955 return EAF_UNUSED;
1956
1957 case 'R':
1958 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1959
1960 case 'r':
1961 return EAF_NOCLOBBER | EAF_NOESCAPE;
1962
1963 case 'W':
1964 return EAF_DIRECT | EAF_NOESCAPE;
1965
1966 case 'w':
1967 return EAF_NOESCAPE;
1968
1969 case '.':
1970 default:
1971 return 0;
1972 }
1973}
1974
1975/* Detect return flags for the call STMT. */
1976
1977int
1978gimple_call_return_flags (const_gimple stmt)
1979{
25583c4f 1980 tree attr;
0b7b376d
RG
1981
1982 if (gimple_call_flags (stmt) & ECF_MALLOC)
1983 return ERF_NOALIAS;
1984
25583c4f
RS
1985 attr = gimple_call_fnspec (stmt);
1986 if (!attr || TREE_STRING_LENGTH (attr) < 1)
0b7b376d
RG
1987 return 0;
1988
1989 switch (TREE_STRING_POINTER (attr)[0])
1990 {
1991 case '1':
1992 case '2':
1993 case '3':
1994 case '4':
1995 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1996
1997 case 'm':
1998 return ERF_NOALIAS;
1999
2000 case '.':
2001 default:
2002 return 0;
2003 }
2004}
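
/* Illustrative sketch, not part of the original source: how the two
   routines above decode a "fn spec" string.  Character 0 describes the
   return value and character 1 + N describes argument N, so for a call
   to a function whose fntype carries the (hypothetical) fnspec "1rW":

     gimple_call_return_flags (call) == ERF_RETURNS_ARG | 0
					(the call returns argument 0)
     gimple_call_arg_flags (call, 0) == EAF_NOCLOBBER | EAF_NOESCAPE
     gimple_call_arg_flags (call, 1) == EAF_DIRECT | EAF_NOESCAPE  */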
726a989a 2005
3dbe9454 2006
726a989a
RB
2007/* Return true if GS is a copy assignment. */
2008
2009bool
2010gimple_assign_copy_p (gimple gs)
2011{
3dbe9454
RG
2012 return (gimple_assign_single_p (gs)
2013 && is_gimple_val (gimple_op (gs, 1)));
726a989a
RB
2014}
2015
2016
2017/* Return true if GS is a SSA_NAME copy assignment. */
2018
2019bool
2020gimple_assign_ssa_name_copy_p (gimple gs)
2021{
3dbe9454 2022 return (gimple_assign_single_p (gs)
726a989a
RB
2023 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2024 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2025}
2026
2027
726a989a
RB
2028/* Return true if GS is an assignment with a unary RHS, but the
2029 operator has no effect on the assigned value. The logic is adapted
2030 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2031 instances in which STRIP_NOPS was previously applied to the RHS of
2032 an assignment.
2033
2034 NOTE: In the use cases that led to the creation of this function
2035 and of gimple_assign_single_p, it is typical to test for either
2036 condition and to proceed in the same manner. In each case, the
2037 assigned value is represented by the single RHS operand of the
2038 assignment. I suspect there may be cases where gimple_assign_copy_p,
2039 gimple_assign_single_p, or equivalent logic is used where a similar
2040 treatment of unary NOPs is appropriate. */
b8698a0f 2041
726a989a
RB
2042bool
2043gimple_assign_unary_nop_p (gimple gs)
2044{
3dbe9454 2045 return (is_gimple_assign (gs)
1a87cf0c 2046 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
726a989a
RB
2047 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2048 && gimple_assign_rhs1 (gs) != error_mark_node
2049 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2050 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2051}
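
/* Illustrative example: an assignment "i_1 = (int) u_2", where i_1 is
   int and u_2 is unsigned int, satisfies gimple_assign_unary_nop_p:
   the RHS code satisfies CONVERT_EXPR_CODE_P and both types share the
   same mode (SImode on typical targets), so stripping the conversion
   does not change the assigned value's representation.  */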
2052
2053/* Set BB to be the basic block holding G. */
2054
2055void
2056gimple_set_bb (gimple stmt, basic_block bb)
2057{
2058 stmt->gsbase.bb = bb;
2059
2060 /* If the statement is a label, add the label to block-to-labels map
2061 so that we can speed up edge creation for GIMPLE_GOTOs. */
2062 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2063 {
2064 tree t;
2065 int uid;
2066
2067 t = gimple_label_label (stmt);
2068 uid = LABEL_DECL_UID (t);
2069 if (uid == -1)
2070 {
2071 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2072 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2073 if (old_len <= (unsigned) uid)
2074 {
5006671f 2075 unsigned new_len = 3 * uid / 2 + 1;
726a989a
RB
2076
2077 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2078 new_len);
2079 }
2080 }
2081
2082 VEC_replace (basic_block, label_to_block_map, uid, bb);
2083 }
2084}
2085
2086
726a989a
RB
2087/* Modify the RHS of the assignment pointed-to by GSI using the
2088 operands in the expression tree EXPR.
2089
2090 NOTE: The statement pointed-to by GSI may be reallocated if it
2091 did not have enough operand slots.
2092
2093 This function is useful to convert an existing tree expression into
2094 the flat representation used for the RHS of a GIMPLE assignment.
2095 It will reallocate memory as needed to expand or shrink the number
2096 of operand slots needed to represent EXPR.
2097
2098 NOTE: If you find yourself building a tree and then calling this
2099 function, you are most certainly doing it the slow way. It is much
2100 better to build a new assignment or to use the function
2101 gimple_assign_set_rhs_with_ops, which does not require an
2102 expression tree to be built. */
2103
2104void
2105gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2106{
2107 enum tree_code subcode;
0354c0c7 2108 tree op1, op2, op3;
726a989a 2109
0354c0c7
BS
2110 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2111 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
726a989a
RB
2112}
2113
2114
2115/* Set the RHS of assignment statement pointed-to by GSI to CODE with
0354c0c7 2116 operands OP1, OP2 and OP3.
726a989a
RB
2117
2118 NOTE: The statement pointed-to by GSI may be reallocated if it
2119 did not have enough operand slots. */
2120
2121void
0354c0c7
BS
2122gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2123 tree op1, tree op2, tree op3)
726a989a
RB
2124{
2125 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2126 gimple stmt = gsi_stmt (*gsi);
2127
2128 /* If the new CODE needs more operands, allocate a new statement. */
2129 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2130 {
2131 tree lhs = gimple_assign_lhs (stmt);
2132 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2133 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
355a7673 2134 gimple_init_singleton (new_stmt);
726a989a
RB
2135 gsi_replace (gsi, new_stmt, true);
2136 stmt = new_stmt;
2137
2138 /* The LHS needs to be reset as this also changes the SSA name
2139 on the LHS. */
2140 gimple_assign_set_lhs (stmt, lhs);
2141 }
2142
2143 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2144 gimple_set_subcode (stmt, code);
2145 gimple_assign_set_rhs1 (stmt, op1);
2146 if (new_rhs_ops > 1)
2147 gimple_assign_set_rhs2 (stmt, op2);
0354c0c7
BS
2148 if (new_rhs_ops > 2)
2149 gimple_assign_set_rhs3 (stmt, op3);
726a989a
RB
2150}
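
/* Illustrative sketch (assumed usage, not from the original source):
   with GSI pointing at "x = a + b", the statement can be rewritten in
   place into the ternary "x = a * b + c":

     gimple_assign_set_rhs_with_ops_1 (&gsi, FMA_EXPR, a, b, c);

   FMA_EXPR needs three RHS operand slots while PLUS_EXPR provided only
   two, so the statement is reallocated and GSI is updated to point at
   the replacement.  */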
2151
2152
2153/* Return the LHS of a statement that performs an assignment,
2154 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2155 for a call to a function that returns no value, or for a
2156 statement other than an assignment or a call. */
2157
2158tree
2159gimple_get_lhs (const_gimple stmt)
2160{
e0c68ce9 2161 enum gimple_code code = gimple_code (stmt);
726a989a
RB
2162
2163 if (code == GIMPLE_ASSIGN)
2164 return gimple_assign_lhs (stmt);
2165 else if (code == GIMPLE_CALL)
2166 return gimple_call_lhs (stmt);
2167 else
2168 return NULL_TREE;
2169}
2170
2171
2172/* Set the LHS of a statement that performs an assignment,
2173 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2174
2175void
2176gimple_set_lhs (gimple stmt, tree lhs)
2177{
e0c68ce9 2178 enum gimple_code code = gimple_code (stmt);
726a989a
RB
2179
2180 if (code == GIMPLE_ASSIGN)
2181 gimple_assign_set_lhs (stmt, lhs);
2182 else if (code == GIMPLE_CALL)
2183 gimple_call_set_lhs (stmt, lhs);
2184 else
2185 gcc_unreachable ();
2186}
2187
21cf7180
AO
2188/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2189 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2190 expression with a different value.
2191
2192 This will update any annotations (say debug bind stmts) referring
2193 to the original LHS, so that they use the RHS instead. This is
2194 done even if NLHS and LHS are the same, for it is understood that
2195 the RHS will be modified afterwards, and NLHS will not be assigned
2196 an equivalent value.
2197
2198 Adjusting any non-annotation uses of the LHS, if needed, is a
2199 responsibility of the caller.
2200
2201 The effect of this call should be pretty much the same as that of
2202 inserting a copy of STMT before STMT, and then removing the
2203 original stmt, at which time gsi_remove() would have updated
2204 annotations, but using this function saves all the inserting,
2205 copying and removing. */
2206
2207void
2208gimple_replace_lhs (gimple stmt, tree nlhs)
2209{
2210 if (MAY_HAVE_DEBUG_STMTS)
2211 {
2212 tree lhs = gimple_get_lhs (stmt);
2213
2214 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2215
2216 insert_debug_temp_for_var_def (NULL, lhs);
2217 }
2218
2219 gimple_set_lhs (stmt, nlhs);
2220}
726a989a
RB
2221
2222/* Return a deep copy of statement STMT. All the operands from STMT
2223 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
355a7673
MM
2224 and VUSE operand arrays are set to empty in the new copy. The new
2225 copy isn't part of any sequence. */
726a989a
RB
2226
2227gimple
2228gimple_copy (gimple stmt)
2229{
2230 enum gimple_code code = gimple_code (stmt);
2231 unsigned num_ops = gimple_num_ops (stmt);
2232 gimple copy = gimple_alloc (code, num_ops);
2233 unsigned i;
2234
2235 /* Shallow copy all the fields from STMT. */
2236 memcpy (copy, stmt, gimple_size (code));
355a7673 2237 gimple_init_singleton (copy);
726a989a
RB
2238
2239 /* If STMT has sub-statements, deep-copy them as well. */
2240 if (gimple_has_substatements (stmt))
2241 {
2242 gimple_seq new_seq;
2243 tree t;
2244
2245 switch (gimple_code (stmt))
2246 {
2247 case GIMPLE_BIND:
2248 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2249 gimple_bind_set_body (copy, new_seq);
2250 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2251 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2252 break;
2253
2254 case GIMPLE_CATCH:
2255 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2256 gimple_catch_set_handler (copy, new_seq);
2257 t = unshare_expr (gimple_catch_types (stmt));
2258 gimple_catch_set_types (copy, t);
2259 break;
2260
2261 case GIMPLE_EH_FILTER:
2262 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2263 gimple_eh_filter_set_failure (copy, new_seq);
2264 t = unshare_expr (gimple_eh_filter_types (stmt));
2265 gimple_eh_filter_set_types (copy, t);
2266 break;
2267
0a35513e
AH
2268 case GIMPLE_EH_ELSE:
2269 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2270 gimple_eh_else_set_n_body (copy, new_seq);
2271 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2272 gimple_eh_else_set_e_body (copy, new_seq);
2273 break;
2274
726a989a
RB
2275 case GIMPLE_TRY:
2276 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2277 gimple_try_set_eval (copy, new_seq);
2278 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2279 gimple_try_set_cleanup (copy, new_seq);
2280 break;
2281
2282 case GIMPLE_OMP_FOR:
2283 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2284 gimple_omp_for_set_pre_body (copy, new_seq);
2285 t = unshare_expr (gimple_omp_for_clauses (stmt));
2286 gimple_omp_for_set_clauses (copy, t);
2287 copy->gimple_omp_for.iter
a9429e29
LB
2288 = ggc_alloc_vec_gimple_omp_for_iter
2289 (gimple_omp_for_collapse (stmt));
726a989a
RB
2290 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2291 {
2292 gimple_omp_for_set_cond (copy, i,
2293 gimple_omp_for_cond (stmt, i));
2294 gimple_omp_for_set_index (copy, i,
2295 gimple_omp_for_index (stmt, i));
2296 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2297 gimple_omp_for_set_initial (copy, i, t);
2298 t = unshare_expr (gimple_omp_for_final (stmt, i));
2299 gimple_omp_for_set_final (copy, i, t);
2300 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2301 gimple_omp_for_set_incr (copy, i, t);
2302 }
2303 goto copy_omp_body;
2304
2305 case GIMPLE_OMP_PARALLEL:
2306 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2307 gimple_omp_parallel_set_clauses (copy, t);
2308 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2309 gimple_omp_parallel_set_child_fn (copy, t);
2310 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2311 gimple_omp_parallel_set_data_arg (copy, t);
2312 goto copy_omp_body;
2313
2314 case GIMPLE_OMP_TASK:
2315 t = unshare_expr (gimple_omp_task_clauses (stmt));
2316 gimple_omp_task_set_clauses (copy, t);
2317 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2318 gimple_omp_task_set_child_fn (copy, t);
2319 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2320 gimple_omp_task_set_data_arg (copy, t);
2321 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2322 gimple_omp_task_set_copy_fn (copy, t);
2323 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2324 gimple_omp_task_set_arg_size (copy, t);
2325 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2326 gimple_omp_task_set_arg_align (copy, t);
2327 goto copy_omp_body;
2328
2329 case GIMPLE_OMP_CRITICAL:
2330 t = unshare_expr (gimple_omp_critical_name (stmt));
2331 gimple_omp_critical_set_name (copy, t);
2332 goto copy_omp_body;
2333
2334 case GIMPLE_OMP_SECTIONS:
2335 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2336 gimple_omp_sections_set_clauses (copy, t);
2337 t = unshare_expr (gimple_omp_sections_control (stmt));
2338 gimple_omp_sections_set_control (copy, t);
2339 /* FALLTHRU */
2340
2341 case GIMPLE_OMP_SINGLE:
2342 case GIMPLE_OMP_SECTION:
2343 case GIMPLE_OMP_MASTER:
2344 case GIMPLE_OMP_ORDERED:
2345 copy_omp_body:
2346 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2347 gimple_omp_set_body (copy, new_seq);
2348 break;
2349
0a35513e
AH
2350 case GIMPLE_TRANSACTION:
2351 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2352 gimple_transaction_set_body (copy, new_seq);
2353 break;
2354
726a989a
RB
2355 case GIMPLE_WITH_CLEANUP_EXPR:
2356 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2357 gimple_wce_set_cleanup (copy, new_seq);
2358 break;
2359
2360 default:
2361 gcc_unreachable ();
2362 }
2363 }
2364
2365 /* Make copy of operands. */
2366 if (num_ops > 0)
2367 {
2368 for (i = 0; i < num_ops; i++)
2369 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2370
ccacdf06 2371 /* Clear out SSA operand vectors on COPY. */
726a989a
RB
2372 if (gimple_has_ops (stmt))
2373 {
2374 gimple_set_def_ops (copy, NULL);
2375 gimple_set_use_ops (copy, NULL);
726a989a
RB
2376 }
2377
2378 if (gimple_has_mem_ops (stmt))
2379 {
5006671f
RG
2380 gimple_set_vdef (copy, gimple_vdef (stmt));
2381 gimple_set_vuse (copy, gimple_vuse (stmt));
726a989a
RB
2382 }
2383
5006671f
RG
2384 /* SSA operands need to be updated. */
2385 gimple_set_modified (copy, true);
726a989a
RB
2386 }
2387
2388 return copy;
2389}
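
/* Illustrative sketch (assumed usage): duplicating a statement, for
   instance when versioning code:

     gimple dup = gimple_copy (stmt);

   DUP shares no operand trees with STMT, is not a member of any
   sequence, and is marked modified so that its SSA operands get
   recomputed by the operand scanner before it is used.  */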
2390
2391
726a989a
RB
2392/* Return true if statement S has side-effects. We consider a
2393 statement to have side effects if:
2394
2395 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2396 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2397
2398bool
2399gimple_has_side_effects (const_gimple s)
2400{
b5b8b0ac
AO
2401 if (is_gimple_debug (s))
2402 return false;
2403
726a989a
RB
2404 /* We don't have to scan the arguments to check for
2405 volatile arguments, though, at present, we still
2406 do a scan to check for TREE_SIDE_EFFECTS. */
2407 if (gimple_has_volatile_ops (s))
2408 return true;
2409
179184e3
RG
2410 if (gimple_code (s) == GIMPLE_ASM
2411 && gimple_asm_volatile_p (s))
2412 return true;
2413
726a989a
RB
2414 if (is_gimple_call (s))
2415 {
723afc44 2416 int flags = gimple_call_flags (s);
726a989a 2417
723afc44
RG
2418 /* An infinite loop is considered a side effect. */
2419 if (!(flags & (ECF_CONST | ECF_PURE))
2420 || (flags & ECF_LOOPING_CONST_OR_PURE))
726a989a
RB
2421 return true;
2422
726a989a
RB
2423 return false;
2424 }
726a989a
RB
2425
2426 return false;
2427}
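
/* Illustrative examples for the predicate above: a call to a const or
   pure function has no side effects unless ECF_LOOPING_CONST_OR_PURE
   is set (a possibly infinite loop counts as a side effect), while a
   volatile asm or a statement with volatile operands always does.  */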
2428
726a989a 2429/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
e1fd038a
SP
2430 Return true if S can trap. When INCLUDE_MEM is true, check whether
2431 the memory operations could trap. When INCLUDE_STORES is true and
2432 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
726a989a 2433
e1fd038a
SP
2434bool
2435gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
726a989a 2436{
726a989a
RB
2437 tree t, div = NULL_TREE;
2438 enum tree_code op;
2439
e1fd038a
SP
2440 if (include_mem)
2441 {
2442 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
726a989a 2443
e1fd038a
SP
2444 for (i = start; i < gimple_num_ops (s); i++)
2445 if (tree_could_trap_p (gimple_op (s, i)))
2446 return true;
2447 }
726a989a
RB
2448
2449 switch (gimple_code (s))
2450 {
2451 case GIMPLE_ASM:
2452 return gimple_asm_volatile_p (s);
2453
2454 case GIMPLE_CALL:
2455 t = gimple_call_fndecl (s);
2456 /* Assume that calls to weak functions may trap. */
2457 if (!t || !DECL_P (t) || DECL_WEAK (t))
2458 return true;
2459 return false;
2460
2461 case GIMPLE_ASSIGN:
2462 t = gimple_expr_type (s);
2463 op = gimple_assign_rhs_code (s);
2464 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2465 div = gimple_assign_rhs2 (s);
2466 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2467 (INTEGRAL_TYPE_P (t)
2468 && TYPE_OVERFLOW_TRAPS (t)),
2469 div));
2470
2471 default:
2472 break;
2473 }
2474
2475 return false;
726a989a
RB
2476}
2477
726a989a
RB
2478/* Return true if statement S can trap. */
2479
2480bool
2481gimple_could_trap_p (gimple s)
2482{
e1fd038a 2483 return gimple_could_trap_p_1 (s, true, true);
726a989a
RB
2484}
2485
726a989a
RB
2486/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2487
2488bool
2489gimple_assign_rhs_could_trap_p (gimple s)
2490{
2491 gcc_assert (is_gimple_assign (s));
e1fd038a 2492 return gimple_could_trap_p_1 (s, true, false);
726a989a
RB
2493}
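
/* Illustrative examples: for "x_4 = a_2 / b_3" with integral operands,
   gimple_assign_rhs_could_trap_p returns true because of the possible
   division by zero; for the store "*p_1 = x_4", gimple_could_trap_p
   returns true because the LHS memory reference itself could trap,
   which gimple_assign_rhs_could_trap_p would ignore.  */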
2494
2495
2496/* Print debugging information for gimple stmts generated. */
2497
2498void
2499dump_gimple_statistics (void)
2500{
726a989a
RB
2501 int i, total_tuples = 0, total_bytes = 0;
2502
7aa6d18a
SB
2503 if (! GATHER_STATISTICS)
2504 {
2505 fprintf (stderr, "No gimple statistics\n");
2506 return;
2507 }
2508
726a989a
RB
2509 fprintf (stderr, "\nGIMPLE statements\n");
2510 fprintf (stderr, "Kind Stmts Bytes\n");
2511 fprintf (stderr, "---------------------------------------\n");
2512 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2513 {
2514 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2515 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2516 total_tuples += gimple_alloc_counts[i];
2517 total_bytes += gimple_alloc_sizes[i];
2518 }
2519 fprintf (stderr, "---------------------------------------\n");
2520 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2521 fprintf (stderr, "---------------------------------------\n");
726a989a
RB
2522}
2523
2524
726a989a
RB
2525/* Return the number of operands needed on the RHS of a GIMPLE
2526 assignment for an expression with tree code CODE. */
2527
2528unsigned
2529get_gimple_rhs_num_ops (enum tree_code code)
2530{
2531 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2532
2533 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2534 return 1;
2535 else if (rhs_class == GIMPLE_BINARY_RHS)
2536 return 2;
0354c0c7
BS
2537 else if (rhs_class == GIMPLE_TERNARY_RHS)
2538 return 3;
726a989a
RB
2539 else
2540 gcc_unreachable ();
2541}
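
/* Illustrative examples, following the classification table below:

     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1  (GIMPLE_UNARY_RHS)
     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2  (GIMPLE_BINARY_RHS)
     get_gimple_rhs_num_ops (FMA_EXPR)    == 3  (GIMPLE_TERNARY_RHS)  */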
2542
2543#define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2544 (unsigned char) \
2545 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2546 : ((TYPE) == tcc_binary \
2547 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2548 : ((TYPE) == tcc_constant \
2549 || (TYPE) == tcc_declaration \
2550 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2551 : ((SYM) == TRUTH_AND_EXPR \
2552 || (SYM) == TRUTH_OR_EXPR \
2553 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2554 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
4e71066d
RG
2555 : ((SYM) == COND_EXPR \
2556 || (SYM) == WIDEN_MULT_PLUS_EXPR \
16949072 2557 || (SYM) == WIDEN_MULT_MINUS_EXPR \
f471fe72
RG
2558 || (SYM) == DOT_PROD_EXPR \
2559 || (SYM) == REALIGN_LOAD_EXPR \
4e71066d 2560 || (SYM) == VEC_COND_EXPR \
2205ed25 2561 || (SYM) == VEC_PERM_EXPR \
16949072 2562 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
4e71066d 2563 : ((SYM) == CONSTRUCTOR \
726a989a
RB
2564 || (SYM) == OBJ_TYPE_REF \
2565 || (SYM) == ASSERT_EXPR \
2566 || (SYM) == ADDR_EXPR \
2567 || (SYM) == WITH_SIZE_EXPR \
4e71066d 2568 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
726a989a
RB
2569 : GIMPLE_INVALID_RHS),
2570#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2571
2572const unsigned char gimple_rhs_class_table[] = {
2573#include "all-tree.def"
2574};
2575
2576#undef DEFTREECODE
2577#undef END_OF_BASE_TREE_CODES
2578
2579/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2580
2581/* Validation of GIMPLE expressions. */
2582
726a989a
RB
2583/* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2584
2585bool
2586is_gimple_lvalue (tree t)
2587{
2588 return (is_gimple_addressable (t)
2589 || TREE_CODE (t) == WITH_SIZE_EXPR
2590 /* These are complex lvalues, but don't have addresses, so they
2591 go here. */
2592 || TREE_CODE (t) == BIT_FIELD_REF);
2593}
2594
2595/* Return true if T is a GIMPLE condition. */
2596
2597bool
2598is_gimple_condexpr (tree t)
2599{
2600 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
f9613c9a 2601 && !tree_could_throw_p (t)
726a989a
RB
2602 && is_gimple_val (TREE_OPERAND (t, 0))
2603 && is_gimple_val (TREE_OPERAND (t, 1))));
2604}
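
/* Illustrative examples: "a_1 < b_2" is a valid GIMPLE condition when
   both operands are GIMPLE values, as is the bare value "x_3"; the
   comparison "a_1 < f (b_2)" is not, because its second operand is not
   a GIMPLE value.  */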
2605
2606/* Return true if T is something whose address can be taken. */
2607
2608bool
2609is_gimple_addressable (tree t)
2610{
70f34814
RG
2611 return (is_gimple_id (t) || handled_component_p (t)
2612 || TREE_CODE (t) == MEM_REF);
726a989a
RB
2613}
2614
2615/* Return true if T is a valid gimple constant. */
2616
2617bool
2618is_gimple_constant (const_tree t)
2619{
2620 switch (TREE_CODE (t))
2621 {
2622 case INTEGER_CST:
2623 case REAL_CST:
2624 case FIXED_CST:
2625 case STRING_CST:
2626 case COMPLEX_CST:
2627 case VECTOR_CST:
2628 return true;
2629
2630 /* Vector constant constructors are gimple invariant. */
2631 case CONSTRUCTOR:
2632 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2633 return TREE_CONSTANT (t);
2634 else
2635 return false;
2636
2637 default:
2638 return false;
2639 }
2640}
2641
2642/* Return true if T is a gimple address. */
2643
2644bool
2645is_gimple_address (const_tree t)
2646{
2647 tree op;
2648
2649 if (TREE_CODE (t) != ADDR_EXPR)
2650 return false;
2651
2652 op = TREE_OPERAND (t, 0);
2653 while (handled_component_p (op))
2654 {
2655 if ((TREE_CODE (op) == ARRAY_REF
2656 || TREE_CODE (op) == ARRAY_RANGE_REF)
2657 && !is_gimple_val (TREE_OPERAND (op, 1)))
2658 return false;
2659
2660 op = TREE_OPERAND (op, 0);
2661 }
2662
70f34814 2663 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
726a989a
RB
2664 return true;
2665
2666 switch (TREE_CODE (op))
2667 {
2668 case PARM_DECL:
2669 case RESULT_DECL:
2670 case LABEL_DECL:
2671 case FUNCTION_DECL:
2672 case VAR_DECL:
2673 case CONST_DECL:
2674 return true;
2675
2676 default:
2677 return false;
2678 }
2679}
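
/* Illustrative examples: "&a.b[i_1]" is a gimple address when i_1 is a
   GIMPLE value, since the base is a VAR_DECL; "&a.b[i_1 + 1]" is not,
   because the array index is not a GIMPLE value.  */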
2680
00fc2333
JH
2681/* Return true if T is a gimple invariant address. */
2682
2683bool
2684is_gimple_invariant_address (const_tree t)
2685{
2686 const_tree op;
2687
2688 if (TREE_CODE (t) != ADDR_EXPR)
2689 return false;
2690
2691 op = strip_invariant_refs (TREE_OPERAND (t, 0));
70f34814
RG
2692 if (!op)
2693 return false;
00fc2333 2694
70f34814
RG
2695 if (TREE_CODE (op) == MEM_REF)
2696 {
2697 const_tree op0 = TREE_OPERAND (op, 0);
2698 return (TREE_CODE (op0) == ADDR_EXPR
2699 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2700 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2701 }
2702
2703 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
00fc2333
JH
2704}
2705
2706/* Return true if T is a gimple invariant address at IPA level
2707 (so addresses of variables on stack are not allowed). */
2708
2709bool
2710is_gimple_ip_invariant_address (const_tree t)
2711{
2712 const_tree op;
2713
2714 if (TREE_CODE (t) != ADDR_EXPR)
2715 return false;
2716
2717 op = strip_invariant_refs (TREE_OPERAND (t, 0));
39cc8c3d
MJ
2718 if (!op)
2719 return false;
2720
2721 if (TREE_CODE (op) == MEM_REF)
2722 {
2723 const_tree op0 = TREE_OPERAND (op, 0);
2724 return (TREE_CODE (op0) == ADDR_EXPR
2725 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2726 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2727 }
00fc2333 2728
39cc8c3d 2729 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
726a989a
RB
2730}
2731
2732/* Return true if T is a GIMPLE minimal invariant. It's a restricted
2733 form of function invariant. */
2734
2735bool
2736is_gimple_min_invariant (const_tree t)
2737{
2738 if (TREE_CODE (t) == ADDR_EXPR)
2739 return is_gimple_invariant_address (t);
2740
2741 return is_gimple_constant (t);
2742}
2743
00fc2333
JH
2744/* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2745 form of gimple minimal invariant. */
2746
2747bool
2748is_gimple_ip_invariant (const_tree t)
2749{
2750 if (TREE_CODE (t) == ADDR_EXPR)
2751 return is_gimple_ip_invariant_address (t);
2752
2753 return is_gimple_constant (t);
2754}
2755
726a989a
RB
2756/* Return true if T is a variable. */
2757
2758bool
2759is_gimple_variable (tree t)
2760{
2761 return (TREE_CODE (t) == VAR_DECL
2762 || TREE_CODE (t) == PARM_DECL
2763 || TREE_CODE (t) == RESULT_DECL
2764 || TREE_CODE (t) == SSA_NAME);
2765}
2766
2767/* Return true if T is a GIMPLE identifier (something with an address). */
2768
2769bool
2770is_gimple_id (tree t)
2771{
2772 return (is_gimple_variable (t)
2773 || TREE_CODE (t) == FUNCTION_DECL
2774 || TREE_CODE (t) == LABEL_DECL
2775 || TREE_CODE (t) == CONST_DECL
2776 /* Allow string constants, since they are addressable. */
2777 || TREE_CODE (t) == STRING_CST);
2778}
2779
726a989a
RB
2780/* Return true if T is a non-aggregate register variable. */
2781
2782bool
2783is_gimple_reg (tree t)
2784{
a471762f 2785 if (virtual_operand_p (t))
3828719a 2786 return false;
726a989a 2787
a471762f
RG
2788 if (TREE_CODE (t) == SSA_NAME)
2789 return true;
2790
726a989a
RB
2791 if (!is_gimple_variable (t))
2792 return false;
2793
2794 if (!is_gimple_reg_type (TREE_TYPE (t)))
2795 return false;
2796
2797 /* A volatile decl is not acceptable because we can't reuse it as
2798 needed. We need to copy it into a temp first. */
2799 if (TREE_THIS_VOLATILE (t))
2800 return false;
2801
2802 /* We define "registers" as things that can be renamed as needed,
2803 which with our infrastructure does not apply to memory. */
2804 if (needs_to_live_in_memory (t))
2805 return false;
2806
2807 /* Hard register variables are an interesting case. For those that
2808 are call-clobbered, we don't know where all the calls are, since
2809 we don't (want to) take into account which operations will turn
2810 into libcalls at the rtl level. For those that are call-saved,
2811 we don't currently model the fact that calls may in fact change
2812 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2813 level, and so miss variable changes that might imply. All around,
2814 it seems safest to not do too much optimization with these at the
2815 tree level at all. We'll have to rely on the rtl optimizers to
2816 clean this up, as there we've got all the appropriate bits exposed. */
2817 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2818 return false;
2819
4636b850
RG
2820 /* Complex and vector values must have been put into SSA-like form.
2821 That is, no assignments to the individual components. */
2822 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2823 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2824 return DECL_GIMPLE_REG_P (t);
2825
726a989a
RB
2826 return true;
2827}
2828
2829
726a989a
RB
2830/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2831
2832bool
2833is_gimple_val (tree t)
2834{
2835 /* Make loads from volatiles and memory vars explicit. */
2836 if (is_gimple_variable (t)
2837 && is_gimple_reg_type (TREE_TYPE (t))
2838 && !is_gimple_reg (t))
2839 return false;
2840
726a989a
RB
2841 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2842}
2843
2844/* Similarly, but accept hard registers as inputs to asm statements. */
2845
2846bool
2847is_gimple_asm_val (tree t)
2848{
2849 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2850 return true;
2851
2852 return is_gimple_val (t);
2853}
2854
2855/* Return true if T is a GIMPLE minimal lvalue. */
2856
2857bool
2858is_gimple_min_lval (tree t)
2859{
ba4d8f9d
RG
2860 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2861 return false;
70f34814 2862 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
726a989a
RB
2863}
2864
726a989a
RB
2865/* Return true if T is a valid function operand of a CALL_EXPR. */
2866
2867bool
2868is_gimple_call_addr (tree t)
2869{
2870 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2871}
2872
70f34814
RG
2873/* Return true if T is a valid address operand of a MEM_REF. */
2874
2875bool
2876is_gimple_mem_ref_addr (tree t)
2877{
2878 return (is_gimple_reg (t)
2879 || TREE_CODE (t) == INTEGER_CST
2880 || (TREE_CODE (t) == ADDR_EXPR
2881 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2882 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2883}
2884
726a989a
RB
2885
2886/* Given a memory reference expression T, return its base address.
2887 The base address of a memory reference expression is the main
2888 object being referenced. For instance, the base address for
2889 'array[i].fld[j]' is 'array'. You can think of this as stripping
2890 away the offset part from a memory address.
2891
2892 This function calls handled_component_p to strip away all the inner
2893 parts of the memory reference until it reaches the base object. */
2894
2895tree
2896get_base_address (tree t)
2897{
2898 while (handled_component_p (t))
2899 t = TREE_OPERAND (t, 0);
b8698a0f 2900
4d948885
RG
2901 if ((TREE_CODE (t) == MEM_REF
2902 || TREE_CODE (t) == TARGET_MEM_REF)
70f34814
RG
2903 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2904 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2905
b3b9f3d0
JH
2906 if (TREE_CODE (t) == SSA_NAME
2907 || DECL_P (t)
726a989a
RB
2908 || TREE_CODE (t) == STRING_CST
2909 || TREE_CODE (t) == CONSTRUCTOR
70f34814 2910 || INDIRECT_REF_P (t)
4d948885
RG
2911 || TREE_CODE (t) == MEM_REF
2912 || TREE_CODE (t) == TARGET_MEM_REF)
726a989a
RB
2913 return t;
2914 else
2915 return NULL_TREE;
2916}
2917
2918void
2919recalculate_side_effects (tree t)
2920{
2921 enum tree_code code = TREE_CODE (t);
2922 int len = TREE_OPERAND_LENGTH (t);
2923 int i;
2924
2925 switch (TREE_CODE_CLASS (code))
2926 {
2927 case tcc_expression:
2928 switch (code)
2929 {
2930 case INIT_EXPR:
2931 case MODIFY_EXPR:
2932 case VA_ARG_EXPR:
2933 case PREDECREMENT_EXPR:
2934 case PREINCREMENT_EXPR:
2935 case POSTDECREMENT_EXPR:
2936 case POSTINCREMENT_EXPR:
2937 /* All of these have side-effects, no matter what their
2938 operands are. */
2939 return;
2940
2941 default:
2942 break;
2943 }
2944 /* Fall through. */
2945
2946 case tcc_comparison: /* a comparison expression */
2947 case tcc_unary: /* a unary arithmetic expression */
2948 case tcc_binary: /* a binary arithmetic expression */
2949 case tcc_reference: /* a reference */
2950 case tcc_vl_exp: /* a function call */
2951 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2952 for (i = 0; i < len; ++i)
2953 {
2954 tree op = TREE_OPERAND (t, i);
2955 if (op && TREE_SIDE_EFFECTS (op))
2956 TREE_SIDE_EFFECTS (t) = 1;
2957 }
2958 break;
2959
13f95bdb
EB
2960 case tcc_constant:
2961 /* No side-effects. */
2962 return;
2963
726a989a 2964 default:
726a989a
RB
2965 gcc_unreachable ();
2966 }
2967}
2968
2969/* Canonicalize a tree T for use in a COND_EXPR as conditional. Return
 2970 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
 2971 we failed to create one. */
2972
2973tree
2974canonicalize_cond_expr_cond (tree t)
2975{
b66a1bac
RG
2976 /* Strip conversions around boolean operations. */
2977 if (CONVERT_EXPR_P (t)
9b80d091
KT
2978 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2979 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2980 == BOOLEAN_TYPE))
b66a1bac
RG
2981 t = TREE_OPERAND (t, 0);
2982
726a989a 2983 /* For !x use x == 0. */
12430896 2984 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
726a989a
RB
2985 {
2986 tree top0 = TREE_OPERAND (t, 0);
2987 t = build2 (EQ_EXPR, TREE_TYPE (t),
2988 top0, build_int_cst (TREE_TYPE (top0), 0));
2989 }
2990 /* For cmp ? 1 : 0 use cmp. */
2991 else if (TREE_CODE (t) == COND_EXPR
2992 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2993 && integer_onep (TREE_OPERAND (t, 1))
2994 && integer_zerop (TREE_OPERAND (t, 2)))
2995 {
2996 tree top0 = TREE_OPERAND (t, 0);
2997 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2998 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2999 }
3000
3001 if (is_gimple_condexpr (t))
3002 return t;
3003
3004 return NULL_TREE;
3005}
3006
e6c99067
DN
3007/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3008 the positions marked by the set ARGS_TO_SKIP. */
3009
c6f7cfc1 3010gimple
5c0466b5 3011gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
c6f7cfc1
JH
3012{
3013 int i;
c6f7cfc1
JH
3014 int nargs = gimple_call_num_args (stmt);
3015 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3016 gimple new_stmt;
3017
3018 for (i = 0; i < nargs; i++)
3019 if (!bitmap_bit_p (args_to_skip, i))
3020 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3021
25583c4f
RS
3022 if (gimple_call_internal_p (stmt))
3023 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3024 vargs);
3025 else
3026 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
c6f7cfc1
JH
3027 VEC_free (tree, heap, vargs);
3028 if (gimple_call_lhs (stmt))
3029 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3030
5006671f
RG
3031 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3032 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3033
c6f7cfc1
JH
3034 gimple_set_block (new_stmt, gimple_block (stmt));
3035 if (gimple_has_location (stmt))
3036 gimple_set_location (new_stmt, gimple_location (stmt));
8d2adc24 3037 gimple_call_copy_flags (new_stmt, stmt);
c6f7cfc1 3038 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
5006671f
RG
3039
3040 gimple_set_modified (new_stmt, true);
3041
c6f7cfc1
JH
3042 return new_stmt;
3043}
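
/* Illustrative sketch (assumed usage): given the call
   "res = foo (a, b, c)" and ARGS_TO_SKIP with bit 1 set, the function
   above builds "res = foo (a, c)"; the LHS, virtual operands, block,
   location and call flags are all carried over from the original.  */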
3044
5006671f 3045
0ca8de87
RG
3046enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3047
b5e04de5 3048static hashval_t gimple_type_hash (const void *);
d7f09764
DN
3049
3050/* Structure used to maintain a cache of some type pairs compared by
3051 gimple_types_compatible_p when comparing aggregate types. There are
c4fcd06a 3052 three possible values for SAME_P:
d7f09764
DN
3053
3054 -2: The pair (T1, T2) has just been inserted in the table.
d7f09764
DN
3055 0: T1 and T2 are different types.
3056 1: T1 and T2 are the same type.
3057
c4fcd06a
RG
3058 The two elements in the SAME_P array are indexed by the comparison
3059 mode gtc_mode. */
3060
d7f09764
DN
3061struct type_pair_d
3062{
88ca1146
RG
3063 unsigned int uid1;
3064 unsigned int uid2;
c4fcd06a 3065 signed char same_p[2];
d7f09764
DN
3066};
3067typedef struct type_pair_d *type_pair_t;
d4398a43
RG
3068DEF_VEC_P(type_pair_t);
3069DEF_VEC_ALLOC_P(type_pair_t,heap);
3070
a30726a4
JH
3071#define GIMPLE_TYPE_PAIR_SIZE 16381
3072struct type_pair_d *type_pair_cache;
d7f09764 3073
d7f09764
DN
3074
3075/* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3076 entry if none existed. */
3077
a30726a4
JH
3078static inline type_pair_t
3079lookup_type_pair (tree t1, tree t2)
d7f09764 3080{
a30726a4
JH
3081 unsigned int index;
3082 unsigned int uid1, uid2;
d7f09764 3083
a30726a4
JH
3084 if (type_pair_cache == NULL)
3085 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
d7f09764 3086
4fc07af9
JH
3087 if (TYPE_UID (t1) < TYPE_UID (t2))
3088 {
a30726a4
JH
3089 uid1 = TYPE_UID (t1);
3090 uid2 = TYPE_UID (t2);
4fc07af9
JH
3091 }
3092 else
3093 {
a30726a4
JH
3094 uid1 = TYPE_UID (t2);
3095 uid2 = TYPE_UID (t1);
4fc07af9 3096 }
a30726a4 3097 gcc_checking_assert (uid1 != uid2);
d7f09764 3098
a30726a4
JH
3099 /* iterative_hash_hashval_t implies function calls.
 3100 We know that UIDs are in a limited range. */
3101 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3102 % GIMPLE_TYPE_PAIR_SIZE);
3103 if (type_pair_cache [index].uid1 == uid1
3104 && type_pair_cache [index].uid2 == uid2)
3105 return &type_pair_cache[index];
d7f09764 3106
a30726a4
JH
3107 type_pair_cache [index].uid1 = uid1;
3108 type_pair_cache [index].uid2 = uid2;
3109 type_pair_cache [index].same_p[0] = -2;
3110 type_pair_cache [index].same_p[1] = -2;
3111
3112 return &type_pair_cache[index];
d7f09764
DN
3113}
3114
d4398a43
RG
3115/* Per-pointer state for the SCC finding. The on_sccstack flag
 3116 is not strictly required: it is true when there is no hash value
 3117 recorded for the type and false otherwise. But querying that
 3118 is slower. */
3119
3120struct sccs
3121{
3122 unsigned int dfsnum;
3123 unsigned int low;
3124 bool on_sccstack;
3125 union {
3126 hashval_t hash;
c4fcd06a 3127 signed char same_p;
d4398a43
RG
3128 } u;
3129};
3130
3131static unsigned int next_dfs_num;
3132static unsigned int gtc_next_dfs_num;
d7f09764 3133
4490cae6
RG
3134
3135/* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3136
3137typedef struct GTY(()) gimple_type_leader_entry_s {
3138 tree type;
3139 tree leader;
3140} gimple_type_leader_entry;
3141
3142#define GIMPLE_TYPE_LEADER_SIZE 16381
e89964e3
MM
3143static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3144 gimple_type_leader_entry *gimple_type_leader;
4490cae6
RG
3145
3146/* Lookup an existing leader for T and return it, or NULL_TREE if
 3147 there is none in the cache. */
3148
4fc07af9 3149static inline tree
4490cae6
RG
3150gimple_lookup_type_leader (tree t)
3151{
3152 gimple_type_leader_entry *leader;
3153
3154 if (!gimple_type_leader)
3155 return NULL_TREE;
3156
3157 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3158 if (leader->type != t)
3159 return NULL_TREE;
3160
3161 return leader->leader;
3162}
3163
77785f4f
RG
3164/* Return true if T1 and T2 have the same name. If exactly one of
 3165 them is named, return false; if neither has a name, they compare
 3166 equal. */
d7f09764
DN
3167
3168static bool
6a20ce76 3169compare_type_names_p (tree t1, tree t2)
d7f09764
DN
3170{
3171 tree name1 = TYPE_NAME (t1);
3172 tree name2 = TYPE_NAME (t2);
3173
ee7a54c5
RG
3174 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
3175 return false;
3176
3177 if (name1 == NULL_TREE)
3178 return true;
3179
3180 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
3181 if (TREE_CODE (name1) != TREE_CODE (name2))
3182 return false;
3183
3184 if (TREE_CODE (name1) == TYPE_DECL)
6a20ce76
RG
3185 name1 = DECL_NAME (name1);
3186 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
d7f09764 3187
ee7a54c5 3188 if (TREE_CODE (name2) == TYPE_DECL)
6a20ce76
RG
3189 name2 = DECL_NAME (name2);
3190 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
d7f09764
DN
3191
3192 /* Identifiers can be compared with pointer equality rather
3193 than a string comparison. */
3194 if (name1 == name2)
3195 return true;
3196
3197 return false;
3198}
3199
d025732d
EB
3200/* Return true if the field decls F1 and F2 are at the same offset.
3201
91f2fae8 3202 This is intended to be used on GIMPLE types only. */
d7f09764 3203
1e4bc4eb 3204bool
d025732d 3205gimple_compare_field_offset (tree f1, tree f2)
d7f09764
DN
3206{
3207 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
d025732d
EB
3208 {
3209 tree offset1 = DECL_FIELD_OFFSET (f1);
3210 tree offset2 = DECL_FIELD_OFFSET (f2);
3211 return ((offset1 == offset2
3212 /* Once gimplification is done, self-referential offsets are
3213 instantiated as operand #2 of the COMPONENT_REF built for
3214 each access and reset. Therefore, they are not relevant
3215 anymore and fields are interchangeable provided that they
3216 represent the same access. */
3217 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3218 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3219 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3220 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3221 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3222 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3223 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3224 || operand_equal_p (offset1, offset2, 0))
3225 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3226 DECL_FIELD_BIT_OFFSET (f2)));
3227 }
d7f09764
DN
3228
3229 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3230 should be, so handle differing ones specially by decomposing
3231 the offset into a byte and bit offset manually. */
3232 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3233 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3234 {
3235 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3236 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3237 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3238 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3239 + bit_offset1 / BITS_PER_UNIT);
3240 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3241 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3242 + bit_offset2 / BITS_PER_UNIT);
3243 if (byte_offset1 != byte_offset2)
3244 return false;
3245 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3246 }
3247
3248 return false;
3249}
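
/* Worked example for the mixed DECL_OFFSET_ALIGN path above, assuming
   BITS_PER_UNIT is 8: a field with DECL_FIELD_OFFSET 4 and
   DECL_FIELD_BIT_OFFSET 10 decomposes to byte offset 4 + 10/8 == 5 and
   residual bit offset 10 % 8 == 2; two fields compare equal iff both
   components match.  */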
3250
d4398a43 3251static bool
b5e04de5 3252gimple_types_compatible_p_1 (tree, tree, type_pair_t,
c4fcd06a 3253 VEC(type_pair_t, heap) **,
d4398a43 3254 struct pointer_map_t *, struct obstack *);
d7f09764 3255
d4398a43
RG
3256/* DFS visit the edge from the caller's type pair with state *STATE to
 3257 the pair T1, T2.
 3258 Update the merging status if it is not part of the SCC containing the
 3259 caller's pair and return it.
 3260 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3261
3262static bool
b5e04de5 3263gtc_visit (tree t1, tree t2,
d4398a43
RG
3264 struct sccs *state,
3265 VEC(type_pair_t, heap) **sccstack,
3266 struct pointer_map_t *sccstate,
3267 struct obstack *sccstate_obstack)
d7f09764 3268{
d4398a43
RG
3269 struct sccs *cstate = NULL;
3270 type_pair_t p;
3271 void **slot;
b5e04de5 3272 tree leader1, leader2;
d7f09764
DN
3273
3274 /* Check first for the obvious case of pointer identity. */
3275 if (t1 == t2)
d4398a43 3276 return true;
d7f09764
DN
3277
3278 /* Check that we have two types to compare. */
3279 if (t1 == NULL_TREE || t2 == NULL_TREE)
d4398a43 3280 return false;
d7f09764
DN
3281
3282 /* Can't be the same type if the types don't have the same code. */
3283 if (TREE_CODE (t1) != TREE_CODE (t2))
d4398a43 3284 return false;
b0cc341f
RG
3285
3286 /* Can't be the same type if they have different CV qualifiers. */
3287 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
d4398a43 3288 return false;
d7f09764 3289
61332f77
RG
3290 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3291 return false;
3292
3293 /* Void types and nullptr types are always the same. */
3294 if (TREE_CODE (t1) == VOID_TYPE
3295 || TREE_CODE (t1) == NULLPTR_TYPE)
d4398a43 3296 return true;
d7f09764 3297
61332f77
RG
3298 /* Can't be the same type if they have different alignment or mode. */
3299 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3300 || TYPE_MODE (t1) != TYPE_MODE (t2))
3301 return false;
3302
c9549072 3303 /* Do some simple checks before doing three hashtable queries. */
b0cc341f
RG
3304 if (INTEGRAL_TYPE_P (t1)
3305 || SCALAR_FLOAT_TYPE_P (t1)
3306 || FIXED_POINT_TYPE_P (t1)
3307 || TREE_CODE (t1) == VECTOR_TYPE
b23dc2c0 3308 || TREE_CODE (t1) == COMPLEX_TYPE
61332f77
RG
3309 || TREE_CODE (t1) == OFFSET_TYPE
3310 || POINTER_TYPE_P (t1))
b0cc341f 3311 {
61332f77
RG
3312 /* Can't be the same type if they have different sign or precision. */
3313 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
b0cc341f 3314 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
d4398a43 3315 return false;
b0cc341f
RG
3316
3317 if (TREE_CODE (t1) == INTEGER_TYPE
3ac8781c 3318 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
d4398a43 3319 return false;
b0cc341f
RG
3320
3321 /* That's all we need to check for float and fixed-point types. */
3322 if (SCALAR_FLOAT_TYPE_P (t1)
3323 || FIXED_POINT_TYPE_P (t1))
d4398a43 3324 return true;
b0cc341f 3325
073a8998 3326 /* For other types fall through to more complex checks. */
b0cc341f 3327 }
d7f09764 3328
61332f77
RG
3329 /* If the types have been previously registered and found equal
3330 they still are. */
3331 leader1 = gimple_lookup_type_leader (t1);
3332 leader2 = gimple_lookup_type_leader (t2);
3333 if (leader1 == t2
3334 || t1 == leader2
3335 || (leader1 && leader1 == leader2))
3336 return true;
c9549072 3337
d7f09764
DN
3338 /* If the hash values of t1 and t2 are different the types can't
3339 possibly be the same. This helps keeping the type-pair hashtable
3340 small, only tracking comparisons for hash collisions. */
b5e04de5 3341 if (gimple_type_hash (t1) != gimple_type_hash (t2))
d4398a43 3342 return false;
d7f09764 3343
d4398a43 3344 /* Allocate a new cache entry for this comparison. */
a30726a4 3345 p = lookup_type_pair (t1, t2);
b5e04de5 3346 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
d7f09764
DN
3347 {
3348 /* We have already decided whether T1 and T2 are the
 3349 same; return the cached result. */
b5e04de5 3350 return p->same_p[GTC_MERGE] == 1;
d7f09764 3351 }
d4398a43 3352
d4398a43
RG
3353 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3354 cstate = (struct sccs *)*slot;
67701d1d 3355 /* Not yet visited. DFS recurse. */
d4398a43 3356 if (!cstate)
d7f09764 3357 {
b5e04de5 3358 gimple_types_compatible_p_1 (t1, t2, p,
67701d1d
RG
3359 sccstack, sccstate, sccstate_obstack);
3360 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
d4398a43 3361 state->low = MIN (state->low, cstate->low);
d7f09764 3362 }
67701d1d 3363 /* If the type is still on the SCC stack adjust the parents low. */
d4398a43
RG
3364 if (cstate->dfsnum < state->dfsnum
3365 && cstate->on_sccstack)
3366 state->low = MIN (cstate->dfsnum, state->low);
d7f09764 3367
67701d1d
RG
3368 /* Return the current lattice value. We start with an equality
3369 assumption so types part of a SCC will be optimistically
3370 treated equal unless proven otherwise. */
3371 return cstate->u.same_p;
d4398a43
RG
3372}
3373
3374/* Worker for gimple_types_compatible.
3375 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3376
3377static bool
b5e04de5 3378gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
d4398a43
RG
3379 VEC(type_pair_t, heap) **sccstack,
3380 struct pointer_map_t *sccstate,
3381 struct obstack *sccstate_obstack)
3382{
d4398a43
RG
3383 struct sccs *state;
3384
b5e04de5 3385 gcc_assert (p->same_p[GTC_MERGE] == -2);
d7f09764 3386
d4398a43
RG
3387 state = XOBNEW (sccstate_obstack, struct sccs);
3388 *pointer_map_insert (sccstate, p) = state;
3389
3390 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3391 state->dfsnum = gtc_next_dfs_num++;
3392 state->low = state->dfsnum;
3393 state->on_sccstack = true;
67701d1d
RG
3394 /* Start with an equality assumption. As we DFS recurse into child
3395 SCCs this assumption may get revisited. */
3396 state->u.same_p = 1;
d7f09764 3397
e1caba18 3398 /* The struct tags shall compare equal. */
6a20ce76 3399 if (!compare_type_names_p (t1, t2))
e1caba18
RG
3400 goto different_types;
3401
ee7a54c5
RG
3402 /* We may not merge typedef types to the same type in different
3403 contexts. */
3404 if (TYPE_NAME (t1)
3405 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
3406 && DECL_CONTEXT (TYPE_NAME (t1))
3407 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
3408 {
3409 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
3410 DECL_CONTEXT (TYPE_NAME (t2)),
3411 state, sccstack, sccstate, sccstate_obstack))
3412 goto different_types;
3413 }
3414
59a6c9d5
L
3415 /* If their attributes are not the same they can't be the same type. */
3416 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3417 goto different_types;
3418
d7f09764
DN
3419 /* Do type-specific comparisons. */
3420 switch (TREE_CODE (t1))
3421 {
d4398a43
RG
3422 case VECTOR_TYPE:
3423 case COMPLEX_TYPE:
b5e04de5 3424 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
d4398a43
RG
3425 state, sccstack, sccstate, sccstate_obstack))
3426 goto different_types;
3427 goto same_types;
3428
d7f09764
DN
3429 case ARRAY_TYPE:
3430 /* Array types are the same if the element types are the same and
 3431 the number of elements is the same. */
b5e04de5 3432 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
d4398a43 3433 state, sccstack, sccstate, sccstate_obstack)
b0cc341f
RG
3434 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3435 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
d7f09764
DN
3436 goto different_types;
3437 else
3438 {
3439 tree i1 = TYPE_DOMAIN (t1);
3440 tree i2 = TYPE_DOMAIN (t2);
3441
3442 /* For an incomplete external array, the type domain can be
3443 NULL_TREE. Check this condition also. */
3444 if (i1 == NULL_TREE && i2 == NULL_TREE)
3445 goto same_types;
3446 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3447 goto different_types;
d7f09764
DN
3448 else
3449 {
3450 tree min1 = TYPE_MIN_VALUE (i1);
3451 tree min2 = TYPE_MIN_VALUE (i2);
3452 tree max1 = TYPE_MAX_VALUE (i1);
3453 tree max2 = TYPE_MAX_VALUE (i2);
3454
3455 /* The minimum/maximum values have to be the same. */
3456 if ((min1 == min2
f56000ed
EB
3457 || (min1 && min2
3458 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3459 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3460 || operand_equal_p (min1, min2, 0))))
d7f09764 3461 && (max1 == max2
f56000ed
EB
3462 || (max1 && max2
3463 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3464 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3465 || operand_equal_p (max1, max2, 0)))))
d7f09764
DN
3466 goto same_types;
3467 else
3468 goto different_types;
3469 }
3470 }
3471
3472 case METHOD_TYPE:
3473 /* Method types should belong to the same class. */
d4398a43 3474 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
b5e04de5 3475 state, sccstack, sccstate, sccstate_obstack))
d7f09764
DN
3476 goto different_types;
3477
3478 /* Fallthru */
3479
3480 case FUNCTION_TYPE:
3481 /* Function types are the same if the return type and arguments types
3482 are the same. */
b5e04de5
RG
3483 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3484 state, sccstack, sccstate, sccstate_obstack))
bcee752e
RG
3485 goto different_types;
3486
ac9a30ae 3487 if (!comp_type_attributes (t1, t2))
d7f09764 3488 goto different_types;
bcee752e
RG
3489
3490 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3491 goto same_types;
d7f09764
DN
3492 else
3493 {
bcee752e 3494 tree parms1, parms2;
d7f09764 3495
bcee752e
RG
3496 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3497 parms1 && parms2;
3498 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
d7f09764 3499 {
b5e04de5
RG
3500 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3501 state, sccstack, sccstate, sccstate_obstack))
d7f09764 3502 goto different_types;
d7f09764 3503 }
bcee752e
RG
3504
3505 if (parms1 || parms2)
3506 goto different_types;
3507
3508 goto same_types;
d7f09764
DN
3509 }
3510
b23dc2c0
RG
3511 case OFFSET_TYPE:
3512 {
b5e04de5 3513 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
d4398a43
RG
3514 state, sccstack, sccstate, sccstate_obstack)
3515 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
b5e04de5 3516 TYPE_OFFSET_BASETYPE (t2),
d4398a43 3517 state, sccstack, sccstate, sccstate_obstack))
b23dc2c0
RG
3518 goto different_types;
3519
3520 goto same_types;
3521 }
3522
d7f09764
DN
3523 case POINTER_TYPE:
3524 case REFERENCE_TYPE:
e575382e
RG
3525 {
3526 /* If the two pointers have different ref-all attributes,
3527 they can't be the same type. */
3528 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3529 goto different_types;
d7f09764 3530
e575382e
RG
3531 /* Otherwise, pointer and reference types are the same if the
3532 pointed-to types are the same. */
b5e04de5 3533 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
d4398a43 3534 state, sccstack, sccstate, sccstate_obstack))
e575382e
RG
3535 goto same_types;
3536
3537 goto different_types;
3538 }
d7f09764 3539
b0cc341f
RG
3540 case INTEGER_TYPE:
3541 case BOOLEAN_TYPE:
3542 {
3543 tree min1 = TYPE_MIN_VALUE (t1);
3544 tree max1 = TYPE_MAX_VALUE (t1);
3545 tree min2 = TYPE_MIN_VALUE (t2);
3546 tree max2 = TYPE_MAX_VALUE (t2);
3547 bool min_equal_p = false;
3548 bool max_equal_p = false;
3549
3550 /* If either type has a minimum value, the other type must
3551 have the same. */
3552 if (min1 == NULL_TREE && min2 == NULL_TREE)
3553 min_equal_p = true;
3554 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3555 min_equal_p = true;
3556
3557 /* Likewise, if either type has a maximum value, the other
3558 type must have the same. */
3559 if (max1 == NULL_TREE && max2 == NULL_TREE)
3560 max_equal_p = true;
3561 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3562 max_equal_p = true;
3563
3564 if (!min_equal_p || !max_equal_p)
3565 goto different_types;
3566
3567 goto same_types;
3568 }
3569
d7f09764 3570 case ENUMERAL_TYPE:
e575382e 3571 {
b0cc341f
RG
3572 /* FIXME lto, we cannot check bounds on enumeral types because
3573 different front ends will produce different values.
3574 In C, enumeral types are integers, while in C++ each element
3575 will have its own symbolic value. We should decide how enums
3576 are to be represented in GIMPLE and have each front end lower
3577 to that. */
e575382e 3578 tree v1, v2;
d7f09764 3579
b0cc341f 3580 /* For enumeral types, all the values must be the same. */
e575382e
RG
3581 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3582 goto same_types;
d7f09764 3583
e575382e
RG
3584 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3585 v1 && v2;
3586 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3587 {
3588 tree c1 = TREE_VALUE (v1);
3589 tree c2 = TREE_VALUE (v2);
d7f09764 3590
e575382e
RG
3591 if (TREE_CODE (c1) == CONST_DECL)
3592 c1 = DECL_INITIAL (c1);
d7f09764 3593
e575382e
RG
3594 if (TREE_CODE (c2) == CONST_DECL)
3595 c2 = DECL_INITIAL (c2);
d7f09764 3596
e575382e
RG
3597 if (tree_int_cst_equal (c1, c2) != 1)
3598 goto different_types;
abe36b81 3599
b5e04de5 3600 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
abe36b81 3601 goto different_types;
e575382e 3602 }
d7f09764 3603
e575382e
RG
3604 /* If one enumeration has more values than the other, they
3605 are not the same. */
3606 if (v1 || v2)
3607 goto different_types;
d7f09764 3608
e575382e
RG
3609 goto same_types;
3610 }
d7f09764
DN
3611
3612 case RECORD_TYPE:
3613 case UNION_TYPE:
3614 case QUAL_UNION_TYPE:
e575382e
RG
3615 {
3616 tree f1, f2;
d7f09764 3617
e575382e
RG
3618 /* For aggregate types, all the fields must be the same. */
3619 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3620 f1 && f2;
3621 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3622 {
17e99cdb
RG
3623 /* Different field kinds are not compatible. */
3624 if (TREE_CODE (f1) != TREE_CODE (f2))
3625 goto different_types;
3626 /* Field decls must have the same name and offset. */
3627 if (TREE_CODE (f1) == FIELD_DECL
3628 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3629 || !gimple_compare_field_offset (f1, f2)))
3630 goto different_types;
3631 /* All entities should have the same name and type. */
b5e04de5 3632 if (DECL_NAME (f1) != DECL_NAME (f2)
b5e04de5 3633 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
d4398a43 3634 state, sccstack, sccstate, sccstate_obstack))
e575382e
RG
3635 goto different_types;
3636 }
d7f09764 3637
e575382e
RG
3638 /* If one aggregate has more fields than the other, they
3639 are not the same. */
3640 if (f1 || f2)
3641 goto different_types;
d7f09764 3642
e575382e
RG
3643 goto same_types;
3644 }
d7f09764 3645
d7f09764 3646 default:
b0cc341f 3647 gcc_unreachable ();
d7f09764
DN
3648 }
3649
3650 /* Common exit path for types that are not compatible. */
3651different_types:
d4398a43
RG
3652 state->u.same_p = 0;
3653 goto pop;
d7f09764
DN
3654
3655 /* Common exit path for types that are compatible. */
3656same_types:
67701d1d 3657 gcc_assert (state->u.same_p == 1);
d7f09764 3658
d4398a43
RG
3659pop:
3660 if (state->low == state->dfsnum)
3661 {
3662 type_pair_t x;
d7f09764 3663
67701d1d
RG
3664 /* Pop off the SCC and set its cache values to the final
3665 comparison result. */
d4398a43
RG
3666 do
3667 {
3668 struct sccs *cstate;
3669 x = VEC_pop (type_pair_t, *sccstack);
3670 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3671 cstate->on_sccstack = false;
b5e04de5 3672 x->same_p[GTC_MERGE] = state->u.same_p;
d4398a43
RG
3673 }
3674 while (x != p);
3675 }
d7f09764 3676
d4398a43
RG
3677 return state->u.same_p;
3678}
d7f09764 3679
d4398a43
RG
3680/* Return true iff T1 and T2 are structurally identical.  For type
3681   merging purposes, an incomplete type and a complete type are
3682   considered different.  */
d7f09764 3683
0ca8de87 3684static bool
b5e04de5 3685gimple_types_compatible_p (tree t1, tree t2)
d7f09764 3686{
d4398a43
RG
3687 VEC(type_pair_t, heap) *sccstack = NULL;
3688 struct pointer_map_t *sccstate;
3689 struct obstack sccstate_obstack;
3690 type_pair_t p = NULL;
3691 bool res;
b5e04de5 3692 tree leader1, leader2;
d4398a43
RG
3693
3694 /* Before starting to set up the SCC machinery handle simple cases. */
3695
3696 /* Check first for the obvious case of pointer identity. */
3697 if (t1 == t2)
3698 return true;
3699
3700 /* Check that we have two types to compare. */
3701 if (t1 == NULL_TREE || t2 == NULL_TREE)
3702 return false;
3703
d4398a43
RG
3704 /* Can't be the same type if the types don't have the same code. */
3705 if (TREE_CODE (t1) != TREE_CODE (t2))
3706 return false;
3707
3708 /* Can't be the same type if they have different CV qualifiers. */
3709 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3710 return false;
3711
61332f77
RG
3712 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3713 return false;
3714
3715 /* Void types and nullptr types are always the same. */
3716 if (TREE_CODE (t1) == VOID_TYPE
3717 || TREE_CODE (t1) == NULLPTR_TYPE)
d4398a43
RG
3718 return true;
3719
61332f77
RG
3720 /* Can't be the same type if they have different alignment or mode. */
3721 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3722 || TYPE_MODE (t1) != TYPE_MODE (t2))
3723 return false;
3724
d4398a43
RG
3725 /* Do some simple checks before doing three hashtable queries. */
3726 if (INTEGRAL_TYPE_P (t1)
3727 || SCALAR_FLOAT_TYPE_P (t1)
3728 || FIXED_POINT_TYPE_P (t1)
3729 || TREE_CODE (t1) == VECTOR_TYPE
3730 || TREE_CODE (t1) == COMPLEX_TYPE
61332f77
RG
3731 || TREE_CODE (t1) == OFFSET_TYPE
3732 || POINTER_TYPE_P (t1))
d4398a43 3733 {
61332f77
RG
3734 /* Can't be the same type if they have different sign or precision. */
3735 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
d4398a43
RG
3736 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3737 return false;
3738
3739 if (TREE_CODE (t1) == INTEGER_TYPE
3ac8781c 3740 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
d4398a43
RG
3741 return false;
3742
3743 /* That's all we need to check for float and fixed-point types. */
3744 if (SCALAR_FLOAT_TYPE_P (t1)
3745 || FIXED_POINT_TYPE_P (t1))
3746 return true;
3747
073a8998 3748 /* For other types fall through to more complex checks. */
d4398a43
RG
3749 }
3750
61332f77
RG
3751 /* If the types have been previously registered and found equal
3752 they still are. */
3753 leader1 = gimple_lookup_type_leader (t1);
3754 leader2 = gimple_lookup_type_leader (t2);
3755 if (leader1 == t2
3756 || t1 == leader2
3757 || (leader1 && leader1 == leader2))
3758 return true;
d4398a43
RG
3759
3760 /* If the hash values of t1 and t2 are different the types can't
3761 possibly be the same. This helps keeping the type-pair hashtable
3762 small, only tracking comparisons for hash collisions. */
b5e04de5 3763 if (gimple_type_hash (t1) != gimple_type_hash (t2))
d4398a43
RG
3764 return false;
3765
3766 /* If we've visited this type pair before (in the case of aggregates
3767 with self-referential types), and we made a decision, return it. */
a30726a4 3768 p = lookup_type_pair (t1, t2);
b5e04de5 3769 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
d4398a43
RG
3770 {
3771 /* We have already decided whether T1 and T2 are the
3772 same, return the cached result. */
b5e04de5 3773 return p->same_p[GTC_MERGE] == 1;
d4398a43
RG
3774 }
3775
3776 /* Now set up the SCC machinery for the comparison. */
3777 gtc_next_dfs_num = 1;
3778 sccstate = pointer_map_create ();
3779 gcc_obstack_init (&sccstate_obstack);
b5e04de5 3780 res = gimple_types_compatible_p_1 (t1, t2, p,
d4398a43
RG
3781 &sccstack, sccstate, &sccstate_obstack);
3782 VEC_free (type_pair_t, heap, sccstack);
3783 pointer_map_destroy (sccstate);
3784 obstack_free (&sccstate_obstack, NULL);
3785
3786 return res;
3787}
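
/* Editorial sketch, not part of the original source: what the merging
   predicate above is meant to guarantee.  With LTO, two translation
   units each defining "struct S { int i; struct S *next; };" stream in
   two distinct RECORD_TYPE nodes; gimple_types_compatible_p should
   return true for that pair (the SCC machinery handles the
   self-reference through 'next'), so that gimple_register_type can map
   both nodes to one leader.  A hypothetical caller, with assumed
   inputs T_TU1 and T_TU2:

   static void ATTRIBUTE_UNUSED
   example_check_merge (tree t_tu1, tree t_tu2)
   {
     if (gimple_types_compatible_p (t_tu1, t_tu2))
       gcc_assert (gimple_register_type (t_tu1)
		   == gimple_register_type (t_tu2));
   }  */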
d7f09764 3788
d7f09764
DN
3789
3790static hashval_t
3791iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
b5e04de5 3792 struct pointer_map_t *, struct obstack *);
d7f09764
DN
3793
3794/* DFS visit the edge from the callers type with state *STATE to T.
3795 Update the callers type hash V with the hash for T if it is not part
3796 of the SCC containing the callers type and return it.
3797 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3798
3799static hashval_t
3800visit (tree t, struct sccs *state, hashval_t v,
3801 VEC (tree, heap) **sccstack,
3802 struct pointer_map_t *sccstate,
b5e04de5 3803 struct obstack *sccstate_obstack)
d7f09764
DN
3804{
3805 struct sccs *cstate = NULL;
0f443ad0 3806 struct tree_int_map m;
d7f09764
DN
3807 void **slot;
3808
3809 /* If there is a hash value recorded for this type then it can't
3810 possibly be part of our parent SCC. Simply mix in its hash. */
0f443ad0 3811 m.base.from = t;
b5e04de5 3812 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
0f443ad0
RG
3813 && *slot)
3814 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
d7f09764
DN
3815
3816 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3817 cstate = (struct sccs *)*slot;
3818 if (!cstate)
3819 {
3820 hashval_t tem;
3821 /* Not yet visited. DFS recurse. */
3822 tem = iterative_hash_gimple_type (t, v,
b5e04de5 3823 sccstack, sccstate, sccstate_obstack);
d7f09764
DN
3824 if (!cstate)
3825 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3826 state->low = MIN (state->low, cstate->low);
3827 /* If the type is no longer on the SCC stack and thus is not part
3828 of the parent's SCC, mix in its hash value.  Otherwise we will
3829 ignore the type for hashing purposes and return the unaltered
3830 hash value. */
3831 if (!cstate->on_sccstack)
3832 return tem;
3833 }
3834 if (cstate->dfsnum < state->dfsnum
3835 && cstate->on_sccstack)
3836 state->low = MIN (cstate->dfsnum, state->low);
3837
3838 /* We are part of our parent's SCC: skip this type during hashing
3839 and return the unaltered hash value. */
3840 return v;
3841}
3842
77785f4f 3843/* Hash NAME with the previous hash value V and return it. */
d7f09764
DN
3844
3845static hashval_t
77785f4f 3846iterative_hash_name (tree name, hashval_t v)
d7f09764 3847{
d7f09764
DN
3848 if (!name)
3849 return v;
ee7a54c5 3850 v = iterative_hash_hashval_t (TREE_CODE (name), v);
d7f09764
DN
3851 if (TREE_CODE (name) == TYPE_DECL)
3852 name = DECL_NAME (name);
3853 if (!name)
3854 return v;
3855 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
d7f09764
DN
3856 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3857}
3858
3066f593
RG
3859/* A type, hashvalue pair for sorting SCC members. */
3860
3861struct type_hash_pair {
3862 tree type;
3863 hashval_t hash;
3864};
3865
3866/* Compare two type, hashvalue pairs. */
3867
3868static int
3869type_hash_pair_compare (const void *p1_, const void *p2_)
3870{
3871 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
3872 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
5beaf664
RG
3873 if (p1->hash < p2->hash)
3874 return -1;
3875 else if (p1->hash > p2->hash)
3876 return 1;
3877 return 0;
3066f593
RG
3878}
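
/* Editorial sketch: how the comparator above is used.  SCC members are
   sorted by hash before mixing so the final value is independent of
   visitation order; ties are deliberately left unbroken because
   pointer order is not stable across translation units.  A minimal
   illustration, with a hypothetical helper name:

   static void ATTRIBUTE_UNUSED
   example_sort_scc_pairs (struct type_hash_pair *pairs, size_t n)
   {
     // Orders PAIRS by ascending hash; members with equal hashes keep
     // an unspecified relative order.
     qsort (pairs, n, sizeof (struct type_hash_pair),
	    type_hash_pair_compare);
   }  */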
3879
d7f09764
DN
3880/* Return a hash value for gimple type TYPE combined with VAL.
3881 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
3882
3883 To hash a type we end up hashing in types that are reachable.
3884 Through pointers we can end up with cycles, which mess up the
3885 required property that structurally equivalent types must get
3886 the same hash value.  To avoid this we have to
3887 hash all types in a cycle (the SCC) in a commutative way. The
3888 easiest way is to not mix in the hashes of the SCC members at
3889 all. To make this work we have to delay setting the hash
3890 values of the SCC until it is complete. */
3891
3892static hashval_t
3893iterative_hash_gimple_type (tree type, hashval_t val,
3894 VEC(tree, heap) **sccstack,
3895 struct pointer_map_t *sccstate,
b5e04de5 3896 struct obstack *sccstate_obstack)
d7f09764
DN
3897{
3898 hashval_t v;
3899 void **slot;
3900 struct sccs *state;
3901
0f443ad0 3902 /* Not visited during this DFS walk. */
77a74ed7 3903 gcc_checking_assert (!pointer_map_contains (sccstate, type));
d7f09764
DN
3904 state = XOBNEW (sccstate_obstack, struct sccs);
3905 *pointer_map_insert (sccstate, type) = state;
3906
3907 VEC_safe_push (tree, heap, *sccstack, type);
3908 state->dfsnum = next_dfs_num++;
3909 state->low = state->dfsnum;
3910 state->on_sccstack = true;
3911
3912 /* Combine a few common features of types so that types are grouped into
3913 smaller sets; when searching for existing matching types to merge,
3914 only existing types having the same features as the new type will be
3915 checked. */
e1caba18 3916 v = iterative_hash_name (TYPE_NAME (type), 0);
ee7a54c5
RG
3917 if (TYPE_NAME (type)
3918 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3919 && DECL_CONTEXT (TYPE_NAME (type))
3920 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
3921 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
3922 sccstack, sccstate, sccstate_obstack);
e1caba18 3923 v = iterative_hash_hashval_t (TREE_CODE (type), v);
d7f09764
DN
3924 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
3925 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3926
3927 /* Do not hash the type's size as this will cause differences in
3928 hash values for the complete vs. the incomplete type variant. */
3929
3930 /* Incorporate common features of numerical types. */
3931 if (INTEGRAL_TYPE_P (type)
3932 || SCALAR_FLOAT_TYPE_P (type)
3933 || FIXED_POINT_TYPE_P (type))
3934 {
3935 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3936 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3937 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3938 }
3939
3940 /* For pointer and reference types, fold in information about the type
1e83b5f1 3941 pointed to. */
d7f09764 3942 if (POINTER_TYPE_P (type))
1e83b5f1
RG
3943 v = visit (TREE_TYPE (type), state, v,
3944 sccstack, sccstate, sccstate_obstack);
d7f09764 3945
f798226d
RG
3946 /* For integer types hash the type's min/max values and the string flag. */
3947 if (TREE_CODE (type) == INTEGER_TYPE)
3948 {
429c98c9
RG
3949 /* OMP lowering can introduce error_mark_node in place of
3950 random local decls in types. */
3951 if (TYPE_MIN_VALUE (type) != error_mark_node)
3952 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
3953 if (TYPE_MAX_VALUE (type) != error_mark_node)
3954 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
f798226d
RG
3955 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3956 }
3957
2e745103
EB
3958 /* For array types hash the domain and the string flag. */
3959 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
f798226d
RG
3960 {
3961 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3962 v = visit (TYPE_DOMAIN (type), state, v,
b5e04de5 3963 sccstack, sccstate, sccstate_obstack);
f798226d
RG
3964 }
3965
3966 /* Recurse for aggregates with a single element type. */
d7f09764
DN
3967 if (TREE_CODE (type) == ARRAY_TYPE
3968 || TREE_CODE (type) == COMPLEX_TYPE
3969 || TREE_CODE (type) == VECTOR_TYPE)
3970 v = visit (TREE_TYPE (type), state, v,
b5e04de5 3971 sccstack, sccstate, sccstate_obstack);
d7f09764
DN
3972
3973 /* Incorporate function return and argument types. */
3974 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3975 {
3976 unsigned na;
3977 tree p;
3978
3979 /* For method types also incorporate their parent class. */
3980 if (TREE_CODE (type) == METHOD_TYPE)
3981 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
b5e04de5 3982 sccstack, sccstate, sccstate_obstack);
d7f09764 3983
1e83b5f1
RG
3984 /* Check result and argument types. */
3985 v = visit (TREE_TYPE (type), state, v,
3986 sccstack, sccstate, sccstate_obstack);
d7f09764
DN
3987 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3988 {
1e83b5f1
RG
3989 v = visit (TREE_VALUE (p), state, v,
3990 sccstack, sccstate, sccstate_obstack);
d7f09764
DN
3991 na++;
3992 }
3993
3994 v = iterative_hash_hashval_t (na, v);
3995 }
3996
aa47290b 3997 if (RECORD_OR_UNION_TYPE_P (type))
d7f09764
DN
3998 {
3999 unsigned nf;
4000 tree f;
4001
d7f09764
DN
4002 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4003 {
b5e04de5 4004 v = iterative_hash_name (DECL_NAME (f), v);
d7f09764 4005 v = visit (TREE_TYPE (f), state, v,
b5e04de5 4006 sccstack, sccstate, sccstate_obstack);
d7f09764
DN
4007 nf++;
4008 }
4009
4010 v = iterative_hash_hashval_t (nf, v);
4011 }
4012
4013 /* Record hash for us. */
d4398a43 4014 state->u.hash = v;
d7f09764
DN
4015
4016 /* See if we found an SCC. */
4017 if (state->low == state->dfsnum)
4018 {
4019 tree x;
3066f593 4020 struct tree_int_map *m;
d7f09764
DN
4021
4022 /* Pop off the SCC and set its hash values. */
3066f593 4023 x = VEC_pop (tree, *sccstack);
3066f593
RG
4024 /* Optimize SCC size one. */
4025 if (x == type)
d7f09764 4026 {
008bad7a 4027 state->on_sccstack = false;
3066f593 4028 m = ggc_alloc_cleared_tree_int_map ();
0f443ad0 4029 m->base.from = x;
008bad7a 4030 m->to = v;
b5e04de5 4031 slot = htab_find_slot (type_hash_cache, m, INSERT);
0f443ad0
RG
4032 gcc_assert (!*slot);
4033 *slot = (void *) m;
d7f09764 4034 }
3066f593
RG
4035 else
4036 {
008bad7a 4037 struct sccs *cstate;
3066f593
RG
4038 unsigned first, i, size, j;
4039 struct type_hash_pair *pairs;
4040 /* Pop off the SCC and build an array of type, hash pairs. */
4041 first = VEC_length (tree, *sccstack) - 1;
4042 while (VEC_index (tree, *sccstack, first) != type)
4043 --first;
4044 size = VEC_length (tree, *sccstack) - first + 1;
4045 pairs = XALLOCAVEC (struct type_hash_pair, size);
4046 i = 0;
008bad7a
RG
4047 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4048 cstate->on_sccstack = false;
3066f593
RG
4049 pairs[i].type = x;
4050 pairs[i].hash = cstate->u.hash;
4051 do
4052 {
4053 x = VEC_pop (tree, *sccstack);
4054 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4055 cstate->on_sccstack = false;
4056 ++i;
4057 pairs[i].type = x;
4058 pairs[i].hash = cstate->u.hash;
4059 }
4060 while (x != type);
4061 gcc_assert (i + 1 == size);
4062 /* Sort the array of type, hash pairs so that when we mix in
4063 all members of the SCC the hash value becomes independent of
4064 the order in which we visited the SCC.  Disregard hashes equal to
4065 the hash of the type we mix into because we cannot guarantee
4066 a stable sort for those across different TUs. */
4067 qsort (pairs, size, sizeof (struct type_hash_pair),
4068 type_hash_pair_compare);
4069 for (i = 0; i < size; ++i)
4070 {
4071 hashval_t hash;
4072 m = ggc_alloc_cleared_tree_int_map ();
4073 m->base.from = pairs[i].type;
4074 hash = pairs[i].hash;
4075 /* Skip same hashes. */
4076 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4077 ;
4078 for (; j < size; ++j)
4079 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4080 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4081 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4082 m->to = hash;
008bad7a
RG
4083 if (pairs[i].type == type)
4084 v = hash;
3066f593
RG
4085 slot = htab_find_slot (type_hash_cache, m, INSERT);
4086 gcc_assert (!*slot);
4087 *slot = (void *) m;
4088 }
4089 }
d7f09764
DN
4090 }
4091
4092 return iterative_hash_hashval_t (v, val);
4093}
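
/* Editorial note: the sorting above is needed because
   iterative_hash_hashval_t is order-sensitive.  A toy sketch of the
   problem it avoids, with made-up values:

   static void ATTRIBUTE_UNUSED
   example_order_dependence (void)
   {
     hashval_t a = 0x1234, b = 0xbeef;
     // h1 and h2 generally differ, so mixing SCC member hashes in
     // visitation order would not be stable across entry points.
     hashval_t h1
       = iterative_hash_hashval_t (b, iterative_hash_hashval_t (a, 0));
     hashval_t h2
       = iterative_hash_hashval_t (a, iterative_hash_hashval_t (b, 0));
     (void) h1;
     (void) h2;
   }  */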
4094
4095
4096/* Returns a hash value for P (assumed to be a type). The hash value
4097 is computed using some distinguishing features of the type. Note
4098 that we cannot use pointer hashing here as we may be dealing with
4099 two distinct instances of the same type.
4100
4101 This function should produce the same hash value for two compatible
4102 types according to gimple_types_compatible_p. */
4103
4104static hashval_t
b5e04de5 4105gimple_type_hash (const void *p)
d7f09764 4106{
ddd4d0e1 4107 const_tree t = (const_tree) p;
d7f09764
DN
4108 VEC(tree, heap) *sccstack = NULL;
4109 struct pointer_map_t *sccstate;
4110 struct obstack sccstate_obstack;
4111 hashval_t val;
4112 void **slot;
0f443ad0 4113 struct tree_int_map m;
d7f09764 4114
b5e04de5 4115 if (type_hash_cache == NULL)
0f443ad0
RG
4116 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4117 tree_int_map_eq, NULL);
d7f09764 4118
0f443ad0 4119 m.base.from = CONST_CAST_TREE (t);
b5e04de5 4120 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
0f443ad0
RG
4121 && *slot)
4122 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
d7f09764
DN
4123
4124 /* Perform a DFS walk and pre-hash all reachable types. */
4125 next_dfs_num = 1;
4126 sccstate = pointer_map_create ();
4127 gcc_obstack_init (&sccstate_obstack);
ddd4d0e1 4128 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
b5e04de5 4129 &sccstack, sccstate, &sccstate_obstack);
d7f09764
DN
4130 VEC_free (tree, heap, sccstack);
4131 pointer_map_destroy (sccstate);
4132 obstack_free (&sccstate_obstack, NULL);
4133
4134 return val;
4135}
4136
825b27de
RG
4137/* Return a hash value for gimple type TYPE combined with VAL.
4138
4139 The hash value returned is equal for types considered compatible
4140 by gimple_canonical_types_compatible_p. */
4141
4142static hashval_t
4143iterative_hash_canonical_type (tree type, hashval_t val)
4144{
4145 hashval_t v;
4146 void **slot;
4147 struct tree_int_map *mp, m;
4148
4149 m.base.from = type;
4150 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4151 && *slot)
d0340959 4152 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
825b27de
RG
4153
4154 /* Combine a few common features of types so that types are grouped into
4155 smaller sets; when searching for existing matching types to merge,
4156 only existing types having the same features as the new type will be
4157 checked. */
4158 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
825b27de 4159 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
61332f77
RG
4160 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4161 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
825b27de
RG
4162
4163 /* Incorporate common features of numerical types. */
4164 if (INTEGRAL_TYPE_P (type)
4165 || SCALAR_FLOAT_TYPE_P (type)
61332f77
RG
4166 || FIXED_POINT_TYPE_P (type)
4167 || TREE_CODE (type) == VECTOR_TYPE
4168 || TREE_CODE (type) == COMPLEX_TYPE
4169 || TREE_CODE (type) == OFFSET_TYPE
4170 || POINTER_TYPE_P (type))
825b27de
RG
4171 {
4172 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
825b27de
RG
4173 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4174 }
4175
4176 /* For pointer and reference types, fold in information about the type
4177 pointed to but do not recurse to the pointed-to type. */
4178 if (POINTER_TYPE_P (type))
4179 {
4180 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
61332f77
RG
4181 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4182 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
825b27de
RG
4183 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4184 }
4185
2e745103 4186 /* For integer types hash only the string flag. */
825b27de 4187 if (TREE_CODE (type) == INTEGER_TYPE)
3ac8781c 4188 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
825b27de 4189
2e745103
EB
4190 /* For array types hash the domain bounds and the string flag. */
4191 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
825b27de
RG
4192 {
4193 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
2e745103
EB
4194 /* OMP lowering can introduce error_mark_node in place of
4195 random local decls in types. */
4196 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4197 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
4198 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4199 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
825b27de
RG
4200 }
4201
4202 /* Recurse for aggregates with a single element type. */
4203 if (TREE_CODE (type) == ARRAY_TYPE
4204 || TREE_CODE (type) == COMPLEX_TYPE
4205 || TREE_CODE (type) == VECTOR_TYPE)
4206 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4207
4208 /* Incorporate function return and argument types. */
4209 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4210 {
4211 unsigned na;
4212 tree p;
4213
4214 /* For method types also incorporate their parent class. */
4215 if (TREE_CODE (type) == METHOD_TYPE)
4216 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4217
6a20ce76 4218 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
825b27de
RG
4219
4220 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4221 {
6a20ce76 4222 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
825b27de
RG
4223 na++;
4224 }
4225
4226 v = iterative_hash_hashval_t (na, v);
4227 }
4228
aa47290b 4229 if (RECORD_OR_UNION_TYPE_P (type))
825b27de
RG
4230 {
4231 unsigned nf;
4232 tree f;
4233
4234 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
e7cfe241
RG
4235 if (TREE_CODE (f) == FIELD_DECL)
4236 {
4237 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4238 nf++;
4239 }
825b27de
RG
4240
4241 v = iterative_hash_hashval_t (nf, v);
4242 }
4243
4244 /* Cache the just computed hash value. */
4245 mp = ggc_alloc_cleared_tree_int_map ();
4246 mp->base.from = type;
4247 mp->to = v;
4248 *slot = (void *) mp;
4249
4250 return iterative_hash_hashval_t (v, val);
4251}
4252
a844a60b
RG
4253static hashval_t
4254gimple_canonical_type_hash (const void *p)
4255{
825b27de
RG
4256 if (canonical_type_hash_cache == NULL)
4257 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4258 tree_int_map_eq, NULL);
4259
4260 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
a844a60b
RG
4261}
4262
d7f09764
DN
4263
4264/* Returns nonzero if P1 and P2 are equal. */
4265
4266static int
4267gimple_type_eq (const void *p1, const void *p2)
4268{
4269 const_tree t1 = (const_tree) p1;
4270 const_tree t2 = (const_tree) p2;
f5d6836a 4271 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
b5e04de5 4272 CONST_CAST_TREE (t2));
d7f09764
DN
4273}
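
/* Editorial note: gimple_type_hash and gimple_type_eq form the usual
   hashtab callback pair, so the invariant documented on
   gimple_type_hash (equal types hash equally) is what makes the table
   sound.  A hypothetical consistency check:

   static void ATTRIBUTE_UNUSED
   example_hash_eq_contract (const_tree t1, const_tree t2)
   {
     // Equality implies equal hashes; the converse does not hold,
     // since collisions are resolved by gimple_type_eq.
     if (gimple_type_eq (t1, t2))
       gcc_assert (gimple_type_hash (t1) == gimple_type_hash (t2));
   }  */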
4274
4275
93b2a207
RG
4276/* Worker for gimple_register_type.
4277 Register type T in the global type table gimple_types.
4278 When REGISTERING_MV is false first recurse for the main variant of T. */
d7f09764 4279
93b2a207
RG
4280static tree
4281gimple_register_type_1 (tree t, bool registering_mv)
d7f09764
DN
4282{
4283 void **slot;
4490cae6 4284 gimple_type_leader_entry *leader;
d7f09764 4285
4490cae6
RG
4286 /* If we registered this type before return the cached result. */
4287 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4288 if (leader->type == t)
4289 return leader->leader;
4a2ac96f 4290
20d36f0e
RG
4291 /* Always register the main variant first. This is important so we
4292 pick up the non-typedef variants as canonical; otherwise we'll end
93b2a207
RG
4293 up taking typedef ids for structure tags during comparison.
4294 It also makes sure that main variants will be merged to main variants.
4295 As we are operating on a possibly partially fixed up type graph
4296 do not bother to recurse more than once, otherwise we may end up
31b3ca64
RG
4297 walking in circles.
4298 If we are registering a main variant it will either remain its
4299 own main variant or it will be merged to something else in which
4300 case we do not care for the main variant leader. */
93b2a207
RG
4301 if (!registering_mv
4302 && TYPE_MAIN_VARIANT (t) != t)
fb291a1e 4303 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
d7f09764 4304
fb291a1e 4305 /* See if we already have an equivalent type registered. */
d7f09764
DN
4306 slot = htab_find_slot (gimple_types, t, INSERT);
4307 if (*slot
4308 && *(tree *)slot != t)
4309 {
4310 tree new_type = (tree) *((tree *) slot);
4490cae6
RG
4311 leader->type = t;
4312 leader->leader = new_type;
fb291a1e 4313 return new_type;
4490cae6
RG
4314 }
4315
fb291a1e
RG
4316 /* If not, insert it to the cache and the hash. */
4317 leader->type = t;
4318 leader->leader = t;
4319 *slot = (void *) t;
4490cae6
RG
4320 return t;
4321}
4322
93b2a207
RG
4323/* Register type T in the global type table gimple_types.
4324 If another type T', compatible with T, already existed in
4325 gimple_types then return T', otherwise return T. This is used by
4326 LTO to merge identical types read from different TUs. */
4327
4328tree
4329gimple_register_type (tree t)
4330{
4331 gcc_assert (TYPE_P (t));
4332
4333 if (!gimple_type_leader)
4334 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4335 (GIMPLE_TYPE_LEADER_SIZE);
4336
4337 if (gimple_types == NULL)
4338 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4339
4340 return gimple_register_type_1 (t, false);
4341}
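
/* Editorial sketch: the intended use from the LTO reader.  Each type
   streamed in from an object file is funnelled through
   gimple_register_type, so identical types from different files
   collapse onto one representative.  STREAMED_TYPE is a hypothetical
   input.

   static tree ATTRIBUTE_UNUSED
   example_lto_merge (tree streamed_type)
   {
     // Returns STREAMED_TYPE itself on first occurrence, otherwise
     // the previously registered compatible leader.
     return gimple_register_type (streamed_type);
   }  */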
4490cae6 4342
825b27de
RG
4343/* The TYPE_CANONICAL merging machinery. It should closely resemble
4344 the middle-end types_compatible_p function. It needs to avoid
4345 claiming types are different for types that should be treated
4346 the same with respect to TBAA. Canonical types are also used
4347 for IL consistency checks via the useless_type_conversion_p
4348 predicate which does not handle all type kinds itself but falls
4349 back to pointer-comparison of TYPE_CANONICAL for aggregates
4350 for example. */
4351
4352/* Return true iff T1 and T2 are structurally identical as far as
4353   TBAA is concerned.  */
4354
4355static bool
4356gimple_canonical_types_compatible_p (tree t1, tree t2)
4357{
825b27de
RG
4358 /* Before starting to set up the SCC machinery handle simple cases. */
4359
4360 /* Check first for the obvious case of pointer identity. */
4361 if (t1 == t2)
4362 return true;
4363
4364 /* Check that we have two types to compare. */
4365 if (t1 == NULL_TREE || t2 == NULL_TREE)
4366 return false;
4367
4368 /* If the types have been previously registered and found equal
4369 they still are. */
4370 if (TYPE_CANONICAL (t1)
4371 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4372 return true;
4373
4374 /* Can't be the same type if the types don't have the same code. */
4375 if (TREE_CODE (t1) != TREE_CODE (t2))
4376 return false;
4377
61332f77 4378 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
825b27de
RG
4379 return false;
4380
61332f77
RG
4381 /* Qualifiers do not matter for canonical type comparison purposes. */
4382
4383 /* Void types and nullptr types are always the same. */
4384 if (TREE_CODE (t1) == VOID_TYPE
4385 || TREE_CODE (t1) == NULLPTR_TYPE)
825b27de
RG
4386 return true;
4387
61332f77
RG
4388 /* Can't be the same type if they have different alignment, or mode. */
4389 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4390 || TYPE_MODE (t1) != TYPE_MODE (t2))
4391 return false;
4392
4393 /* Non-aggregate types can be handled cheaply. */
825b27de
RG
4394 if (INTEGRAL_TYPE_P (t1)
4395 || SCALAR_FLOAT_TYPE_P (t1)
4396 || FIXED_POINT_TYPE_P (t1)
4397 || TREE_CODE (t1) == VECTOR_TYPE
4398 || TREE_CODE (t1) == COMPLEX_TYPE
61332f77
RG
4399 || TREE_CODE (t1) == OFFSET_TYPE
4400 || POINTER_TYPE_P (t1))
825b27de 4401 {
61332f77
RG
4402 /* Can't be the same type if they have different sign or precision. */
4403 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
825b27de
RG
4404 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4405 return false;
4406
4407 if (TREE_CODE (t1) == INTEGER_TYPE
3ac8781c 4408 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
825b27de
RG
4409 return false;
4410
61332f77
RG
4411 /* For canonical type comparisons we do not want to build SCCs
4412 so we cannot compare pointed-to types. But we can, for now,
4413 require the same pointed-to type kind and match what
4414 useless_type_conversion_p would do. */
4415 if (POINTER_TYPE_P (t1))
4416 {
4417 /* If the two pointers have different ref-all attributes,
4418 they can't be the same type. */
4419 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4420 return false;
825b27de 4421
61332f77
RG
4422 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4423 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4424 return false;
825b27de 4425
61332f77
RG
4426 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4427 return false;
4428
4429 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4430 return false;
4431 }
4432
4433 /* Tail-recurse to components. */
4434 if (TREE_CODE (t1) == VECTOR_TYPE
4435 || TREE_CODE (t1) == COMPLEX_TYPE)
4436 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4437 TREE_TYPE (t2));
4438
4439 return true;
825b27de
RG
4440 }
4441
825b27de
RG
4442 /* Do type-specific comparisons. */
4443 switch (TREE_CODE (t1))
4444 {
825b27de
RG
4445 case ARRAY_TYPE:
4446 /* Array types are the same if the element types are the same and
4447 the number of elements are the same. */
4448 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4449 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4450 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
b8a71aed 4451 return false;
825b27de
RG
4452 else
4453 {
4454 tree i1 = TYPE_DOMAIN (t1);
4455 tree i2 = TYPE_DOMAIN (t2);
4456
4457 /* For an incomplete external array, the type domain can be
4458 NULL_TREE. Check this condition also. */
4459 if (i1 == NULL_TREE && i2 == NULL_TREE)
b8a71aed 4460 return true;
825b27de 4461 else if (i1 == NULL_TREE || i2 == NULL_TREE)
b8a71aed 4462 return false;
825b27de
RG
4463 else
4464 {
4465 tree min1 = TYPE_MIN_VALUE (i1);
4466 tree min2 = TYPE_MIN_VALUE (i2);
4467 tree max1 = TYPE_MAX_VALUE (i1);
4468 tree max2 = TYPE_MAX_VALUE (i2);
4469
4470 /* The minimum/maximum values have to be the same. */
4471 if ((min1 == min2
4472 || (min1 && min2
4473 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4474 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4475 || operand_equal_p (min1, min2, 0))))
4476 && (max1 == max2
4477 || (max1 && max2
4478 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4479 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4480 || operand_equal_p (max1, max2, 0)))))
b8a71aed 4481 return true;
825b27de 4482 else
b8a71aed 4483 return false;
825b27de
RG
4484 }
4485 }
4486
4487 case METHOD_TYPE:
825b27de
RG
4488 case FUNCTION_TYPE:
4489 /* Function types are the same if the return type and arguments types
4490 are the same. */
6a20ce76 4491 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
b8a71aed 4492 return false;
825b27de
RG
4493
4494 if (!comp_type_attributes (t1, t2))
b8a71aed 4495 return false;
825b27de
RG
4496
4497 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
b8a71aed 4498 return true;
825b27de
RG
4499 else
4500 {
4501 tree parms1, parms2;
4502
4503 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4504 parms1 && parms2;
4505 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4506 {
6a20ce76
RG
4507 if (!gimple_canonical_types_compatible_p
4508 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
b8a71aed 4509 return false;
825b27de
RG
4510 }
4511
4512 if (parms1 || parms2)
b8a71aed 4513 return false;
825b27de 4514
b8a71aed 4515 return true;
825b27de
RG
4516 }
4517
825b27de
RG
4518 case RECORD_TYPE:
4519 case UNION_TYPE:
4520 case QUAL_UNION_TYPE:
4521 {
4522 tree f1, f2;
4523
4524 /* For aggregate types, all the fields must be the same. */
4525 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4acd1c84 4526 f1 || f2;
825b27de
RG
4527 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4528 {
e7cfe241
RG
4529 /* Skip non-fields. */
4530 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4531 f1 = TREE_CHAIN (f1);
4532 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4533 f2 = TREE_CHAIN (f2);
4534 if (!f1 || !f2)
4535 break;
825b27de
RG
4536 /* The fields must have the same name, offset and type. */
4537 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4538 || !gimple_compare_field_offset (f1, f2)
4539 || !gimple_canonical_types_compatible_p
4540 (TREE_TYPE (f1), TREE_TYPE (f2)))
b8a71aed 4541 return false;
825b27de
RG
4542 }
4543
4544 /* If one aggregate has more fields than the other, they
4545 are not the same. */
4546 if (f1 || f2)
b8a71aed 4547 return false;
825b27de 4548
b8a71aed 4549 return true;
825b27de
RG
4550 }
4551
4552 default:
4553 gcc_unreachable ();
4554 }
825b27de
RG
4555}
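
/* Editorial sketch: unlike the merging predicate, qualifiers are
   ignored here because TBAA does not distinguish, say, 'const int'
   from 'int'.  Assuming T is a complete type node, a hypothetical
   check:

   static void ATTRIBUTE_UNUSED
   example_canonical_ignores_quals (tree t)
   {
     // A qualified variant is TBAA-compatible with its main variant.
     tree qt = build_qualified_type (t, TYPE_QUAL_CONST);
     gcc_assert (gimple_canonical_types_compatible_p (t, qt));
   }  */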
4556
4557
4490cae6
RG
4558/* Returns nonzero if P1 and P2 are equal. */
4559
4560static int
4561gimple_canonical_type_eq (const void *p1, const void *p2)
4562{
4563 const_tree t1 = (const_tree) p1;
4564 const_tree t2 = (const_tree) p2;
825b27de
RG
4565 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4566 CONST_CAST_TREE (t2));
4490cae6
RG
4567}
4568
4569/* Register type T in the global canonical type table
4570 gimple_canonical_types.  If another type T', compatible with T,
4571 already existed there then return T', otherwise return T.  This is used by
96d91dcf
RG
4572 LTO to merge identical types read from different TUs.
4573
4574 ??? This merging does not exactly match how the tree.c middle-end
4575 functions will assign TYPE_CANONICAL when new types are created
4576 during optimization (which at least happens for pointer and array
4577 types). */
4490cae6
RG
4578
4579tree
4580gimple_register_canonical_type (tree t)
4581{
4582 void **slot;
4583
4584 gcc_assert (TYPE_P (t));
4585
61332f77
RG
4586 if (TYPE_CANONICAL (t))
4587 return TYPE_CANONICAL (t);
4588
4490cae6 4589 if (gimple_canonical_types == NULL)
a844a60b 4590 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4490cae6
RG
4591 gimple_canonical_type_eq, 0);
4592
4593 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4594 if (*slot
4595 && *(tree *)slot != t)
4596 {
4597 tree new_type = (tree) *((tree *) slot);
4598
4599 TYPE_CANONICAL (t) = new_type;
4600 t = new_type;
4601 }
4602 else
4603 {
4604 TYPE_CANONICAL (t) = t;
4a2ac96f
RG
4605 *slot = (void *) t;
4606 }
d7f09764
DN
4607
4608 return t;
4609}
4610
4611
4612/* Show statistics on references to the global type table gimple_types. */
4613
4614void
4615print_gimple_types_stats (void)
4616{
4617 if (gimple_types)
4618 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4619 "%ld searches, %ld collisions (ratio: %f)\n",
4620 (long) htab_size (gimple_types),
4621 (long) htab_elements (gimple_types),
4622 (long) gimple_types->searches,
4623 (long) gimple_types->collisions,
4624 htab_collisions (gimple_types));
4625 else
4626 fprintf (stderr, "GIMPLE type table is empty\n");
a844a60b
RG
4627 if (type_hash_cache)
4628 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4629 "%ld searches, %ld collisions (ratio: %f)\n",
4630 (long) htab_size (type_hash_cache),
4631 (long) htab_elements (type_hash_cache),
4632 (long) type_hash_cache->searches,
4633 (long) type_hash_cache->collisions,
4634 htab_collisions (type_hash_cache));
4635 else
4636 fprintf (stderr, "GIMPLE type hash table is empty\n");
4490cae6
RG
4637 if (gimple_canonical_types)
4638 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4639 "%ld searches, %ld collisions (ratio: %f)\n",
4640 (long) htab_size (gimple_canonical_types),
4641 (long) htab_elements (gimple_canonical_types),
4642 (long) gimple_canonical_types->searches,
4643 (long) gimple_canonical_types->collisions,
4644 htab_collisions (gimple_canonical_types));
4645 else
4646 fprintf (stderr, "GIMPLE canonical type table is empty\n");
a844a60b
RG
4647 if (canonical_type_hash_cache)
4648 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
0f443ad0 4649 "%ld searches, %ld collisions (ratio: %f)\n",
a844a60b
RG
4650 (long) htab_size (canonical_type_hash_cache),
4651 (long) htab_elements (canonical_type_hash_cache),
4652 (long) canonical_type_hash_cache->searches,
4653 (long) canonical_type_hash_cache->collisions,
4654 htab_collisions (canonical_type_hash_cache));
0f443ad0 4655 else
a844a60b 4656 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
d7f09764
DN
4657}
4658
0d0bfe17
RG
4659/* Free the gimple type hashtables used for LTO type merging. */
4660
4661void
4662free_gimple_type_tables (void)
4663{
4664 /* Last chance to print stats for the tables. */
4665 if (flag_lto_report)
4666 print_gimple_types_stats ();
4667
4668 if (gimple_types)
4669 {
4670 htab_delete (gimple_types);
4671 gimple_types = NULL;
4672 }
4490cae6
RG
4673 if (gimple_canonical_types)
4674 {
4675 htab_delete (gimple_canonical_types);
4676 gimple_canonical_types = NULL;
4677 }
0d0bfe17
RG
4678 if (type_hash_cache)
4679 {
0f443ad0 4680 htab_delete (type_hash_cache);
0d0bfe17
RG
4681 type_hash_cache = NULL;
4682 }
a844a60b
RG
4683 if (canonical_type_hash_cache)
4684 {
4685 htab_delete (canonical_type_hash_cache);
4686 canonical_type_hash_cache = NULL;
4687 }
a30726a4 4688 if (type_pair_cache)
0d0bfe17 4689 {
a30726a4
JH
4690 free (type_pair_cache);
4691 type_pair_cache = NULL;
0d0bfe17 4692 }
4490cae6 4693 gimple_type_leader = NULL;
0d0bfe17
RG
4694}
4695
d7f09764
DN
4696
4697/* Return a type the same as TYPE except unsigned or
4698 signed according to UNSIGNEDP. */
4699
4700static tree
4701gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4702{
4703 tree type1;
4704
4705 type1 = TYPE_MAIN_VARIANT (type);
4706 if (type1 == signed_char_type_node
4707 || type1 == char_type_node
4708 || type1 == unsigned_char_type_node)
4709 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4710 if (type1 == integer_type_node || type1 == unsigned_type_node)
4711 return unsignedp ? unsigned_type_node : integer_type_node;
4712 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4713 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4714 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4715 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4716 if (type1 == long_long_integer_type_node
4717 || type1 == long_long_unsigned_type_node)
4718 return unsignedp
4719 ? long_long_unsigned_type_node
4720 : long_long_integer_type_node;
a6766312
KT
4721 if (int128_integer_type_node
	&& (type1 == int128_integer_type_node
	    || type1 == int128_unsigned_type_node))
4722 return unsignedp
4723 ? int128_unsigned_type_node
4724 : int128_integer_type_node;
d7f09764
DN
4725#if HOST_BITS_PER_WIDE_INT >= 64
4726 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4727 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4728#endif
4729 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4730 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4731 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4732 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4733 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4734 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4735 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4736 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4737
4738#define GIMPLE_FIXED_TYPES(NAME) \
4739 if (type1 == short_ ## NAME ## _type_node \
4740 || type1 == unsigned_short_ ## NAME ## _type_node) \
4741 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4742 : short_ ## NAME ## _type_node; \
4743 if (type1 == NAME ## _type_node \
4744 || type1 == unsigned_ ## NAME ## _type_node) \
4745 return unsignedp ? unsigned_ ## NAME ## _type_node \
4746 : NAME ## _type_node; \
4747 if (type1 == long_ ## NAME ## _type_node \
4748 || type1 == unsigned_long_ ## NAME ## _type_node) \
4749 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4750 : long_ ## NAME ## _type_node; \
4751 if (type1 == long_long_ ## NAME ## _type_node \
4752 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4753 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4754 : long_long_ ## NAME ## _type_node;
4755
4756#define GIMPLE_FIXED_MODE_TYPES(NAME) \
4757 if (type1 == NAME ## _type_node \
4758 || type1 == u ## NAME ## _type_node) \
4759 return unsignedp ? u ## NAME ## _type_node \
4760 : NAME ## _type_node;
4761
4762#define GIMPLE_FIXED_TYPES_SAT(NAME) \
4763 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4764 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4765 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4766 : sat_ ## short_ ## NAME ## _type_node; \
4767 if (type1 == sat_ ## NAME ## _type_node \
4768 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4769 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4770 : sat_ ## NAME ## _type_node; \
4771 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4772 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4773 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4774 : sat_ ## long_ ## NAME ## _type_node; \
4775 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4776 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4777 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4778 : sat_ ## long_long_ ## NAME ## _type_node;
4779
4780#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4781 if (type1 == sat_ ## NAME ## _type_node \
4782 || type1 == sat_ ## u ## NAME ## _type_node) \
4783 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4784 : sat_ ## NAME ## _type_node;
4785
4786 GIMPLE_FIXED_TYPES (fract);
4787 GIMPLE_FIXED_TYPES_SAT (fract);
4788 GIMPLE_FIXED_TYPES (accum);
4789 GIMPLE_FIXED_TYPES_SAT (accum);
4790
4791 GIMPLE_FIXED_MODE_TYPES (qq);
4792 GIMPLE_FIXED_MODE_TYPES (hq);
4793 GIMPLE_FIXED_MODE_TYPES (sq);
4794 GIMPLE_FIXED_MODE_TYPES (dq);
4795 GIMPLE_FIXED_MODE_TYPES (tq);
4796 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4797 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4798 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4799 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4800 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4801 GIMPLE_FIXED_MODE_TYPES (ha);
4802 GIMPLE_FIXED_MODE_TYPES (sa);
4803 GIMPLE_FIXED_MODE_TYPES (da);
4804 GIMPLE_FIXED_MODE_TYPES (ta);
4805 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4806 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4807 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4808 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4809
4810 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4811 the precision; they have precision set to match their range, but
4812 may use a wider mode to match an ABI. If we change modes, we may
4813 wind up with bad conversions. For INTEGER_TYPEs in C, must check
4814 the precision as well, so as to yield correct results for
4815 bit-field types. C++ does not have these separate bit-field
4816 types, and producing a signed or unsigned variant of an
4817 ENUMERAL_TYPE may cause other problems as well. */
4818 if (!INTEGRAL_TYPE_P (type)
4819 || TYPE_UNSIGNED (type) == unsignedp)
4820 return type;
4821
4822#define TYPE_OK(node) \
4823 (TYPE_MODE (type) == TYPE_MODE (node) \
4824 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4825 if (TYPE_OK (signed_char_type_node))
4826 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4827 if (TYPE_OK (integer_type_node))
4828 return unsignedp ? unsigned_type_node : integer_type_node;
4829 if (TYPE_OK (short_integer_type_node))
4830 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4831 if (TYPE_OK (long_integer_type_node))
4832 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4833 if (TYPE_OK (long_long_integer_type_node))
4834 return (unsignedp
4835 ? long_long_unsigned_type_node
4836 : long_long_integer_type_node);
a6766312
KT
4837 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4838 return (unsignedp
4839 ? int128_unsigned_type_node
4840 : int128_integer_type_node);
d7f09764
DN
4841
4842#if HOST_BITS_PER_WIDE_INT >= 64
4843 if (TYPE_OK (intTI_type_node))
4844 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4845#endif
4846 if (TYPE_OK (intDI_type_node))
4847 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4848 if (TYPE_OK (intSI_type_node))
4849 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4850 if (TYPE_OK (intHI_type_node))
4851 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4852 if (TYPE_OK (intQI_type_node))
4853 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4854
4855#undef GIMPLE_FIXED_TYPES
4856#undef GIMPLE_FIXED_MODE_TYPES
4857#undef GIMPLE_FIXED_TYPES_SAT
4858#undef GIMPLE_FIXED_MODE_TYPES_SAT
4859#undef TYPE_OK
4860
4861 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
4862}
4863
4864
4865/* Return an unsigned type the same as TYPE in other respects. */
4866
4867tree
4868gimple_unsigned_type (tree type)
4869{
4870 return gimple_signed_or_unsigned_type (true, type);
4871}
4872
4873
4874/* Return a signed type the same as TYPE in other respects. */
4875
4876tree
4877gimple_signed_type (tree type)
4878{
4879 return gimple_signed_or_unsigned_type (false, type);
4880}
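
/* Editorial sketch: typical use of the two wrappers above, e.g. when a
   pass wants an overflow-friendly unsigned counterpart of an index
   type.  SOME_TYPE is a hypothetical input.

   static tree ATTRIBUTE_UNUSED
   example_unsigned_variant (tree some_type)
   {
     // For 'int' this yields 'unsigned int'; when no standard variant
     // matches, a nonstandard integer type of equal precision is built.
     return gimple_unsigned_type (some_type);
   }  */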
4881
4882
4883/* Return the typed-based alias set for T, which may be an expression
4884 or a type. Return -1 if we don't do anything special. */
4885
4886alias_set_type
4887gimple_get_alias_set (tree t)
4888{
4889 tree u;
4890
4891 /* Permit type-punning when accessing a union, provided the access
4892 is directly through the union. For example, this code does not
4893 permit taking the address of a union member and then storing
4894 through it. Even the type-punning allowed here is a GCC
4895 extension, albeit a common and useful one; the C standard says
4896 that such accesses have implementation-defined behavior. */
4897 for (u = t;
4898 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
4899 u = TREE_OPERAND (u, 0))
4900 if (TREE_CODE (u) == COMPONENT_REF
4901 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
4902 return 0;
4903
4904 /* That's all the expressions we handle specially. */
4905 if (!TYPE_P (t))
4906 return -1;
4907
4908 /* For convenience, follow the C standard when dealing with
4909 character types. Any object may be accessed via an lvalue that
4910 has character type. */
4911 if (t == char_type_node
4912 || t == signed_char_type_node
4913 || t == unsigned_char_type_node)
4914 return 0;
4915
4916 /* Allow aliasing between signed and unsigned variants of the same
4917 type. We treat the signed variant as canonical. */
4918 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
4919 {
4920 tree t1 = gimple_signed_type (t);
4921
4922 /* t1 == t can happen for boolean nodes which are always unsigned. */
4923 if (t1 != t)
4924 return get_alias_set (t1);
4925 }
d7f09764
DN
4926
4927 return -1;
4928}
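
/* Editorial illustration of the union rule above: the type pun below
   is the pattern gimple_get_alias_set blesses with alias set 0,
   because the access goes directly through the union object.  Taking
   &x.i and storing through the pointer would not be covered.

   static int ATTRIBUTE_UNUSED
   example_union_punning (void)
   {
     union u { int i; float f; } x;
     x.f = 1.0f;
     // Direct member access through the union object itself.
     return x.i;
   }  */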
4929
4930
5006671f
RG
4931/* Data structure used to count the number of dereferences to PTR
4932 inside an expression. */
4933struct count_ptr_d
4934{
4935 tree ptr;
4936 unsigned num_stores;
4937 unsigned num_loads;
4938};
4939
4940/* Helper for count_uses_and_derefs. Called by walk_tree to look for
4941 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
4942
4943static tree
4944count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4945{
4946 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4947 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4948
4949 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
4950 pointer 'ptr' is *not* dereferenced; it is simply used to compute
4951 the address of 'fld' as 'ptr + offsetof(fld)'. */
4952 if (TREE_CODE (*tp) == ADDR_EXPR)
4953 {
4954 *walk_subtrees = 0;
4955 return NULL_TREE;
4956 }
4957
70f34814 4958 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5006671f
RG
4959 {
4960 if (wi_p->is_lhs)
4961 count_p->num_stores++;
4962 else
4963 count_p->num_loads++;
4964 }
4965
4966 return NULL_TREE;
4967}
4968
4969/* Count the number of direct and indirect uses for pointer PTR in
4970 statement STMT. The number of direct uses is stored in
4971 *NUM_USES_P. Indirect references are counted separately depending
4972 on whether they are store or load operations. The counts are
4973 stored in *NUM_STORES_P and *NUM_LOADS_P. */
4974
4975void
4976count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4977 unsigned *num_loads_p, unsigned *num_stores_p)
4978{
4979 ssa_op_iter i;
4980 tree use;
4981
4982 *num_uses_p = 0;
4983 *num_loads_p = 0;
4984 *num_stores_p = 0;
4985
4986 /* Find out the total number of uses of PTR in STMT. */
4987 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4988 if (use == ptr)
4989 (*num_uses_p)++;
4990
4991 /* Now count the number of indirect references to PTR. This is
4992 truly awful, but we don't have much choice. There are no parent
4993 pointers inside INDIRECT_REFs, so an expression like
4994 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
4995 find all the indirect and direct uses of x_1 inside. The only
4996 shortcut we can take is the fact that GIMPLE only allows
4997 INDIRECT_REFs inside the expressions below. */
4998 if (is_gimple_assign (stmt)
4999 || gimple_code (stmt) == GIMPLE_RETURN
5000 || gimple_code (stmt) == GIMPLE_ASM
5001 || is_gimple_call (stmt))
5002 {
5003 struct walk_stmt_info wi;
5004 struct count_ptr_d count;
5005
5006 count.ptr = ptr;
5007 count.num_stores = 0;
5008 count.num_loads = 0;
5009
5010 memset (&wi, 0, sizeof (wi));
5011 wi.info = &count;
5012 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5013
5014 *num_stores_p = count.num_stores;
5015 *num_loads_p = count.num_loads;
5016 }
5017
5018 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5019}
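
/* Editorial sketch: a hypothetical use of the counting helper, testing
   whether PTR is only ever dereferenced in STMT, never used by value.

   static bool ATTRIBUTE_UNUSED
   example_only_derefs_p (tree ptr, gimple stmt)
   {
     unsigned uses, loads, stores;
     count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
     // Every use is accounted for by a load or a store through PTR.
     return uses == loads + stores;
   }  */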
5020
346ef3fa
RG
5021/* From a tree operand OP return the base of a load or store operation
5022 or NULL_TREE if OP is not a load or a store. */
5023
5024static tree
5025get_base_loadstore (tree op)
5026{
5027 while (handled_component_p (op))
5028 op = TREE_OPERAND (op, 0);
5029 if (DECL_P (op)
5030 || INDIRECT_REF_P (op)
70f34814 5031 || TREE_CODE (op) == MEM_REF
346ef3fa
RG
5032 || TREE_CODE (op) == TARGET_MEM_REF)
5033 return op;
5034 return NULL_TREE;
5035}
5036
5037/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5038 VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
5039 passing the STMT, the base of the operand and DATA to each.  The base
5040 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5041 or the argument of an address expression.
5042 Returns the results of these callbacks or'ed. */
5043
5044bool
5045walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5046 bool (*visit_load)(gimple, tree, void *),
5047 bool (*visit_store)(gimple, tree, void *),
5048 bool (*visit_addr)(gimple, tree, void *))
5049{
5050 bool ret = false;
5051 unsigned i;
5052 if (gimple_assign_single_p (stmt))
5053 {
5054 tree lhs, rhs;
5055 if (visit_store)
5056 {
5057 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5058 if (lhs)
5059 ret |= visit_store (stmt, lhs, data);
5060 }
5061 rhs = gimple_assign_rhs1 (stmt);
ad8a1ac0
RG
5062 while (handled_component_p (rhs))
5063 rhs = TREE_OPERAND (rhs, 0);
346ef3fa
RG
5064 if (visit_addr)
5065 {
5066 if (TREE_CODE (rhs) == ADDR_EXPR)
5067 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5068 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5069 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5070 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5071 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5072 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5073 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5074 0), data);
cb3d2e33
JJ
5075 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5076 {
5077 unsigned int ix;
5078 tree val;
5079
5080 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5081 if (TREE_CODE (val) == ADDR_EXPR)
5082 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5083 else if (TREE_CODE (val) == OBJ_TYPE_REF
5084 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5085 ret |= visit_addr (stmt,
5086 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5087 0), data);
5088 }
fff1894c
AB
5089 lhs = gimple_assign_lhs (stmt);
5090 if (TREE_CODE (lhs) == TARGET_MEM_REF
fff1894c
AB
5091 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5092 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
346ef3fa
RG
5093 }
5094 if (visit_load)
5095 {
5096 rhs = get_base_loadstore (rhs);
5097 if (rhs)
5098 ret |= visit_load (stmt, rhs, data);
5099 }
5100 }
5101 else if (visit_addr
5102 && (is_gimple_assign (stmt)
4d7a65ea 5103 || gimple_code (stmt) == GIMPLE_COND))
346ef3fa
RG
5104 {
5105 for (i = 0; i < gimple_num_ops (stmt); ++i)
9dd58aa4
JJ
5106 {
5107 tree op = gimple_op (stmt, i);
5108 if (op == NULL_TREE)
5109 ;
5110 else if (TREE_CODE (op) == ADDR_EXPR)
5111 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5112 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a comparison
5113 tree with two operands. */
5114 else if (i == 1 && COMPARISON_CLASS_P (op))
5115 {
5116 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5117 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5118 0), data);
5119 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5120 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5121 0), data);
5122 }
5123 }
346ef3fa
RG
5124 }
5125 else if (is_gimple_call (stmt))
5126 {
5127 if (visit_store)
5128 {
5129 tree lhs = gimple_call_lhs (stmt);
5130 if (lhs)
5131 {
5132 lhs = get_base_loadstore (lhs);
5133 if (lhs)
5134 ret |= visit_store (stmt, lhs, data);
5135 }
5136 }
5137 if (visit_load || visit_addr)
5138 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5139 {
5140 tree rhs = gimple_call_arg (stmt, i);
5141 if (visit_addr
5142 && TREE_CODE (rhs) == ADDR_EXPR)
5143 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5144 else if (visit_load)
5145 {
5146 rhs = get_base_loadstore (rhs);
5147 if (rhs)
5148 ret |= visit_load (stmt, rhs, data);
5149 }
5150 }
5151 if (visit_addr
5152 && gimple_call_chain (stmt)
5153 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5154 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5155 data);
1d24fdd9
RG
5156 if (visit_addr
5157 && gimple_call_return_slot_opt_p (stmt)
5158 && gimple_call_lhs (stmt) != NULL_TREE
4d61856d 5159 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
1d24fdd9 5160 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
346ef3fa
RG
5161 }
5162 else if (gimple_code (stmt) == GIMPLE_ASM)
5163 {
5164 unsigned noutputs;
5165 const char *constraint;
5166 const char **oconstraints;
5167 bool allows_mem, allows_reg, is_inout;
5168 noutputs = gimple_asm_noutputs (stmt);
5169 oconstraints = XALLOCAVEC (const char *, noutputs);
5170 if (visit_store || visit_addr)
5171 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5172 {
5173 tree link = gimple_asm_output_op (stmt, i);
5174 tree op = get_base_loadstore (TREE_VALUE (link));
5175 if (op && visit_store)
5176 ret |= visit_store (stmt, op, data);
5177 if (visit_addr)
5178 {
5179 constraint = TREE_STRING_POINTER
5180 (TREE_VALUE (TREE_PURPOSE (link)));
5181 oconstraints[i] = constraint;
5182 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5183 &allows_reg, &is_inout);
5184 if (op && !allows_reg && allows_mem)
5185 ret |= visit_addr (stmt, op, data);
5186 }
5187 }
5188 if (visit_load || visit_addr)
5189 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5190 {
5191 tree link = gimple_asm_input_op (stmt, i);
5192 tree op = TREE_VALUE (link);
5193 if (visit_addr
5194 && TREE_CODE (op) == ADDR_EXPR)
5195 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5196 else if (visit_load || visit_addr)
5197 {
5198 op = get_base_loadstore (op);
5199 if (op)
5200 {
5201 if (visit_load)
5202 ret |= visit_load (stmt, op, data);
5203 if (visit_addr)
5204 {
5205 constraint = TREE_STRING_POINTER
5206 (TREE_VALUE (TREE_PURPOSE (link)));
5207 parse_input_constraint (&constraint, 0, 0, noutputs,
5208 0, oconstraints,
5209 &allows_mem, &allows_reg);
5210 if (!allows_reg && allows_mem)
5211 ret |= visit_addr (stmt, op, data);
5212 }
5213 }
5214 }
5215 }
5216 }
5217 else if (gimple_code (stmt) == GIMPLE_RETURN)
5218 {
5219 tree op = gimple_return_retval (stmt);
5220 if (op)
5221 {
5222 if (visit_addr
5223 && TREE_CODE (op) == ADDR_EXPR)
5224 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5225 else if (visit_load)
5226 {
5227 op = get_base_loadstore (op);
5228 if (op)
5229 ret |= visit_load (stmt, op, data);
5230 }
5231 }
5232 }
5233 else if (visit_addr
5234 && gimple_code (stmt) == GIMPLE_PHI)
5235 {
5236 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5237 {
5238 tree op = PHI_ARG_DEF (stmt, i);
5239 if (TREE_CODE (op) == ADDR_EXPR)
5240 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5241 }
5242 }
5243
5244 return ret;
5245}
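
/* Example (editorial sketch, not part of the original gimple.c): a
   minimal client of walk_stmt_load_store_addr_ops that counts the
   loads visited in STMT.  The names count_loads_1 and
   count_loads_in_stmt are hypothetical; only the callback signature
   bool (gimple, tree, void *) is dictated by the walker above.  */

static bool
count_loads_1 (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	       void *data)
{
  /* DATA points at the running counter.  */
  ++*(unsigned *) data;
  /* Returning false leaves the walker's accumulated result
     unchanged; the walker ORs all callback return values.  */
  return false;
}

static unsigned
count_loads_in_stmt (gimple stmt)
{
  unsigned n = 0;
  /* NULL visit_store and visit_addr: only loads are reported.  */
  walk_stmt_load_store_addr_ops (stmt, &n, count_loads_1, NULL, NULL);
  return n;
}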

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
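
/* Example (editorial sketch, not part of the original gimple.c):
   because the walker ORs the callback results, returning true from
   both callbacks turns walk_stmt_load_store_ops into a "does STMT
   read or write memory at all?" test.  The helper names are
   hypothetical.  */

static bool
visit_any_op (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	      void *data ATTRIBUTE_UNUSED)
{
  return true;
}

static bool
stmt_loads_or_stores_p (gimple stmt)
{
  return walk_stmt_load_store_ops (stmt, NULL, visit_any_op, visit_any_op);
}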

/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap)data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set, in the ADDRESSES_TAKEN bitmap, the bit for the UID of each
   decl that has its address taken in STMT.  Return true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
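
/* Example (editorial sketch, not part of the original gimple.c):
   accumulating the address-taken decls of a whole basic block by
   calling gimple_ior_addresses_taken on each statement.  Assumes
   the usual gimple statement iterators declared elsewhere in GCC
   (gsi_start_bb and friends); the function name is hypothetical.  */

static bitmap
bb_addresses_taken (basic_block bb)
{
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));

  return addresses_taken;
}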


/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
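
/* Example (editorial sketch, not part of the original gimple.c):
   gimple_decl_printable_name returns NULL for nameless decls, so a
   caller needs a fallback.  The function name and the "<unnamed>"
   placeholder are hypothetical.  */

static void
print_decl_name (FILE *file, tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 0);
  fprintf (file, "%s", name ? name : "<unnamed>");
}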

/* Return true when STMT is a call to the builtin function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
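
/* Example (editorial sketch, not part of the original gimple.c):
   using gimple_call_builtin_p to recognize a memcpy call and pull
   out its three arguments.  The function name is hypothetical.  */

static bool
extract_memcpy_args (gimple stmt, tree *dest, tree *src, tree *len)
{
  if (!gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY)
      || gimple_call_num_args (stmt) != 3)
    return false;
  *dest = gimple_call_arg (stmt, 0);
  *src = gimple_call_arg (stmt, 1);
  *len = gimple_call_arg (stmt, 2);
  return true;
}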

/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
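
/* Example (editorial sketch, not part of the original gimple.c): a
   conservative "may act as a memory barrier" test, with the
   GIMPLE_ASM case delegated to gimple_asm_clobbers_memory_p above.
   The function name is hypothetical.  */

static bool
stmt_may_be_memory_barrier_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_ASM)
    return gimple_asm_clobbers_memory_p (stmt);
  /* Conservatively assume any call can clobber memory.  */
  return is_gimple_call (stmt);
}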
#include "gt-gimple.h"