/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}

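/* Editorial note (illustrative, not part of the original source): the
   size computation in gimple_alloc_stat only adds NUM_OPS - 1 extra
   tree slots because every tuple struct with operands already ends in
   a one-element tree array (see gimple_ops_offset_ above).  For
   example, assuming sizeof (tree) == 8 and a hypothetical 32-byte
   base tuple, a statement with 3 operands is allocated
   32 + 8 * (3 - 1) == 48 bytes.  */
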
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

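/* Usage sketch (illustrative, not part of the original source): given
   an existing FUNCTION_DECL `fndecl' and gimple values `a' and `b'
   (hypothetical names), a two-argument call whose result is stored in
   `lhs' could be built as:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);

   gimple_build_call_valist and gimple_build_call_vec do the same from
   a va_list or a VEC of arguments respectively; gimple_call_set_lhs
   is an accessor from gimple.h.  */
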

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}

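/* Usage sketch (illustrative, not part of the original source): for a
   hypothetical CALL_EXPR `t' that is already in GIMPLE form, e.g.
   during gimplification:

     gimple call = gimple_build_call_from_tree (t);

   All the CALL_EXPR flags (static chain, tail call, return slot,
   nothrow, ...) are carried over, as the function above shows.  */
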

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it can be a single operand or a unary,
   binary or ternary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1, 2 or 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned) subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

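/* Usage sketch (illustrative, not part of the original source): given
   hypothetical trees `x' and `rhs', a tuple for `x = rhs' is normally
   obtained through the gimple_build_assign macro from gimple.h, which
   expands to gimple_build_assign_stat above:

     gimple s = gimple_build_assign (x, rhs);

   The subcode and the operand slots are derived from RHS by
   extract_ops_from_tree_1.  */
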

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

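/* Usage sketch (illustrative, not part of the original source): a
   conditional for `if (a < b)' with hypothetical trees `a' and `b'
   and labels `t_label' and `f_label':

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);

   The comparison code is stored in the subcode and the four operand
   slots hold the two operands and the two labels.  */
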

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

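/* Usage sketch (illustrative, not part of the original source):
   assuming VEC-typed vectors `ins', `outs', `clobs' and `labs'
   (hypothetical names) have been populated with the usual TREE_LIST
   operands, an asm for the template "nop" could be built as:

     gimple s = gimple_build_asm_vec ("nop", ins, outs, clobs, labs);
     gimple_asm_set_volatile (s, true);

   gimple_asm_set_volatile is an accessor from gimple.h.  */
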

/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}

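/* Usage sketch (illustrative, not part of the original source): a
   two-case switch over a hypothetical tree `idx' with CASE_LABEL_EXPRs
   `c1' and `c2' and default label `d':

     gimple s = gimple_build_switch (2, idx, d, c1, c2);

   As the loop above shows, the non-default labels are stored starting
   at offset 1 whenever a default label is present.  */
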

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned) GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned) GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
    = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

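/* Usage sketch (illustrative, not part of the original source):
   accumulating statements into a fresh sequence, with hypothetical
   statements `s1' and `s2':

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, s1);
     gimple_seq_add_stmt (&seq, s2);

   Passing the address of a NULL sequence is fine (the first insertion
   allocates it), and a NULL GS is silently ignored.  */
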

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
                     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

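/* Usage sketch (illustrative, not part of the original source): a
   minimal statement callback, with hypothetical names, that stops the
   walk at the first GIMPLE_CALL by returning a non-NULL tree:

     static tree
     find_call (gimple_stmt_iterator *gsi, bool *handled_ops,
                struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         {
           *handled_ops = true;
           return error_mark_node;
         }
       return NULL_TREE;
     }

   Note that a callback must set *HANDLED_OPS before returning a
   value; walk_gimple_stmt below asserts this.  The returned tree ends
   up in wi->callback_result and the offending statement is returned
   by walk_gimple_seq_mod.  */
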

/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really a LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS is of a non-renamable type or is a register variable,
             we may use a COMPONENT_REF on the LHS.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
                                 wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

25583c4f
RS
1892/* Return true if calls C1 and C2 are known to go to the same function. */
1893
1894bool
1895gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1896{
1897 if (gimple_call_internal_p (c1))
1898 return (gimple_call_internal_p (c2)
1899 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1900 else
1901 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1902 || (gimple_call_fndecl (c1)
1903 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1904}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" attribute string for call STMT, or NULL_TREE
   if there is none.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detect EAF_* flags for argument number ARG of call STMT.  In the
   "fn spec" string, character 0 describes the return value, so
   argument ARG is described by character 1 + ARG.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detect ERF_* flags for the return value of call STMT, encoded in
   character 0 of the "fn spec" string.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
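
/* Illustrative sketch (not part of GCC proper): decoding the flags
   computed above for a hypothetical fnspec of "1rW", which would mean
   "returns argument 0; argument 0 is read-only and does not escape;
   argument 1 is written directly and does not escape".  Assumes the
   companion ERF_RETURN_ARG_MASK mask from tree.h.  */

static void ATTRIBUTE_UNUSED
example_decode_fnspec (const_gimple call)
{
  int rflags = gimple_call_return_flags (call);

  if (rflags & ERF_RETURNS_ARG)
    {
      /* '1' in the spec maps to argument 0, '2' to argument 1, ...  */
      unsigned retarg = rflags & ERF_RETURN_ARG_MASK;
      if (gimple_call_arg_flags (call, retarg) & EAF_UNUSED)
        {
          /* The returned argument is otherwise unused by the callee.  */
        }
    }
}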


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set the basic block holding statement STMT to BB.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
        {
          unsigned old_len = VEC_length (basic_block, label_to_block_map);
          LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
          if (old_len <= (unsigned) uid)
            {
              unsigned new_len = 3 * uid / 2 + 1;

              VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                                     new_len);
            }
        }

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}


/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}


/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
                                  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
         on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
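
/* Illustrative sketch (not part of GCC proper): shrinking a binary RHS
   "lhs = a + b" to the copy "lhs = a" in place.  GSI is assumed to
   point at a GIMPLE_ASSIGN with a binary RHS whose first operand is a
   gimple value.  */

static void ATTRIBUTE_UNUSED
example_shrink_rhs (gimple_stmt_iterator *gsi)
{
  tree op1 = gimple_assign_rhs1 (gsi_stmt (*gsi));

  /* One operand needs fewer slots than two, so the statement is reused
     rather than reallocated and GSI stays valid either way.  */
  gimple_assign_set_rhs_with_ops_1 (gsi, TREE_CODE (op1), op1,
                                    NULL_TREE, NULL_TREE);
  update_stmt (gsi_stmt (*gsi));
}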


/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}


/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}

/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          new_seq = gimple_seq_copy (gimple_bind_body (stmt));
          gimple_bind_set_body (copy, new_seq);
          gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
          gimple_bind_set_block (copy, gimple_bind_block (stmt));
          break;

        case GIMPLE_CATCH:
          new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
          gimple_catch_set_handler (copy, new_seq);
          t = unshare_expr (gimple_catch_types (stmt));
          gimple_catch_set_types (copy, t);
          break;

        case GIMPLE_EH_FILTER:
          new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
          gimple_eh_filter_set_failure (copy, new_seq);
          t = unshare_expr (gimple_eh_filter_types (stmt));
          gimple_eh_filter_set_types (copy, t);
          break;

        case GIMPLE_EH_ELSE:
          new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
          gimple_eh_else_set_n_body (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
          gimple_eh_else_set_e_body (copy, new_seq);
          break;

        case GIMPLE_TRY:
          new_seq = gimple_seq_copy (gimple_try_eval (stmt));
          gimple_try_set_eval (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
          gimple_try_set_cleanup (copy, new_seq);
          break;

        case GIMPLE_OMP_FOR:
          new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
          gimple_omp_for_set_pre_body (copy, new_seq);
          t = unshare_expr (gimple_omp_for_clauses (stmt));
          gimple_omp_for_set_clauses (copy, t);
          copy->gimple_omp_for.iter
            = ggc_alloc_vec_gimple_omp_for_iter
                (gimple_omp_for_collapse (stmt));
          for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
            {
              gimple_omp_for_set_cond (copy, i,
                                       gimple_omp_for_cond (stmt, i));
              gimple_omp_for_set_index (copy, i,
                                        gimple_omp_for_index (stmt, i));
              t = unshare_expr (gimple_omp_for_initial (stmt, i));
              gimple_omp_for_set_initial (copy, i, t);
              t = unshare_expr (gimple_omp_for_final (stmt, i));
              gimple_omp_for_set_final (copy, i, t);
              t = unshare_expr (gimple_omp_for_incr (stmt, i));
              gimple_omp_for_set_incr (copy, i, t);
            }
          goto copy_omp_body;

        case GIMPLE_OMP_PARALLEL:
          t = unshare_expr (gimple_omp_parallel_clauses (stmt));
          gimple_omp_parallel_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
          gimple_omp_parallel_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
          gimple_omp_parallel_set_data_arg (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_TASK:
          t = unshare_expr (gimple_omp_task_clauses (stmt));
          gimple_omp_task_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_task_child_fn (stmt));
          gimple_omp_task_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_task_data_arg (stmt));
          gimple_omp_task_set_data_arg (copy, t);
          t = unshare_expr (gimple_omp_task_copy_fn (stmt));
          gimple_omp_task_set_copy_fn (copy, t);
          t = unshare_expr (gimple_omp_task_arg_size (stmt));
          gimple_omp_task_set_arg_size (copy, t);
          t = unshare_expr (gimple_omp_task_arg_align (stmt));
          gimple_omp_task_set_arg_align (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_CRITICAL:
          t = unshare_expr (gimple_omp_critical_name (stmt));
          gimple_omp_critical_set_name (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_SECTIONS:
          t = unshare_expr (gimple_omp_sections_clauses (stmt));
          gimple_omp_sections_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_sections_control (stmt));
          gimple_omp_sections_set_control (copy, t);
          /* FALLTHRU  */

        case GIMPLE_OMP_SINGLE:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        copy_omp_body:
          new_seq = gimple_seq_copy (gimple_omp_body (stmt));
          gimple_omp_set_body (copy, new_seq);
          break;

        case GIMPLE_TRANSACTION:
          new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
          gimple_transaction_set_body (copy, new_seq);
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
          gimple_wce_set_cleanup (copy, new_seq);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
        gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
        {
          gimple_set_def_ops (copy, NULL);
          gimple_set_use_ops (copy, NULL);
        }

      if (gimple_has_mem_ops (stmt))
        {
          gimple_set_vdef (copy, gimple_vdef (stmt));
          gimple_set_vuse (copy, gimple_vuse (stmt));
        }

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
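
/* Illustrative sketch (not part of GCC proper): duplicating a statement
   and inserting the copy after the original.  The copy comes back
   detached from any sequence with cleared SSA operand caches; note that
   in SSA form a statement defining an SSA name must be given a fresh
   LHS (e.g. via gimple_set_lhs) before such an insertion.  */

static void ATTRIBUTE_UNUSED
example_duplicate_stmt (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));

  /* The copy was marked modified, so its operand caches are rebuilt
     when it is inserted into the IL.  */
  gsi_insert_after (gsi, copy, GSI_SAME_STMT);
}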


/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    s->gsbase.modified = (unsigned) modifiedp;
}


/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (s))
    return true;

  if (is_gimple_call (s))
    {
      int flags = gimple_call_flags (s);

      /* An infinite loop is considered a side effect.  */
      if (!(flags & (ECF_CONST | ECF_PURE))
          || (flags & ECF_LOOPING_CONST_OR_PURE))
        return true;

      return false;
    }

  return false;
}

/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
        if (tree_could_trap_p (gimple_op (s, i)))
          return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
        div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
                                      (INTEGRAL_TYPE_P (t)
                                       && TYPE_OVERFLOW_TRAPS (t)),
                                      div));

    default:
      break;
    }

  return false;
}

/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}

/* Return true if the RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
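
/* Illustrative sketch (not part of GCC proper): for an assignment such
   as "*p = a / b", gimple_could_trap_p also accounts for the store
   through P, while gimple_assign_rhs_could_trap_p only asks about the
   division.  This helper reports when the LHS alone is the reason a
   statement may trap.  */

static bool ATTRIBUTE_UNUSED
example_only_lhs_could_trap_p (gimple assign)
{
  return (gimple_could_trap_p (assign)
          && !gimple_assign_rhs_could_trap_p (assign));
}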


/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
               gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}


/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else if (rhs_class == GIMPLE_TERNARY_RHS)
    return 3;
  else
    gcc_unreachable ();
}

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)				    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
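
/* Illustrative sketch (not part of GCC proper): sampling the table
   above.  PLUS_EXPR is tcc_binary and classifies as GIMPLE_BINARY_RHS,
   SSA_NAME is special-cased as GIMPLE_SINGLE_RHS and FMA_EXPR as
   GIMPLE_TERNARY_RHS.  */

static void ATTRIBUTE_UNUSED
example_rhs_classes (void)
{
  gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);
  gcc_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);
  gcc_assert (get_gimple_rhs_num_ops (FMA_EXPR) == 3);
}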

/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        return TREE_CONSTANT (t);
      else
        return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
          || TREE_CODE (t) == PARM_DECL
          || TREE_CODE (t) == RESULT_DECL
          || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
          || TREE_CODE (t) == FUNCTION_DECL
          || TREE_CODE (t) == LABEL_DECL
          || TREE_CODE (t) == CONST_DECL
          /* Allow string constants, since they are addressable.  */
          || TREE_CODE (t) == STRING_CST);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so can miss variable changes they might imply.  All
     around, it seems safest to not do too much optimization with
     these at the tree level at all.  We'll have to rely on the rtl
     optimizers to clean this up, as there we've got all the
     appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
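
/* Illustrative sketch (not part of GCC proper): how the predicates
   above nest.  An SSA_NAME of scalar type is a register, hence a
   value, hence a valid MEM_REF address; an INTEGER_CST is a value and
   a valid MEM_REF address but never a register.  */

static void ATTRIBUTE_UNUSED
example_operand_predicates (tree name, tree cst)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME
              && TREE_CODE (cst) == INTEGER_CST);

  if (is_gimple_reg (name))
    gcc_assert (is_gimple_val (name) && is_gimple_mem_ref_addr (name));

  gcc_assert (is_gimple_val (cst)
              && is_gimple_mem_ref_addr (cst)
              && !is_gimple_reg (cst));
}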


/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  if (TREE_CODE (t) == SSA_NAME
      || DECL_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t)
      || TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return t;
  else
    return NULL_TREE;
}
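
/* Illustrative sketch (not part of GCC proper): for a reference such as
   "array[i].fld[j]" the loop above strips the ARRAY_REFs and the
   COMPONENT_REF and returns the "array" decl, whereas for "*p" with no
   visible base object the MEM_REF itself comes back and DECL_P fails.  */

static bool ATTRIBUTE_UNUSED
example_ref_has_decl_base_p (tree ref)
{
  tree base = get_base_address (ref);
  return base != NULL_TREE && DECL_P (base);
}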

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
             == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
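
/* Illustrative sketch (not part of GCC proper): feeding "!x" through
   the canonicalization above yields "x == 0", assuming X is a
   boolean-typed gimple value; an expression that cannot be made into a
   valid GIMPLE condition yields NULL_TREE instead.  */

static tree ATTRIBUTE_UNUSED
example_canonicalize_not (tree x)
{
  tree not_x = build1 (TRUTH_NOT_EXPR, TREE_TYPE (x), x);
  return canonicalize_cond_expr_cond (not_x);
}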

/* Build a GIMPLE_CALL identical to STMT but skipping the arguments
   in the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
                                               vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
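
/* Illustrative sketch (not part of GCC proper): dropping the second
   argument (index 1) of a call.  The new statement preserves the LHS,
   location and virtual operands as done above; replacing STMT with it
   in the IL is the caller's job.  */

static gimple ATTRIBUTE_UNUSED
example_skip_second_arg (gimple call)
{
  bitmap to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (to_skip, 1);
  new_call = gimple_call_copy_skip_args (call, to_skip);
  BITMAP_FREE (to_skip);

  return new_call;
}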


enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

   -2: The pair (T1, T2) has just been inserted in the table.
    0: T1 and T2 are different types.
    1: T1 and T2 are the same type.

   The two elements in the SAME_P array are indexed by the comparison
   mode gtc_mode.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p[2];
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;


/* Lookup the pair of types T1 and T2 in the type-pair cache.  Insert
   a new entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (type_pair_cache == NULL)
    type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Avoid the function-call overhead of iterative_hash_hashval_t;
     UIDs are known to be in a limited range, so a simple direct-mapped
     hash is good enough.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
           % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p[0] = -2;
  type_pair_cache [index].same_p[1] = -2;

  return &type_pair_cache[index];
}

/* Per pointer state for the SCC finding.  The on_sccstack flag is not
   strictly required: it is true exactly when there is no hash value
   recorded for the type and false otherwise.  But querying that is
   slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;


/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;

/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  if (!gimple_type_leader)
    return NULL_TREE;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
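
/* Illustrative sketch (not part of GCC proper): both caches above are
   direct-mapped, so a colliding entry silently evicts its predecessor
   and a later lookup simply misses.  A NULL_TREE result therefore
   means "no leader recorded", whether because the type was never
   merged or because its slot was reused.  */

static bool ATTRIBUTE_UNUSED
example_leader_cached_p (tree t)
{
  return gimple_lookup_type_leader (t) != NULL_TREE;
}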

/* Return true if T1 and T2 have the same name.  Two unnamed types
   compare equal to each other, but an unnamed type never compares
   equal to a named one.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}

/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
               /* Once gimplification is done, self-referential offsets are
                  instantiated as operand #2 of the COMPONENT_REF built for
                  each access and reset.  Therefore, they are not relevant
                  anymore and fields are interchangeable provided that they
                  represent the same access.  */
               || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
                   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
                   && (DECL_SIZE (f1) == DECL_SIZE (f2)
                       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
                           && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
                       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
                   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
               || operand_equal_p (offset1, offset2, 0))
              && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
                                     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
                      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
                      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
        return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}

static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
                             VEC(type_pair_t, heap) **,
                             struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
           struct sccs *state,
           VEC(type_pair_t, heap) **sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)*pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parent's low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}

/* Worker for gimple_types_compatible_p.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
                             VEC(type_pair_t, heap) **sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p[GTC_MERGE] == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
                      DECL_CONTEXT (TYPE_NAME (t2)),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          /* If for a complete array type the possibly gimplified sizes
             are different the types are different.  */
          else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
                   || (TYPE_SIZE (i1)
                       && TYPE_SIZE (i2)
                       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
                              state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2),
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Different field kinds are not compatible.  */
            if (TREE_CODE (f1) != TREE_CODE (f2))
              goto different_types;
            /* Field decls must have the same name and offset.  */
            if (TREE_CODE (f1) == FIELD_DECL
                && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                    || !gimple_compare_field_offset (f1, f2)))
              goto different_types;
            /* All entities should have the same name and type.  */
            if (DECL_NAME (f1) != DECL_NAME (f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = VEC_pop (type_pair_t, *sccstack);
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p[GTC_MERGE] = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}

/* Return true iff T1 and T2 are structurally identical.  This is the
   predicate used for LTO type merging; an incomplete type and a
   complete type are considered different.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keep the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
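
/* Usage sketch (illustrative only, not used elsewhere in this file):
   to decide whether a streamed-in type T1 may be merged with an
   existing type T2 one would ask

     bool mergeable = gimple_types_compatible_p (t1, t2);

   Callers normally go through gimple_register_type below instead,
   which combines this predicate with the global type hash table.  */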


static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
			    struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the caller's type, with state *STATE, to T.
   Update the caller's type hash V with the hash for T if it is not part
   of the SCC containing the caller's type, and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parent's SCC, mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parent's SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}

/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (!name)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}

/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}
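
/* Illustrative sketch, mirroring the use in iterative_hash_gimple_type
   below: SCC members are sorted by hash before mixing so the combined
   hash is independent of the visitation order,

     qsort (pairs, size, sizeof (struct type_hash_pair),
	    type_hash_pair_compare);

   Comparing with '<' instead of subtracting avoids overflow problems,
   since hashval_t is unsigned.  */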

/* Return a hash value for the gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles, which mess up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = VEC_length (tree, *sccstack) - 1;
	  while (VEC_index (tree, *sccstack, first) != type)
	    --first;
	  size = VEC_length (tree, *sccstack) - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = VEC_pop (tree, *sccstack);
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent of
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}


/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				       tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
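
/* Illustrative sketch: gimple_type_hash pairs up with gimple_type_eq
   as libiberty hashtab callbacks (this is exactly how
   gimple_register_type below creates its table),

     htab_t types = htab_create_ggc (16381, gimple_type_hash,
				     gimple_type_eq, 0);

   hence two types for which gimple_types_compatible_p returns true
   must hash to the same value.  */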

/* Return a hash value for the gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == VECTOR_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash the string flag and whether the type
     is a sizetype.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = iterative_hash_canonical_type (TREE_VALUE (p), v);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	if (TREE_CODE (f) == FIELD_DECL)
	  {
	    v = iterative_hash_canonical_type (TREE_TYPE (f), v);
	    nf++;
	  }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}

/* Returns a hash value for P (assumed to be a type).  The value is
   equal for types considered compatible by
   gimple_canonical_types_compatible_p.  */

static hashval_t
gimple_canonical_type_hash (const void *p)
{
  if (canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
						 tree_int_map_eq, NULL);

  return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
}


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
				    CONST_CAST_TREE (t2));
}


/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}

/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
			 (GIMPLE_TYPE_LEADER_SIZE);

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  return gimple_register_type_1 (t, false);
}
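
/* Usage sketch (illustrative only): the LTO streamer-in registers
   every type it reads and continues with the returned leader,

     t = gimple_register_type (t);

   after which structurally identical types from different TUs share a
   single tree node.  */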

/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    return false;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements is the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  /* If for a complete array type the possibly gimplified sizes
	     are different the types are different.  */
	  else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
		   || (TYPE_SIZE (i1)
		       && TYPE_SIZE (i2)
		       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gimple_canonical_types_compatible_p
	     (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
	return false;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}


/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
					      CONST_CAST_TREE (t2));
}

/* Register type T in the global canonical type table
   gimple_canonical_types.  If another type T', compatible with T,
   already existed in gimple_canonical_types then return T', otherwise
   return T.  This is used by LTO to merge identical types read from
   different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}
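
/* Usage sketch (illustrative only): after the streamer runs

     t = gimple_register_canonical_type (t);

   TBAA and useless_type_conversion_p can compare
   TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2) by pointer instead of
   walking both type graphs.  */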


/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}


/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}


/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
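
/* Usage sketch (illustrative only): a pass that needs to perform an
   operation in the corresponding unsigned type would write

     tree utype = gimple_unsigned_type (TREE_TYPE (op));

   which maps e.g. 'int' to 'unsigned int' and falls back to
   build_nonstandard_integer_type for non-standard precisions.  */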


/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
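
/* Source-level illustration (not part of this file) of the union
   exception above:

     union u { int i; float f; } x;
     x.f = 1.0f;
     use (x.i);    // direct access through the union: alias set 0

   Taking the address of x.f and storing through the resulting pointer
   is not covered by this exception.  */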


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
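
/* Usage sketch (illustrative only), reusing the comment's example
   '*x_1 = foo (x_1, *x_1)': with PTR being x_1,

     unsigned uses, loads, stores;
     count_uses_and_derefs (x_1, stmt, &uses, &loads, &stores);

   yields three uses overall, of which one is an indirect load and one
   an indirect store.  */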
5059
346ef3fa
RG
5060/* From a tree operand OP return the base of a load or store operation
5061 or NULL_TREE if OP is not a load or a store. */
5062
5063static tree
5064get_base_loadstore (tree op)
5065{
5066 while (handled_component_p (op))
5067 op = TREE_OPERAND (op, 0);
5068 if (DECL_P (op)
5069 || INDIRECT_REF_P (op)
70f34814 5070 || TREE_CODE (op) == MEM_REF
346ef3fa
RG
5071 || TREE_CODE (op) == TARGET_MEM_REF)
5072 return op;
5073 return NULL_TREE;
5074}
5075
5076/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5077 VISIT_ADDR if non-NULL on loads, store and address-taken operands
5078 passing the STMT, the base of the operand and DATA to it. The base
5079 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5080 or the argument of an address expression.
5081 Returns the results of these callbacks or'ed. */
5082
5083bool
5084walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5085 bool (*visit_load)(gimple, tree, void *),
5086 bool (*visit_store)(gimple, tree, void *),
5087 bool (*visit_addr)(gimple, tree, void *))
5088{
5089 bool ret = false;
5090 unsigned i;
5091 if (gimple_assign_single_p (stmt))
5092 {
5093 tree lhs, rhs;
5094 if (visit_store)
5095 {
5096 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5097 if (lhs)
5098 ret |= visit_store (stmt, lhs, data);
5099 }
5100 rhs = gimple_assign_rhs1 (stmt);
ad8a1ac0
RG
5101 while (handled_component_p (rhs))
5102 rhs = TREE_OPERAND (rhs, 0);
346ef3fa
RG
5103 if (visit_addr)
5104 {
5105 if (TREE_CODE (rhs) == ADDR_EXPR)
5106 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5107 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5108 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5109 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5110 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5111 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5112 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5113 0), data);
cb3d2e33
JJ
5114 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5115 {
5116 unsigned int ix;
5117 tree val;
5118
5119 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5120 if (TREE_CODE (val) == ADDR_EXPR)
5121 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5122 else if (TREE_CODE (val) == OBJ_TYPE_REF
5123 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5124 ret |= visit_addr (stmt,
5125 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5126 0), data);
5127 }
fff1894c
AB
5128 lhs = gimple_assign_lhs (stmt);
5129 if (TREE_CODE (lhs) == TARGET_MEM_REF
fff1894c
AB
5130 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5131 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
346ef3fa
RG
5132 }
5133 if (visit_load)
5134 {
5135 rhs = get_base_loadstore (rhs);
5136 if (rhs)
5137 ret |= visit_load (stmt, rhs, data);
5138 }
5139 }
5140 else if (visit_addr
5141 && (is_gimple_assign (stmt)
4d7a65ea 5142 || gimple_code (stmt) == GIMPLE_COND))
346ef3fa
RG
5143 {
5144 for (i = 0; i < gimple_num_ops (stmt); ++i)
9dd58aa4
JJ
5145 {
5146 tree op = gimple_op (stmt, i);
5147 if (op == NULL_TREE)
5148 ;
5149 else if (TREE_CODE (op) == ADDR_EXPR)
5150 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5151 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
5152 tree with two operands. */
5153 else if (i == 1 && COMPARISON_CLASS_P (op))
5154 {
5155 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5156 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5157 0), data);
5158 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5159 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5160 0), data);
5161 }
5162 }
346ef3fa
RG
5163 }
5164 else if (is_gimple_call (stmt))
5165 {
5166 if (visit_store)
5167 {
5168 tree lhs = gimple_call_lhs (stmt);
5169 if (lhs)
5170 {
5171 lhs = get_base_loadstore (lhs);
5172 if (lhs)
5173 ret |= visit_store (stmt, lhs, data);
5174 }
5175 }
5176 if (visit_load || visit_addr)
5177 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5178 {
5179 tree rhs = gimple_call_arg (stmt, i);
5180 if (visit_addr
5181 && TREE_CODE (rhs) == ADDR_EXPR)
5182 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5183 else if (visit_load)
5184 {
5185 rhs = get_base_loadstore (rhs);
5186 if (rhs)
5187 ret |= visit_load (stmt, rhs, data);
5188 }
5189 }
5190 if (visit_addr
5191 && gimple_call_chain (stmt)
5192 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5193 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5194 data);
1d24fdd9
RG
5195 if (visit_addr
5196 && gimple_call_return_slot_opt_p (stmt)
5197 && gimple_call_lhs (stmt) != NULL_TREE
4d61856d 5198 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
1d24fdd9 5199 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
346ef3fa
RG
5200 }
5201 else if (gimple_code (stmt) == GIMPLE_ASM)
5202 {
5203 unsigned noutputs;
5204 const char *constraint;
5205 const char **oconstraints;
5206 bool allows_mem, allows_reg, is_inout;
5207 noutputs = gimple_asm_noutputs (stmt);
5208 oconstraints = XALLOCAVEC (const char *, noutputs);
5209 if (visit_store || visit_addr)
5210 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5211 {
5212 tree link = gimple_asm_output_op (stmt, i);
5213 tree op = get_base_loadstore (TREE_VALUE (link));
5214 if (op && visit_store)
5215 ret |= visit_store (stmt, op, data);
5216 if (visit_addr)
5217 {
5218 constraint = TREE_STRING_POINTER
5219 (TREE_VALUE (TREE_PURPOSE (link)));
5220 oconstraints[i] = constraint;
5221 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5222 &allows_reg, &is_inout);
5223 if (op && !allows_reg && allows_mem)
5224 ret |= visit_addr (stmt, op, data);
5225 }
5226 }
5227 if (visit_load || visit_addr)
5228 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5229 {
5230 tree link = gimple_asm_input_op (stmt, i);
5231 tree op = TREE_VALUE (link);
5232 if (visit_addr
5233 && TREE_CODE (op) == ADDR_EXPR)
5234 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5235 else if (visit_load || visit_addr)
5236 {
5237 op = get_base_loadstore (op);
5238 if (op)
5239 {
5240 if (visit_load)
5241 ret |= visit_load (stmt, op, data);
5242 if (visit_addr)
5243 {
5244 constraint = TREE_STRING_POINTER
5245 (TREE_VALUE (TREE_PURPOSE (link)));
5246 parse_input_constraint (&constraint, 0, 0, noutputs,
5247 0, oconstraints,
5248 &allows_mem, &allows_reg);
5249 if (!allows_reg && allows_mem)
5250 ret |= visit_addr (stmt, op, data);
5251 }
5252 }
5253 }
5254 }
5255 }
5256 else if (gimple_code (stmt) == GIMPLE_RETURN)
5257 {
5258 tree op = gimple_return_retval (stmt);
5259 if (op)
5260 {
5261 if (visit_addr
5262 && TREE_CODE (op) == ADDR_EXPR)
5263 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5264 else if (visit_load)
5265 {
5266 op = get_base_loadstore (op);
5267 if (op)
5268 ret |= visit_load (stmt, op, data);
5269 }
5270 }
5271 }
5272 else if (visit_addr
5273 && gimple_code (stmt) == GIMPLE_PHI)
5274 {
5275 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5276 {
5277 tree op = PHI_ARG_DEF (stmt, i);
5278 if (TREE_CODE (op) == ADDR_EXPR)
5279 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5280 }
5281 }
5282
5283 return ret;
5284}
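
/* Usage sketch (illustrative, not part of gimple.c): the walker above
   can drive simple statement-local analyses.  The callback and struct
   names below are hypothetical.  Each callback returns false so the
   walk visits every operand.  */

struct mem_ref_counts
{
  unsigned loads;
  unsigned stores;
  unsigned addrs;
};

static bool
count_load_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	       void *data)
{
  ((struct mem_ref_counts *) data)->loads++;
  return false;
}

static bool
count_store_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
		void *data)
{
  ((struct mem_ref_counts *) data)->stores++;
  return false;
}

static bool
count_addr_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	       void *data)
{
  ((struct mem_ref_counts *) data)->addrs++;
  return false;
}

/* Count the loads, stores and address-taken operands of STMT.  */

static void
count_mem_refs (gimple stmt, struct mem_ref_counts *counts)
{
  counts->loads = counts->stores = counts->addrs = 0;
  walk_stmt_load_store_addr_ops (stmt, counts, count_load_cb,
				 count_store_cb, count_addr_cb);
}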
5285
5286/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5287 should make a faster clone for this case. */
5288
5289bool
5290walk_stmt_load_store_ops (gimple stmt, void *data,
5291 bool (*visit_load)(gimple, tree, void *),
5292 bool (*visit_store)(gimple, tree, void *))
5293{
5294 return walk_stmt_load_store_addr_ops (stmt, data,
5295 visit_load, visit_store, NULL);
5296}
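
/* Usage sketch (illustrative, not part of gimple.c): a callback that
   unconditionally returns true turns the wrapper above into a cheap
   predicate.  The helper names are hypothetical.  */

static bool
ref_found_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	      void *data ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return true if STMT loads from or stores to memory.  */

static bool
stmt_references_memory_p (gimple stmt)
{
  return walk_stmt_load_store_ops (stmt, NULL, ref_found_cb, ref_found_cb);
}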
5297
ccacdf06
RG
5298 /* Helper for gimple_ior_addresses_taken. */
5299
5300static bool
5301gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5302 tree addr, void *data)
5303{
5304 bitmap addresses_taken = (bitmap)data;
2ea9dc64
RG
5305 addr = get_base_address (addr);
5306 if (addr
5307 && DECL_P (addr))
ccacdf06
RG
5308 {
5309 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5310 return true;
5311 }
5312 return false;
5313}
5314
5315 /* Set the bit for the UID of each decl whose address is taken
5316 in STMT in the ADDRESSES_TAKEN bitmap. Return true if any
5317 were found in this stmt. */
5318
5319bool
5320gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5321{
5322 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5323 gimple_ior_addresses_taken_1);
5324}
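
/* Usage sketch (illustrative, not part of gimple.c): collecting the
   address-taken decls of the current function into a caller-provided
   bitmap.  PHI nodes are walked too, since their arguments may contain
   ADDR_EXPRs.  The function name is hypothetical.  */

static void
collect_addresses_taken (bitmap addresses_taken)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
    }
}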
5325
4537ec0c
DN
5326
5327 /* Return a printable name for symbol DECL; for VERBOSITY >= 2 the name is demangled verbosely. */
5328
5329const char *
5330gimple_decl_printable_name (tree decl, int verbosity)
5331{
98b2dfbb
RG
5332 if (!DECL_NAME (decl))
5333 return NULL;
4537ec0c
DN
5334
5335 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5336 {
5337 const char *str, *mangled_str;
5338 int dmgl_opts = DMGL_NO_OPTS;
5339
5340 if (verbosity >= 2)
5341 {
5342 dmgl_opts = DMGL_VERBOSE
4537ec0c
DN
5343 | DMGL_ANSI
5344 | DMGL_GNU_V3
5345 | DMGL_RET_POSTFIX;
5346 if (TREE_CODE (decl) == FUNCTION_DECL)
5347 dmgl_opts |= DMGL_PARAMS;
5348 }
5349
5350 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5351 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5352 return (str) ? str : mangled_str;
5353 }
5354
5355 return IDENTIFIER_POINTER (DECL_NAME (decl));
5356}
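
/* Usage sketch (illustrative, not part of gimple.c): dumping a decl's
   name with full demangling when available, falling back to its UID.
   The function name is hypothetical; the dump style follows the usual
   middle-end conventions.  */

static void
dump_symbol_name (FILE *file, tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);

  if (name)
    fprintf (file, "%s", name);
  else
    fprintf (file, "<unnamed decl %u>", (unsigned) DECL_UID (decl));
}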
5357
c54c785d
JH
5358 /* Return true when STMT is a call to the builtin function CODE. */
5359
5360bool
5361gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5362{
5363 tree fndecl;
5364 return (is_gimple_call (stmt)
5365 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5366 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5367 && DECL_FUNCTION_CODE (fndecl) == code);
5368}
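
/* Usage sketch (illustrative, not part of gimple.c): recognizing a
   memset call and extracting its arguments, the typical first step of
   a builtin-folding pass.  The function name is hypothetical.  */

static bool
stmt_is_memset_p (gimple stmt, tree *ptr, tree *val, tree *len)
{
  if (!gimple_call_builtin_p (stmt, BUILT_IN_MEMSET)
      || gimple_call_num_args (stmt) != 3)
    return false;

  *ptr = gimple_call_arg (stmt, 0);
  *val = gimple_call_arg (stmt, 1);
  *len = gimple_call_arg (stmt, 2);
  return true;
}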
5369
edcdea5b
NF
5370/* Return true if STMT clobbers memory. STMT is required to be a
5371 GIMPLE_ASM. */
5372
5373bool
5374gimple_asm_clobbers_memory_p (const_gimple stmt)
5375{
5376 unsigned i;
5377
5378 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5379 {
5380 tree op = gimple_asm_clobber_op (stmt, i);
5381 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5382 return true;
5383 }
5384
5385 return false;
5386}
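
/* Usage sketch (illustrative, not part of gimple.c): an alias or DCE
   style pass could treat a "memory"-clobbering asm as a full barrier.
   The predicate name is hypothetical.  */

static bool
stmt_is_asm_barrier_p (gimple stmt)
{
  return (gimple_code (stmt) == GIMPLE_ASM
	  && gimple_asm_clobbers_memory_p (stmt));
}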
726a989a 5387#include "gt-gimple.h"