1 /* Gimple IR support functions.
2
3 Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
35 #include "flags.h"
36 #include "alias.h"
37 #include "demangle.h"
38 #include "langhooks.h"
39
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc), but those assume that types were built with the various
43 build_*_type routines which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
45 htab_t gimple_types;
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
52
53 /* Global type comparison cache.  This is keyed by TYPE_UID for space
54    efficiency and thus neither needs nor can use GC.  */
55 static htab_t gtc_visited;
56 static struct obstack gtc_ob;
57
58 /* All the tuples have their operand vector (if present) at the very bottom
59    of the structure.  Therefore, the offset required to find the
60    operands vector is the size of the structure minus the size of the
61    one-element tree array at the end (see gimple_ops).  */
62 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
63 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
64 EXPORTED_CONST size_t gimple_ops_offset_[] = {
65 #include "gsstruct.def"
66 };
67 #undef DEFGSSTRUCT
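/* For instance (an illustrative note, not an exhaustive list): GSS_WITH_OPS
   maps to struct gimple_statement_with_ops, whose last field is a 1-element
   "tree op[1]" array, so its entry above is
   sizeof (struct gimple_statement_with_ops) - sizeof (tree); structures
   without trailing tree operands get an offset of 0.  */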
68
69 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
70 static const size_t gsstruct_code_size[] = {
71 #include "gsstruct.def"
72 };
73 #undef DEFGSSTRUCT
74
75 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
76 const char *const gimple_code_name[] = {
77 #include "gimple.def"
78 };
79 #undef DEFGSCODE
80
81 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
82 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
83 #include "gimple.def"
84 };
85 #undef DEFGSCODE
86
87 #ifdef GATHER_STATISTICS
88 /* Gimple stats. */
89
90 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
91 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
92
93 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
94 static const char * const gimple_alloc_kind_names[] = {
95 "assignments",
96 "phi nodes",
97 "conditionals",
98 "sequences",
99 "everything else"
100 };
101
102 #endif /* GATHER_STATISTICS */
103
104 /* A cache of gimple_seq objects. Sequences are created and destroyed
105 fairly often during gimplification. */
106 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
107
108 /* Private API manipulation functions shared only with some
109 other files. */
110 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
111 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
112
113 /* Gimple tuple constructors.
114    Note: Any constructor taking a ``gimple_seq'' as a parameter can
115    be passed NULL to start with an empty sequence.  */
116
117 /* Set the code for statement G to CODE. */
118
119 static inline void
120 gimple_set_code (gimple g, enum gimple_code code)
121 {
122 g->gsbase.code = code;
123 }
124
125 /* Return the number of bytes needed to hold a GIMPLE statement with
126 code CODE. */
127
128 static inline size_t
129 gimple_size (enum gimple_code code)
130 {
131 return gsstruct_code_size[gss_for_code (code)];
132 }
133
134 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
135 operands. */
136
137 gimple
138 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
139 {
140 size_t size;
141 gimple stmt;
142
143 size = gimple_size (code);
144 if (num_ops > 0)
145 size += sizeof (tree) * (num_ops - 1);
146
147 #ifdef GATHER_STATISTICS
148 {
149 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
150 gimple_alloc_counts[(int) kind]++;
151 gimple_alloc_sizes[(int) kind] += size;
152 }
153 #endif
154
155 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
156 gimple_set_code (stmt, code);
157 gimple_set_num_ops (stmt, num_ops);
158
159 /* Do not call gimple_set_modified here as it has other side
160 effects and this tuple is still not completely built. */
161 stmt->gsbase.modified = 1;
162
163 return stmt;
164 }
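/* For example (illustrative arithmetic only): a tuple whose base structure
   already embeds the 1-element operand array and which needs NUM_OPS == 3
   operands is allocated gimple_size (code) + 2 * sizeof (tree) bytes,
   i.e. room for the two trees beyond the one already counted in the
   structure itself.  */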
165
166 /* Set SUBCODE to be the code of the expression computed by statement G. */
167
168 static inline void
169 gimple_set_subcode (gimple g, unsigned subcode)
170 {
171 /* We only have 16 bits for the RHS code. Assert that we are not
172 overflowing it. */
173 gcc_assert (subcode < (1 << 16));
174 g->gsbase.subcode = subcode;
175 }
176
177
178
179 /* Build a tuple with operands. CODE is the statement to build (which
180 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
181 for the new tuple. NUM_OPS is the number of operands to allocate. */
182
183 #define gimple_build_with_ops(c, s, n) \
184 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
185
186 static gimple
187 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
188 unsigned num_ops MEM_STAT_DECL)
189 {
190 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
191 gimple_set_subcode (s, subcode);
192
193 return s;
194 }
195
196
197 /* Build a GIMPLE_RETURN statement returning RETVAL. */
198
199 gimple
200 gimple_build_return (tree retval)
201 {
202 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
203 if (retval)
204 gimple_return_set_retval (s, retval);
205 return s;
206 }
207
208 /* Reset alias information on call S. */
209
210 void
211 gimple_call_reset_alias_info (gimple s)
212 {
213 if (gimple_call_flags (s) & ECF_CONST)
214 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
215 else
216 pt_solution_reset (gimple_call_use_set (s));
217 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
218 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
219 else
220 pt_solution_reset (gimple_call_clobber_set (s));
221 }
222
223 /* Helper for gimple_build_call, gimple_build_call_vec and
224 gimple_build_call_from_tree. Build the basic components of a
225 GIMPLE_CALL statement to function FN with NARGS arguments. */
226
227 static inline gimple
228 gimple_build_call_1 (tree fn, unsigned nargs)
229 {
230 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
231 if (TREE_CODE (fn) == FUNCTION_DECL)
232 fn = build_fold_addr_expr (fn);
233 gimple_set_op (s, 1, fn);
234 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
235 gimple_call_reset_alias_info (s);
236 return s;
237 }
238
239
240 /* Build a GIMPLE_CALL statement to function FN with the arguments
241 specified in vector ARGS. */
242
243 gimple
244 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
245 {
246 unsigned i;
247 unsigned nargs = VEC_length (tree, args);
248 gimple call = gimple_build_call_1 (fn, nargs);
249
250 for (i = 0; i < nargs; i++)
251 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
252
253 return call;
254 }
255
256
257 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
258 arguments. The ... are the arguments. */
259
260 gimple
261 gimple_build_call (tree fn, unsigned nargs, ...)
262 {
263 va_list ap;
264 gimple call;
265 unsigned i;
266
267 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
268
269 call = gimple_build_call_1 (fn, nargs);
270
271 va_start (ap, nargs);
272 for (i = 0; i < nargs; i++)
273 gimple_call_set_arg (call, i, va_arg (ap, tree));
274 va_end (ap);
275
276 return call;
277 }
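/* Illustrative use (a sketch; FNDECL, ARG0, ARG1, LHS and SEQ are
   hypothetical names and are assumed to be valid GIMPLE operands and a
   gimple_seq respectively):

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);
     gimple_seq_add_stmt (&seq, call);  */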
278
279
280 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
281 Build the basic components of a GIMPLE_CALL statement to internal
282 function FN with NARGS arguments. */
283
284 static inline gimple
285 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
286 {
287 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
288 s->gsbase.subcode |= GF_CALL_INTERNAL;
289 gimple_call_set_internal_fn (s, fn);
290 gimple_call_reset_alias_info (s);
291 return s;
292 }
293
294
295 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
296 the number of arguments. The ... are the arguments. */
297
298 gimple
299 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
300 {
301 va_list ap;
302 gimple call;
303 unsigned i;
304
305 call = gimple_build_call_internal_1 (fn, nargs);
306 va_start (ap, nargs);
307 for (i = 0; i < nargs; i++)
308 gimple_call_set_arg (call, i, va_arg (ap, tree));
309 va_end (ap);
310
311 return call;
312 }
313
314
315 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
316 specified in vector ARGS. */
317
318 gimple
319 gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
320 {
321 unsigned i, nargs;
322 gimple call;
323
324 nargs = VEC_length (tree, args);
325 call = gimple_build_call_internal_1 (fn, nargs);
326 for (i = 0; i < nargs; i++)
327 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
328
329 return call;
330 }
331
332
333 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
334 assumed to be in GIMPLE form already. Minimal checking is done of
335 this fact. */
336
337 gimple
338 gimple_build_call_from_tree (tree t)
339 {
340 unsigned i, nargs;
341 gimple call;
342 tree fndecl = get_callee_fndecl (t);
343
344 gcc_assert (TREE_CODE (t) == CALL_EXPR);
345
346 nargs = call_expr_nargs (t);
347 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
348
349 for (i = 0; i < nargs; i++)
350 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
351
352 gimple_set_block (call, TREE_BLOCK (t));
353
354 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
355 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
356 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
357 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
358 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
359 if (fndecl
360 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
361 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
362 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
363 else
364 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
365 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
366 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
367 gimple_set_no_warning (call, TREE_NO_WARNING (t));
368
369 return call;
370 }
371
372
373 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
374 *OP1_P, *OP2_P and *OP3_P respectively. */
375
376 void
377 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
378 tree *op2_p, tree *op3_p)
379 {
380 enum gimple_rhs_class grhs_class;
381
382 *subcode_p = TREE_CODE (expr);
383 grhs_class = get_gimple_rhs_class (*subcode_p);
384
385 if (grhs_class == GIMPLE_TERNARY_RHS)
386 {
387 *op1_p = TREE_OPERAND (expr, 0);
388 *op2_p = TREE_OPERAND (expr, 1);
389 *op3_p = TREE_OPERAND (expr, 2);
390 }
391 else if (grhs_class == GIMPLE_BINARY_RHS)
392 {
393 *op1_p = TREE_OPERAND (expr, 0);
394 *op2_p = TREE_OPERAND (expr, 1);
395 *op3_p = NULL_TREE;
396 }
397 else if (grhs_class == GIMPLE_UNARY_RHS)
398 {
399 *op1_p = TREE_OPERAND (expr, 0);
400 *op2_p = NULL_TREE;
401 *op3_p = NULL_TREE;
402 }
403 else if (grhs_class == GIMPLE_SINGLE_RHS)
404 {
405 *op1_p = expr;
406 *op2_p = NULL_TREE;
407 *op3_p = NULL_TREE;
408 }
409 else
410 gcc_unreachable ();
411 }
412
413
414 /* Build a GIMPLE_ASSIGN statement.
415
416    LHS is the left-hand side of the assignment.
417    RHS is the right-hand side; it may be a single operand or a unary,
418    binary or ternary expression.  */
418
419 gimple
420 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
421 {
422 enum tree_code subcode;
423 tree op1, op2, op3;
424
425 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
426 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
427 PASS_MEM_STAT);
428 }
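/* Illustrative use (a sketch; LHS, RHS and SEQ are hypothetical,
   already-valid GIMPLE operands and sequence, and gimple_build_assign is
   the usual wrapper macro in gimple.h around gimple_build_assign_stat):

     gimple stmt = gimple_build_assign (lhs, rhs);
     gimple_seq_add_stmt (&seq, stmt);  */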
429
430
431 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
432    OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
433    class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */
434
435 gimple
436 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
437 tree op2, tree op3 MEM_STAT_DECL)
438 {
439 unsigned num_ops;
440 gimple p;
441
442   /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on the
443      code).  */
444 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
445
446 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
447 PASS_MEM_STAT);
448 gimple_assign_set_lhs (p, lhs);
449 gimple_assign_set_rhs1 (p, op1);
450 if (op2)
451 {
452 gcc_assert (num_ops > 2);
453 gimple_assign_set_rhs2 (p, op2);
454 }
455
456 if (op3)
457 {
458 gcc_assert (num_ops > 3);
459 gimple_assign_set_rhs3 (p, op3);
460 }
461
462 return p;
463 }
464
465
466 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
467
468 DST/SRC are the destination and source respectively. You can pass
469 ungimplified trees in DST or SRC, in which case they will be
470 converted to a gimple operand if necessary.
471
472 This function returns the newly created GIMPLE_ASSIGN tuple. */
473
474 gimple
475 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
476 {
477 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
478 gimplify_and_add (t, seq_p);
479 ggc_free (t);
480 return gimple_seq_last_stmt (*seq_p);
481 }
482
483
484 /* Build a GIMPLE_COND statement.
485
486    PRED_CODE is the comparison code used to compare LHS and RHS.
487 T_LABEL is the label to jump to if the condition is true.
488 F_LABEL is the label to jump to otherwise. */
489
490 gimple
491 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
492 tree t_label, tree f_label)
493 {
494 gimple p;
495
496 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
497 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
498 gimple_cond_set_lhs (p, lhs);
499 gimple_cond_set_rhs (p, rhs);
500 gimple_cond_set_true_label (p, t_label);
501 gimple_cond_set_false_label (p, f_label);
502 return p;
503 }
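/* Illustrative use (a sketch; A, B, T_LABEL and F_LABEL are hypothetical;
   the labels may be NULL_TREE once the CFG edges carry the destinations):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);  */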
504
505
506 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
507
508 void
509 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
510 tree *lhs_p, tree *rhs_p)
511 {
512 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
513 || TREE_CODE (cond) == TRUTH_NOT_EXPR
514 || is_gimple_min_invariant (cond)
515 || SSA_VAR_P (cond));
516
517 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
518
519 /* Canonicalize conditionals of the form 'if (!VAL)'. */
520 if (*code_p == TRUTH_NOT_EXPR)
521 {
522 *code_p = EQ_EXPR;
523 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
524 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
525 }
526 /* Canonicalize conditionals of the form 'if (VAL)' */
527 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
528 {
529 *code_p = NE_EXPR;
530 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
531 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
532 }
533 }
534
535
536 /* Build a GIMPLE_COND statement from the conditional expression tree
537 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
538
539 gimple
540 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
541 {
542 enum tree_code code;
543 tree lhs, rhs;
544
545 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
546 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
547 }
548
549 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
550 boolean expression tree COND. */
551
552 void
553 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
554 {
555 enum tree_code code;
556 tree lhs, rhs;
557
558 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
559 gimple_cond_set_condition (stmt, code, lhs, rhs);
560 }
561
562 /* Build a GIMPLE_LABEL statement for LABEL. */
563
564 gimple
565 gimple_build_label (tree label)
566 {
567 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
568 gimple_label_set_label (p, label);
569 return p;
570 }
571
572 /* Build a GIMPLE_GOTO statement to label DEST. */
573
574 gimple
575 gimple_build_goto (tree dest)
576 {
577 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
578 gimple_goto_set_dest (p, dest);
579 return p;
580 }
581
582
583 /* Build a GIMPLE_NOP statement. */
584
585 gimple
586 gimple_build_nop (void)
587 {
588 return gimple_alloc (GIMPLE_NOP, 0);
589 }
590
591
592 /* Build a GIMPLE_BIND statement.
593 VARS are the variables in BODY.
594 BLOCK is the containing block. */
595
596 gimple
597 gimple_build_bind (tree vars, gimple_seq body, tree block)
598 {
599 gimple p = gimple_alloc (GIMPLE_BIND, 0);
600 gimple_bind_set_vars (p, vars);
601 if (body)
602 gimple_bind_set_body (p, body);
603 if (block)
604 gimple_bind_set_block (p, block);
605 return p;
606 }
607
608 /* Helper function to set the simple fields of an asm stmt.
609
610    STRING is a pointer to a string that is the asm block's assembly code.
611    NINPUTS is the number of register inputs.
612    NOUTPUTS is the number of register outputs.
613    NCLOBBERS is the number of clobbered registers.
614    NLABELS is the number of destination labels.  */
615
616 static inline gimple
617 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
618 unsigned nclobbers, unsigned nlabels)
619 {
620 gimple p;
621 int size = strlen (string);
622
623 /* ASMs with labels cannot have outputs. This should have been
624 enforced by the front end. */
625 gcc_assert (nlabels == 0 || noutputs == 0);
626
627 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
628 ninputs + noutputs + nclobbers + nlabels);
629
630 p->gimple_asm.ni = ninputs;
631 p->gimple_asm.no = noutputs;
632 p->gimple_asm.nc = nclobbers;
633 p->gimple_asm.nl = nlabels;
634 p->gimple_asm.string = ggc_alloc_string (string, size);
635
636 #ifdef GATHER_STATISTICS
637 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
638 #endif
639
640 return p;
641 }
642
643 /* Build a GIMPLE_ASM statement.
644
645 STRING is the assembly code.
649 INPUTS is a vector of the input register parameters.
650 OUTPUTS is a vector of the output register parameters.
651 CLOBBERS is a vector of the clobbered register parameters.
652 LABELS is a vector of destination labels. */
653
654 gimple
655 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
656 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
657 VEC(tree,gc)* labels)
658 {
659 gimple p;
660 unsigned i;
661
662 p = gimple_build_asm_1 (string,
663 VEC_length (tree, inputs),
664 VEC_length (tree, outputs),
665 VEC_length (tree, clobbers),
666 VEC_length (tree, labels));
667
668 for (i = 0; i < VEC_length (tree, inputs); i++)
669 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
670
671 for (i = 0; i < VEC_length (tree, outputs); i++)
672 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
673
674 for (i = 0; i < VEC_length (tree, clobbers); i++)
675 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
676
677 for (i = 0; i < VEC_length (tree, labels); i++)
678 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
679
680 return p;
681 }
682
683 /* Build a GIMPLE_CATCH statement.
684
685 TYPES are the catch types.
686 HANDLER is the exception handler. */
687
688 gimple
689 gimple_build_catch (tree types, gimple_seq handler)
690 {
691 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
692 gimple_catch_set_types (p, types);
693 if (handler)
694 gimple_catch_set_handler (p, handler);
695
696 return p;
697 }
698
699 /* Build a GIMPLE_EH_FILTER statement.
700
701 TYPES are the filter's types.
702 FAILURE is the filter's failure action. */
703
704 gimple
705 gimple_build_eh_filter (tree types, gimple_seq failure)
706 {
707 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
708 gimple_eh_filter_set_types (p, types);
709 if (failure)
710 gimple_eh_filter_set_failure (p, failure);
711
712 return p;
713 }
714
715 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
716
717 gimple
718 gimple_build_eh_must_not_throw (tree decl)
719 {
720 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
721
722 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
723 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
724 gimple_eh_must_not_throw_set_fndecl (p, decl);
725
726 return p;
727 }
728
729 /* Build a GIMPLE_TRY statement.
730
731 EVAL is the expression to evaluate.
732 CLEANUP is the cleanup expression.
733 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
734 whether this is a try/catch or a try/finally respectively. */
735
736 gimple
737 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
738 enum gimple_try_flags kind)
739 {
740 gimple p;
741
742 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
743 p = gimple_alloc (GIMPLE_TRY, 0);
744 gimple_set_subcode (p, kind);
745 if (eval)
746 gimple_try_set_eval (p, eval);
747 if (cleanup)
748 gimple_try_set_cleanup (p, cleanup);
749
750 return p;
751 }
752
753 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
754
755 CLEANUP is the cleanup expression. */
756
757 gimple
758 gimple_build_wce (gimple_seq cleanup)
759 {
760 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
761 if (cleanup)
762 gimple_wce_set_cleanup (p, cleanup);
763
764 return p;
765 }
766
767
768 /* Build a GIMPLE_RESX statement. */
769
770 gimple
771 gimple_build_resx (int region)
772 {
773 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
774 p->gimple_eh_ctrl.region = region;
775 return p;
776 }
777
778
779 /* The helper for constructing a gimple switch statement.
780 INDEX is the switch's index.
781 NLABELS is the number of labels in the switch excluding the default.
782 DEFAULT_LABEL is the default label for the switch statement. */
783
784 gimple
785 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
786 {
787 /* nlabels + 1 default label + 1 index. */
788 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
789 1 + (default_label != NULL) + nlabels);
790 gimple_switch_set_index (p, index);
791 if (default_label)
792 gimple_switch_set_default_label (p, default_label);
793 return p;
794 }
795
796
797 /* Build a GIMPLE_SWITCH statement.
798
799 INDEX is the switch's index.
800 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
801 ... are the labels excluding the default. */
802
803 gimple
804 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
805 {
806 va_list al;
807 unsigned i, offset;
808 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
809
810 /* Store the rest of the labels. */
811 va_start (al, default_label);
812 offset = (default_label != NULL);
813 for (i = 0; i < nlabels; i++)
814 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
815 va_end (al);
816
817 return p;
818 }
819
820
821 /* Build a GIMPLE_SWITCH statement.
822
823 INDEX is the switch's index.
824    DEFAULT_LABEL is the default label.
825 ARGS is a vector of labels excluding the default. */
826
827 gimple
828 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
829 {
830 unsigned i, offset, nlabels = VEC_length (tree, args);
831 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
832
833 /* Copy the labels from the vector to the switch statement. */
834 offset = (default_label != NULL);
835 for (i = 0; i < nlabels; i++)
836 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
837
838 return p;
839 }
840
841 /* Build a GIMPLE_EH_DISPATCH statement. */
842
843 gimple
844 gimple_build_eh_dispatch (int region)
845 {
846 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
847 p->gimple_eh_ctrl.region = region;
848 return p;
849 }
850
851 /* Build a new GIMPLE_DEBUG_BIND statement.
852
853 VAR is bound to VALUE; block and location are taken from STMT. */
854
855 gimple
856 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
857 {
858 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
859 (unsigned)GIMPLE_DEBUG_BIND, 2
860 PASS_MEM_STAT);
861
862 gimple_debug_bind_set_var (p, var);
863 gimple_debug_bind_set_value (p, value);
864 if (stmt)
865 {
866 gimple_set_block (p, gimple_block (stmt));
867 gimple_set_location (p, gimple_location (stmt));
868 }
869
870 return p;
871 }
872
873
874 /* Build a GIMPLE_OMP_CRITICAL statement.
875
876 BODY is the sequence of statements for which only one thread can execute.
877    NAME is an optional identifier for this critical block.  */
878
879 gimple
880 gimple_build_omp_critical (gimple_seq body, tree name)
881 {
882 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
883 gimple_omp_critical_set_name (p, name);
884 if (body)
885 gimple_omp_set_body (p, body);
886
887 return p;
888 }
889
890 /* Build a GIMPLE_OMP_FOR statement.
891
892    BODY is the sequence of statements inside the for loop.
893    CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
894    lastprivate, reduction, ordered, schedule, and nowait.
895 COLLAPSE is the collapse count.
896 PRE_BODY is the sequence of statements that are loop invariant. */
897
898 gimple
899 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
900 gimple_seq pre_body)
901 {
902 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
903 if (body)
904 gimple_omp_set_body (p, body);
905 gimple_omp_for_set_clauses (p, clauses);
906 p->gimple_omp_for.collapse = collapse;
907 p->gimple_omp_for.iter
908 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
909 if (pre_body)
910 gimple_omp_for_set_pre_body (p, pre_body);
911
912 return p;
913 }
914
915
916 /* Build a GIMPLE_OMP_PARALLEL statement.
917
918    BODY is the sequence of statements which are executed in parallel.
919    CLAUSES are the OMP parallel construct's clauses.
920    CHILD_FN is the function created for the parallel threads to execute.
921    DATA_ARG is the shared data argument(s).  */
922
923 gimple
924 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
925 tree data_arg)
926 {
927 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
928 if (body)
929 gimple_omp_set_body (p, body);
930 gimple_omp_parallel_set_clauses (p, clauses);
931 gimple_omp_parallel_set_child_fn (p, child_fn);
932 gimple_omp_parallel_set_data_arg (p, data_arg);
933
934 return p;
935 }
936
937
938 /* Build a GIMPLE_OMP_TASK statement.
939
940    BODY is the sequence of statements which are executed by the explicit task.
941    CLAUSES are the OMP task construct's clauses.
942    CHILD_FN is the function created for the task to execute.
943    DATA_ARG is the shared data argument(s).
944 COPY_FN is the optional function for firstprivate initialization.
945 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
946
947 gimple
948 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
949 tree data_arg, tree copy_fn, tree arg_size,
950 tree arg_align)
951 {
952 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
953 if (body)
954 gimple_omp_set_body (p, body);
955 gimple_omp_task_set_clauses (p, clauses);
956 gimple_omp_task_set_child_fn (p, child_fn);
957 gimple_omp_task_set_data_arg (p, data_arg);
958 gimple_omp_task_set_copy_fn (p, copy_fn);
959 gimple_omp_task_set_arg_size (p, arg_size);
960 gimple_omp_task_set_arg_align (p, arg_align);
961
962 return p;
963 }
964
965
966 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
967
968 BODY is the sequence of statements in the section. */
969
970 gimple
971 gimple_build_omp_section (gimple_seq body)
972 {
973 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
974 if (body)
975 gimple_omp_set_body (p, body);
976
977 return p;
978 }
979
980
981 /* Build a GIMPLE_OMP_MASTER statement.
982
983 BODY is the sequence of statements to be executed by just the master. */
984
985 gimple
986 gimple_build_omp_master (gimple_seq body)
987 {
988 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
989 if (body)
990 gimple_omp_set_body (p, body);
991
992 return p;
993 }
994
995
996 /* Build a GIMPLE_OMP_CONTINUE statement.
997
998 CONTROL_DEF is the definition of the control variable.
999 CONTROL_USE is the use of the control variable. */
1000
1001 gimple
1002 gimple_build_omp_continue (tree control_def, tree control_use)
1003 {
1004 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1005 gimple_omp_continue_set_control_def (p, control_def);
1006 gimple_omp_continue_set_control_use (p, control_use);
1007 return p;
1008 }
1009
1010 /* Build a GIMPLE_OMP_ORDERED statement.
1011
1012    BODY is the sequence of statements inside a loop that will be executed in
1013    sequence.  */
1014
1015 gimple
1016 gimple_build_omp_ordered (gimple_seq body)
1017 {
1018 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1019 if (body)
1020 gimple_omp_set_body (p, body);
1021
1022 return p;
1023 }
1024
1025
1026 /* Build a GIMPLE_OMP_RETURN statement.
1027 WAIT_P is true if this is a non-waiting return. */
1028
1029 gimple
1030 gimple_build_omp_return (bool wait_p)
1031 {
1032 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1033 if (wait_p)
1034 gimple_omp_return_set_nowait (p);
1035
1036 return p;
1037 }
1038
1039
1040 /* Build a GIMPLE_OMP_SECTIONS statement.
1041
1042 BODY is a sequence of section statements.
1043    CLAUSES are any of the OMP sections construct's clauses: private,
1044 firstprivate, lastprivate, reduction, and nowait. */
1045
1046 gimple
1047 gimple_build_omp_sections (gimple_seq body, tree clauses)
1048 {
1049 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1050 if (body)
1051 gimple_omp_set_body (p, body);
1052 gimple_omp_sections_set_clauses (p, clauses);
1053
1054 return p;
1055 }
1056
1057
1058 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1059
1060 gimple
1061 gimple_build_omp_sections_switch (void)
1062 {
1063 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1064 }
1065
1066
1067 /* Build a GIMPLE_OMP_SINGLE statement.
1068
1069 BODY is the sequence of statements that will be executed once.
1070 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1071 copyprivate, nowait. */
1072
1073 gimple
1074 gimple_build_omp_single (gimple_seq body, tree clauses)
1075 {
1076 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1077 if (body)
1078 gimple_omp_set_body (p, body);
1079 gimple_omp_single_set_clauses (p, clauses);
1080
1081 return p;
1082 }
1083
1084
1085 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1086
1087 gimple
1088 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1089 {
1090 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1091 gimple_omp_atomic_load_set_lhs (p, lhs);
1092 gimple_omp_atomic_load_set_rhs (p, rhs);
1093 return p;
1094 }
1095
1096 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1097
1098 VAL is the value we are storing. */
1099
1100 gimple
1101 gimple_build_omp_atomic_store (tree val)
1102 {
1103 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1104 gimple_omp_atomic_store_set_val (p, val);
1105 return p;
1106 }
1107
1108 /* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
1109    predict.def; OUTCOME is NOT_TAKEN or TAKEN.  */
1110
1111 gimple
1112 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1113 {
1114 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1115 /* Ensure all the predictors fit into the lower bits of the subcode. */
1116 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1117 gimple_predict_set_predictor (p, predictor);
1118 gimple_predict_set_outcome (p, outcome);
1119 return p;
1120 }
1121
1122 #if defined ENABLE_GIMPLE_CHECKING
1123 /* Complain of a gimple type mismatch and die. */
1124
1125 void
1126 gimple_check_failed (const_gimple gs, const char *file, int line,
1127 const char *function, enum gimple_code code,
1128 enum tree_code subcode)
1129 {
1130 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1131 gimple_code_name[code],
1132 tree_code_name[subcode],
1133 gimple_code_name[gimple_code (gs)],
1134 gs->gsbase.subcode > 0
1135 ? tree_code_name[gs->gsbase.subcode]
1136 : "",
1137 function, trim_filename (file), line);
1138 }
1139 #endif /* ENABLE_GIMPLE_CHECKING */
1140
1141
1142 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1143 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1144 instead. */
1145
1146 gimple_seq
1147 gimple_seq_alloc (void)
1148 {
1149 gimple_seq seq = gimple_seq_cache;
1150 if (seq)
1151 {
1152 gimple_seq_cache = gimple_seq_cache->next_free;
1153 gcc_assert (gimple_seq_cache != seq);
1154 memset (seq, 0, sizeof (*seq));
1155 }
1156 else
1157 {
1158 seq = ggc_alloc_cleared_gimple_seq_d ();
1159 #ifdef GATHER_STATISTICS
1160 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1161 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1162 #endif
1163 }
1164
1165 return seq;
1166 }
1167
1168 /* Return SEQ to the free pool of GIMPLE sequences. */
1169
1170 void
1171 gimple_seq_free (gimple_seq seq)
1172 {
1173 if (seq == NULL)
1174 return;
1175
1176 gcc_assert (gimple_seq_first (seq) == NULL);
1177 gcc_assert (gimple_seq_last (seq) == NULL);
1178
1179 /* If this triggers, it's a sign that the same list is being freed
1180 twice. */
1181 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1182
1183 /* Add SEQ to the pool of free sequences. */
1184 seq->next_free = gimple_seq_cache;
1185 gimple_seq_cache = seq;
1186 }
1187
1188
1189 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1190 *SEQ_P is NULL, a new sequence is allocated. */
1191
1192 void
1193 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1194 {
1195 gimple_stmt_iterator si;
1196
1197 if (gs == NULL)
1198 return;
1199
1200 if (*seq_p == NULL)
1201 *seq_p = gimple_seq_alloc ();
1202
1203 si = gsi_last (*seq_p);
1204 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1205 }
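/* Illustrative use (a sketch): a sequence is typically grown from NULL,
   letting this function allocate it on the first addition:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);  */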
1206
1207
1208 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1209 NULL, a new sequence is allocated. */
1210
1211 void
1212 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1213 {
1214 gimple_stmt_iterator si;
1215
1216 if (src == NULL)
1217 return;
1218
1219 if (*dst_p == NULL)
1220 *dst_p = gimple_seq_alloc ();
1221
1222 si = gsi_last (*dst_p);
1223 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1224 }
1225
1226
1227 /* Helper function of empty_body_p. Return true if STMT is an empty
1228 statement. */
1229
1230 static bool
1231 empty_stmt_p (gimple stmt)
1232 {
1233 if (gimple_code (stmt) == GIMPLE_NOP)
1234 return true;
1235 if (gimple_code (stmt) == GIMPLE_BIND)
1236 return empty_body_p (gimple_bind_body (stmt));
1237 return false;
1238 }
1239
1240
1241 /* Return true if BODY contains nothing but empty statements. */
1242
1243 bool
1244 empty_body_p (gimple_seq body)
1245 {
1246 gimple_stmt_iterator i;
1247
1248 if (gimple_seq_empty_p (body))
1249 return true;
1250 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1251 if (!empty_stmt_p (gsi_stmt (i))
1252 && !is_gimple_debug (gsi_stmt (i)))
1253 return false;
1254
1255 return true;
1256 }
1257
1258
1259 /* Perform a deep copy of sequence SRC and return the result. */
1260
1261 gimple_seq
1262 gimple_seq_copy (gimple_seq src)
1263 {
1264 gimple_stmt_iterator gsi;
1265 gimple_seq new_seq = gimple_seq_alloc ();
1266 gimple stmt;
1267
1268 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1269 {
1270 stmt = gimple_copy (gsi_stmt (gsi));
1271 gimple_seq_add_stmt (&new_seq, stmt);
1272 }
1273
1274 return new_seq;
1275 }
1276
1277
1278 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1279 on each one. WI is as in walk_gimple_stmt.
1280
1281 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1282 value is stored in WI->CALLBACK_RESULT and the statement that
1283 produced the value is returned.
1284
1285 Otherwise, all the statements are walked and NULL returned. */
1286
1287 gimple
1288 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1289 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1290 {
1291 gimple_stmt_iterator gsi;
1292
1293 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1294 {
1295 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1296 if (ret)
1297 {
1298 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1299 to hold it. */
1300 gcc_assert (wi);
1301 wi->callback_result = ret;
1302 return gsi_stmt (gsi);
1303 }
1304 }
1305
1306 if (wi)
1307 wi->callback_result = NULL_TREE;
1308
1309 return NULL;
1310 }
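/* Illustrative use (a sketch; the callback and counter names are
   hypothetical):

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops_p = false;
       return NULL_TREE;
     }

     ...
     unsigned ncalls = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &ncalls;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);  */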
1311
1312
1313 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1314
1315 static tree
1316 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1317 struct walk_stmt_info *wi)
1318 {
1319 tree ret, op;
1320 unsigned noutputs;
1321 const char **oconstraints;
1322 unsigned i, n;
1323 const char *constraint;
1324 bool allows_mem, allows_reg, is_inout;
1325
1326 noutputs = gimple_asm_noutputs (stmt);
1327 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1328
1329 if (wi)
1330 wi->is_lhs = true;
1331
1332 for (i = 0; i < noutputs; i++)
1333 {
1334 op = gimple_asm_output_op (stmt, i);
1335 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1336 oconstraints[i] = constraint;
1337 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1338 &is_inout);
1339 if (wi)
1340 wi->val_only = (allows_reg || !allows_mem);
1341 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1342 if (ret)
1343 return ret;
1344 }
1345
1346 n = gimple_asm_ninputs (stmt);
1347 for (i = 0; i < n; i++)
1348 {
1349 op = gimple_asm_input_op (stmt, i);
1350 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1351 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1352 oconstraints, &allows_mem, &allows_reg);
1353 if (wi)
1354 {
1355 wi->val_only = (allows_reg || !allows_mem);
1356          /* Although an "m" input is not really an LHS, we need an lvalue.  */
1357 wi->is_lhs = !wi->val_only;
1358 }
1359 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1360 if (ret)
1361 return ret;
1362 }
1363
1364 if (wi)
1365 {
1366 wi->is_lhs = false;
1367 wi->val_only = true;
1368 }
1369
1370 n = gimple_asm_nlabels (stmt);
1371 for (i = 0; i < n; i++)
1372 {
1373 op = gimple_asm_label_op (stmt, i);
1374 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1375 if (ret)
1376 return ret;
1377 }
1378
1379 return NULL_TREE;
1380 }
1381
1382
1383 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1384 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1385
1386 CALLBACK_OP is called on each operand of STMT via walk_tree.
1387 Additional parameters to walk_tree must be stored in WI. For each operand
1388 OP, walk_tree is called as:
1389
1390 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1391
1392 If CALLBACK_OP returns non-NULL for an operand, the remaining
1393 operands are not scanned.
1394
1395 The return value is that returned by the last call to walk_tree, or
1396 NULL_TREE if no CALLBACK_OP is specified. */
1397
1398 tree
1399 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1400 struct walk_stmt_info *wi)
1401 {
1402 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1403 unsigned i;
1404 tree ret = NULL_TREE;
1405
1406 switch (gimple_code (stmt))
1407 {
1408 case GIMPLE_ASSIGN:
1409 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1410 is a register variable, we may use a COMPONENT_REF on the RHS. */
1411 if (wi)
1412 {
1413 tree lhs = gimple_assign_lhs (stmt);
1414 wi->val_only
1415 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1416 || !gimple_assign_single_p (stmt);
1417 }
1418
1419 for (i = 1; i < gimple_num_ops (stmt); i++)
1420 {
1421 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1422 pset);
1423 if (ret)
1424 return ret;
1425 }
1426
1427 /* Walk the LHS. If the RHS is appropriate for a memory, we
1428 may use a COMPONENT_REF on the LHS. */
1429 if (wi)
1430 {
1431 /* If the RHS has more than 1 operand, it is not appropriate
1432 for the memory. */
1433 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1434 || !gimple_assign_single_p (stmt);
1435 wi->is_lhs = true;
1436 }
1437
1438 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1439 if (ret)
1440 return ret;
1441
1442 if (wi)
1443 {
1444 wi->val_only = true;
1445 wi->is_lhs = false;
1446 }
1447 break;
1448
1449 case GIMPLE_CALL:
1450 if (wi)
1451 {
1452 wi->is_lhs = false;
1453 wi->val_only = true;
1454 }
1455
1456 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1457 if (ret)
1458 return ret;
1459
1460 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1461 if (ret)
1462 return ret;
1463
1464 for (i = 0; i < gimple_call_num_args (stmt); i++)
1465 {
1466 if (wi)
1467 wi->val_only = is_gimple_reg_type (gimple_call_arg (stmt, i));
1468 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1469 pset);
1470 if (ret)
1471 return ret;
1472 }
1473
1474 if (gimple_call_lhs (stmt))
1475 {
1476 if (wi)
1477 {
1478 wi->is_lhs = true;
1479 wi->val_only = is_gimple_reg_type (gimple_call_lhs (stmt));
1480 }
1481
1482 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1483 if (ret)
1484 return ret;
1485 }
1486
1487 if (wi)
1488 {
1489 wi->is_lhs = false;
1490 wi->val_only = true;
1491 }
1492 break;
1493
1494 case GIMPLE_CATCH:
1495 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1496 pset);
1497 if (ret)
1498 return ret;
1499 break;
1500
1501 case GIMPLE_EH_FILTER:
1502 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1503 pset);
1504 if (ret)
1505 return ret;
1506 break;
1507
1508 case GIMPLE_ASM:
1509 ret = walk_gimple_asm (stmt, callback_op, wi);
1510 if (ret)
1511 return ret;
1512 break;
1513
1514 case GIMPLE_OMP_CONTINUE:
1515 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1516 callback_op, wi, pset);
1517 if (ret)
1518 return ret;
1519
1520 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1521 callback_op, wi, pset);
1522 if (ret)
1523 return ret;
1524 break;
1525
1526 case GIMPLE_OMP_CRITICAL:
1527 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1528 pset);
1529 if (ret)
1530 return ret;
1531 break;
1532
1533 case GIMPLE_OMP_FOR:
1534 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1535 pset);
1536 if (ret)
1537 return ret;
1538 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1539 {
1540 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1541 wi, pset);
1542 if (ret)
1543 return ret;
1544 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1545 wi, pset);
1546 if (ret)
1547 return ret;
1548 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1549 wi, pset);
1550 if (ret)
1551 return ret;
1552 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1553 wi, pset);
1554 }
1555 if (ret)
1556 return ret;
1557 break;
1558
1559 case GIMPLE_OMP_PARALLEL:
1560 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1561 wi, pset);
1562 if (ret)
1563 return ret;
1564 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1565 wi, pset);
1566 if (ret)
1567 return ret;
1568 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1569 wi, pset);
1570 if (ret)
1571 return ret;
1572 break;
1573
1574 case GIMPLE_OMP_TASK:
1575 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1576 wi, pset);
1577 if (ret)
1578 return ret;
1579 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1580 wi, pset);
1581 if (ret)
1582 return ret;
1583 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1584 wi, pset);
1585 if (ret)
1586 return ret;
1587 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1588 wi, pset);
1589 if (ret)
1590 return ret;
1591 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1592 wi, pset);
1593 if (ret)
1594 return ret;
1595 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1596 wi, pset);
1597 if (ret)
1598 return ret;
1599 break;
1600
1601 case GIMPLE_OMP_SECTIONS:
1602 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1603 wi, pset);
1604 if (ret)
1605 return ret;
1606
1607 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1608 wi, pset);
1609 if (ret)
1610 return ret;
1611
1612 break;
1613
1614 case GIMPLE_OMP_SINGLE:
1615 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1616 pset);
1617 if (ret)
1618 return ret;
1619 break;
1620
1621 case GIMPLE_OMP_ATOMIC_LOAD:
1622 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1623 pset);
1624 if (ret)
1625 return ret;
1626
1627 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1628 pset);
1629 if (ret)
1630 return ret;
1631 break;
1632
1633 case GIMPLE_OMP_ATOMIC_STORE:
1634 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1635 wi, pset);
1636 if (ret)
1637 return ret;
1638 break;
1639
1640 /* Tuples that do not have operands. */
1641 case GIMPLE_NOP:
1642 case GIMPLE_RESX:
1643 case GIMPLE_OMP_RETURN:
1644 case GIMPLE_PREDICT:
1645 break;
1646
1647 default:
1648 {
1649 enum gimple_statement_structure_enum gss;
1650 gss = gimple_statement_structure (stmt);
1651 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1652 for (i = 0; i < gimple_num_ops (stmt); i++)
1653 {
1654 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1655 if (ret)
1656 return ret;
1657 }
1658 }
1659 break;
1660 }
1661
1662 return NULL_TREE;
1663 }
1664
1665
1666 /* Walk the current statement in GSI (optionally using traversal state
1667 stored in WI). If WI is NULL, no state is kept during traversal.
1668 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1669 that it has handled all the operands of the statement, its return
1670 value is returned. Otherwise, the return value from CALLBACK_STMT
1671 is discarded and its operands are scanned.
1672
1673 If CALLBACK_STMT is NULL or it didn't handle the operands,
1674 CALLBACK_OP is called on each operand of the statement via
1675 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1676 operand, the remaining operands are not scanned. In this case, the
1677 return value from CALLBACK_OP is returned.
1678
1679 In any other case, NULL_TREE is returned. */
1680
1681 tree
1682 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1683 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1684 {
1685 gimple ret;
1686 tree tree_ret;
1687 gimple stmt = gsi_stmt (*gsi);
1688
1689 if (wi)
1690 wi->gsi = *gsi;
1691
1692 if (wi && wi->want_locations && gimple_has_location (stmt))
1693 input_location = gimple_location (stmt);
1694
1695 ret = NULL;
1696
1697 /* Invoke the statement callback. Return if the callback handled
1698      all of STMT's operands by itself.  */
1699 if (callback_stmt)
1700 {
1701 bool handled_ops = false;
1702 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1703 if (handled_ops)
1704 return tree_ret;
1705
1706 /* If CALLBACK_STMT did not handle operands, it should not have
1707 a value to return. */
1708 gcc_assert (tree_ret == NULL);
1709
1710 /* Re-read stmt in case the callback changed it. */
1711 stmt = gsi_stmt (*gsi);
1712 }
1713
1714 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1715 if (callback_op)
1716 {
1717 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1718 if (tree_ret)
1719 return tree_ret;
1720 }
1721
1722 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1723 switch (gimple_code (stmt))
1724 {
1725 case GIMPLE_BIND:
1726 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1727 callback_op, wi);
1728 if (ret)
1729 return wi->callback_result;
1730 break;
1731
1732 case GIMPLE_CATCH:
1733 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1734 callback_op, wi);
1735 if (ret)
1736 return wi->callback_result;
1737 break;
1738
1739 case GIMPLE_EH_FILTER:
1740 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1741 callback_op, wi);
1742 if (ret)
1743 return wi->callback_result;
1744 break;
1745
1746 case GIMPLE_TRY:
1747 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1748 wi);
1749 if (ret)
1750 return wi->callback_result;
1751
1752 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1753 callback_op, wi);
1754 if (ret)
1755 return wi->callback_result;
1756 break;
1757
1758 case GIMPLE_OMP_FOR:
1759 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1760 callback_op, wi);
1761 if (ret)
1762 return wi->callback_result;
1763
1764 /* FALL THROUGH. */
1765 case GIMPLE_OMP_CRITICAL:
1766 case GIMPLE_OMP_MASTER:
1767 case GIMPLE_OMP_ORDERED:
1768 case GIMPLE_OMP_SECTION:
1769 case GIMPLE_OMP_PARALLEL:
1770 case GIMPLE_OMP_TASK:
1771 case GIMPLE_OMP_SECTIONS:
1772 case GIMPLE_OMP_SINGLE:
1773 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1774 wi);
1775 if (ret)
1776 return wi->callback_result;
1777 break;
1778
1779 case GIMPLE_WITH_CLEANUP_EXPR:
1780 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1781 callback_op, wi);
1782 if (ret)
1783 return wi->callback_result;
1784 break;
1785
1786 default:
1787 gcc_assert (!gimple_has_substatements (stmt));
1788 break;
1789 }
1790
1791 return NULL;
1792 }
1793
1794
1795 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1796
1797 void
1798 gimple_set_body (tree fndecl, gimple_seq seq)
1799 {
1800 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1801 if (fn == NULL)
1802 {
1803 /* If FNDECL still does not have a function structure associated
1804 with it, then it does not make sense for it to receive a
1805 GIMPLE body. */
1806 gcc_assert (seq == NULL);
1807 }
1808 else
1809 fn->gimple_body = seq;
1810 }
1811
1812
1813 /* Return the body of GIMPLE statements for function FN. After the
1814 CFG pass, the function body doesn't exist anymore because it has
1815 been split up into basic blocks. In this case, it returns
1816 NULL. */
1817
1818 gimple_seq
1819 gimple_body (tree fndecl)
1820 {
1821 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1822 return fn ? fn->gimple_body : NULL;
1823 }
1824
1825 /* Return true when FNDECL has a GIMPLE body, either in unlowered
1826    or CFG form.  */
1827 bool
1828 gimple_has_body_p (tree fndecl)
1829 {
1830 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1831 return (gimple_body (fndecl) || (fn && fn->cfg));
1832 }
1833
1834 /* Return true if calls C1 and C2 are known to go to the same function. */
1835
1836 bool
1837 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1838 {
1839 if (gimple_call_internal_p (c1))
1840 return (gimple_call_internal_p (c2)
1841 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1842 else
1843 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1844 || (gimple_call_fndecl (c1)
1845 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1846 }
1847
1848 /* Detect flags from a GIMPLE_CALL. This is just like
1849 call_expr_flags, but for gimple tuples. */
1850
1851 int
1852 gimple_call_flags (const_gimple stmt)
1853 {
1854 int flags;
1855 tree decl = gimple_call_fndecl (stmt);
1856
1857 if (decl)
1858 flags = flags_from_decl_or_type (decl);
1859 else if (gimple_call_internal_p (stmt))
1860 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1861 else
1862 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1863
1864 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1865 flags |= ECF_NOTHROW;
1866
1867 return flags;
1868 }
1869
1870 /* Return the "fn spec" string for call STMT. */
1871
1872 static tree
1873 gimple_call_fnspec (const_gimple stmt)
1874 {
1875 tree type, attr;
1876
1877 type = gimple_call_fntype (stmt);
1878 if (!type)
1879 return NULL_TREE;
1880
1881 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1882 if (!attr)
1883 return NULL_TREE;
1884
1885 return TREE_VALUE (TREE_VALUE (attr));
1886 }
1887
1888 /* Detects argument flags for argument number ARG on call STMT. */
1889
1890 int
1891 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1892 {
1893 tree attr = gimple_call_fnspec (stmt);
1894
1895 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1896 return 0;
1897
1898 switch (TREE_STRING_POINTER (attr)[1 + arg])
1899 {
1900 case 'x':
1901 case 'X':
1902 return EAF_UNUSED;
1903
1904 case 'R':
1905 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1906
1907 case 'r':
1908 return EAF_NOCLOBBER | EAF_NOESCAPE;
1909
1910 case 'W':
1911 return EAF_DIRECT | EAF_NOESCAPE;
1912
1913 case 'w':
1914 return EAF_NOESCAPE;
1915
1916 case '.':
1917 default:
1918 return 0;
1919 }
1920 }
1921
1922 /* Detects return flags for the call STMT. */
1923
1924 int
1925 gimple_call_return_flags (const_gimple stmt)
1926 {
1927 tree attr;
1928
1929 if (gimple_call_flags (stmt) & ECF_MALLOC)
1930 return ERF_NOALIAS;
1931
1932 attr = gimple_call_fnspec (stmt);
1933 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1934 return 0;
1935
1936 switch (TREE_STRING_POINTER (attr)[0])
1937 {
1938 case '1':
1939 case '2':
1940 case '3':
1941 case '4':
1942 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1943
1944 case 'm':
1945 return ERF_NOALIAS;
1946
1947 case '.':
1948 default:
1949 return 0;
1950 }
1951 }
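/* For example (illustrative only): with a "fn spec" string of ".R.", the
   return value has no known property ('.' at index 0), argument 0 is
   dereferenced only directly, not clobbered and does not escape ('R' at
   index 1), and nothing is known about argument 1 ('.' at index 2).  */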
1952
1953
1954 /* Return true if GS is a copy assignment. */
1955
1956 bool
1957 gimple_assign_copy_p (gimple gs)
1958 {
1959 return (gimple_assign_single_p (gs)
1960 && is_gimple_val (gimple_op (gs, 1)));
1961 }
1962
1963
1964 /* Return true if GS is a SSA_NAME copy assignment. */
1965
1966 bool
1967 gimple_assign_ssa_name_copy_p (gimple gs)
1968 {
1969 return (gimple_assign_single_p (gs)
1970 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1971 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1972 }
1973
1974
1975 /* Return true if GS is an assignment with a unary RHS, but the
1976 operator has no effect on the assigned value. The logic is adapted
1977 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1978 instances in which STRIP_NOPS was previously applied to the RHS of
1979 an assignment.
1980
1981 NOTE: In the use cases that led to the creation of this function
1982 and of gimple_assign_single_p, it is typical to test for either
1983 condition and to proceed in the same manner. In each case, the
1984 assigned value is represented by the single RHS operand of the
1985 assignment. I suspect there may be cases where gimple_assign_copy_p,
1986 gimple_assign_single_p, or equivalent logic is used where a similar
1987 treatment of unary NOPs is appropriate. */
1988
1989 bool
1990 gimple_assign_unary_nop_p (gimple gs)
1991 {
1992 return (is_gimple_assign (gs)
1993 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1994 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1995 && gimple_assign_rhs1 (gs) != error_mark_node
1996 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1997 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1998 }
1999
2000 /* Set BB to be the basic block holding G. */
2001
2002 void
2003 gimple_set_bb (gimple stmt, basic_block bb)
2004 {
2005 stmt->gsbase.bb = bb;
2006
2007   /* If the statement is a label, add the label to the block-to-labels map
2008      so that we can speed up edge creation for GIMPLE_GOTOs.  */
2009 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2010 {
2011 tree t;
2012 int uid;
2013
2014 t = gimple_label_label (stmt);
2015 uid = LABEL_DECL_UID (t);
2016 if (uid == -1)
2017 {
2018 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2019 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2020 if (old_len <= (unsigned) uid)
2021 {
2022 unsigned new_len = 3 * uid / 2 + 1;
2023
2024 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2025 new_len);
2026 }
2027 }
2028
2029 VEC_replace (basic_block, label_to_block_map, uid, bb);
2030 }
2031 }
2032
2033
2034 /* Modify the RHS of the assignment pointed-to by GSI using the
2035 operands in the expression tree EXPR.
2036
2037 NOTE: The statement pointed-to by GSI may be reallocated if it
2038 did not have enough operand slots.
2039
2040 This function is useful to convert an existing tree expression into
2041 the flat representation used for the RHS of a GIMPLE assignment.
2042 It will reallocate memory as needed to expand or shrink the number
2043 of operand slots needed to represent EXPR.
2044
2045 NOTE: If you find yourself building a tree and then calling this
2046 function, you are most certainly doing it the slow way. It is much
2047 better to build a new assignment or to use the function
2048 gimple_assign_set_rhs_with_ops, which does not require an
2049 expression tree to be built. */
2050
2051 void
2052 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2053 {
2054 enum tree_code subcode;
2055 tree op1, op2, op3;
2056
2057 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2058 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2059 }
2060
2061
2062 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2063 operands OP1, OP2 and OP3.
2064
2065 NOTE: The statement pointed-to by GSI may be reallocated if it
2066 did not have enough operand slots. */
2067
2068 void
2069 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2070 tree op1, tree op2, tree op3)
2071 {
2072 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2073 gimple stmt = gsi_stmt (*gsi);
2074
2075 /* If the new CODE needs more operands, allocate a new statement. */
2076 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2077 {
2078 tree lhs = gimple_assign_lhs (stmt);
2079 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2080 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2081 gsi_replace (gsi, new_stmt, true);
2082 stmt = new_stmt;
2083
2084 /* The LHS needs to be reset as this also changes the SSA name
2085 on the LHS. */
2086 gimple_assign_set_lhs (stmt, lhs);
2087 }
2088
2089 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2090 gimple_set_subcode (stmt, code);
2091 gimple_assign_set_rhs1 (stmt, op1);
2092 if (new_rhs_ops > 1)
2093 gimple_assign_set_rhs2 (stmt, op2);
2094 if (new_rhs_ops > 2)
2095 gimple_assign_set_rhs3 (stmt, op3);
2096 }
2097
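/* Illustrative usage sketch, assuming GSI points at an existing
   GIMPLE_ASSIGN and A and B are valid GIMPLE operands: turn the
   statement into "lhs = a + b" without building a tree first.
   gimple_assign_set_rhs_with_ops is the two-operand convenience
   wrapper mentioned above gimple_assign_set_rhs_from_tree; it forwards
   here with a NULL_TREE third operand.

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, b);
     update_stmt (gsi_stmt (gsi));

   Since the statement may have been reallocated, any pointer to the old
   statement must be refreshed through gsi_stmt.  */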
2098
2099 /* Return the LHS of a statement that performs an assignment,
2100 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2101 for a call to a function that returns no value, or for a
2102 statement other than an assignment or a call. */
2103
2104 tree
2105 gimple_get_lhs (const_gimple stmt)
2106 {
2107 enum gimple_code code = gimple_code (stmt);
2108
2109 if (code == GIMPLE_ASSIGN)
2110 return gimple_assign_lhs (stmt);
2111 else if (code == GIMPLE_CALL)
2112 return gimple_call_lhs (stmt);
2113 else
2114 return NULL_TREE;
2115 }
2116
2117
2118 /* Set the LHS of a statement that performs an assignment,
2119 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2120
2121 void
2122 gimple_set_lhs (gimple stmt, tree lhs)
2123 {
2124 enum gimple_code code = gimple_code (stmt);
2125
2126 if (code == GIMPLE_ASSIGN)
2127 gimple_assign_set_lhs (stmt, lhs);
2128 else if (code == GIMPLE_CALL)
2129 gimple_call_set_lhs (stmt, lhs);
2130 else
2131 gcc_unreachable();
2132 }
2133
2134 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2135 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2136 expression with a different value.
2137
2138 This will update any annotations (say debug bind stmts) referring
2139 to the original LHS, so that they use the RHS instead. This is
2140 done even if NLHS and LHS are the same, for it is understood that
2141 the RHS will be modified afterwards, and NLHS will not be assigned
2142 an equivalent value.
2143
2144 Adjusting any non-annotation uses of the LHS, if needed, is a
2145 responsibility of the caller.
2146
2147 The effect of this call should be pretty much the same as that of
2148 inserting a copy of STMT before STMT, and then removing the
2149 original stmt, at which time gsi_remove() would have updated
2150 annotations, but using this function saves all the inserting,
2151 copying and removing. */
2152
2153 void
2154 gimple_replace_lhs (gimple stmt, tree nlhs)
2155 {
2156 if (MAY_HAVE_DEBUG_STMTS)
2157 {
2158 tree lhs = gimple_get_lhs (stmt);
2159
2160 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2161
2162 insert_debug_temp_for_var_def (NULL, lhs);
2163 }
2164
2165 gimple_set_lhs (stmt, nlhs);
2166 }
2167
2168 /* Return a deep copy of statement STMT. All the operands from STMT
2169 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2170 and VUSE operand arrays are set to empty in the new copy. */
2171
2172 gimple
2173 gimple_copy (gimple stmt)
2174 {
2175 enum gimple_code code = gimple_code (stmt);
2176 unsigned num_ops = gimple_num_ops (stmt);
2177 gimple copy = gimple_alloc (code, num_ops);
2178 unsigned i;
2179
2180 /* Shallow copy all the fields from STMT. */
2181 memcpy (copy, stmt, gimple_size (code));
2182
2183 /* If STMT has sub-statements, deep-copy them as well. */
2184 if (gimple_has_substatements (stmt))
2185 {
2186 gimple_seq new_seq;
2187 tree t;
2188
2189 switch (gimple_code (stmt))
2190 {
2191 case GIMPLE_BIND:
2192 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2193 gimple_bind_set_body (copy, new_seq);
2194 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2195 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2196 break;
2197
2198 case GIMPLE_CATCH:
2199 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2200 gimple_catch_set_handler (copy, new_seq);
2201 t = unshare_expr (gimple_catch_types (stmt));
2202 gimple_catch_set_types (copy, t);
2203 break;
2204
2205 case GIMPLE_EH_FILTER:
2206 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2207 gimple_eh_filter_set_failure (copy, new_seq);
2208 t = unshare_expr (gimple_eh_filter_types (stmt));
2209 gimple_eh_filter_set_types (copy, t);
2210 break;
2211
2212 case GIMPLE_TRY:
2213 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2214 gimple_try_set_eval (copy, new_seq);
2215 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2216 gimple_try_set_cleanup (copy, new_seq);
2217 break;
2218
2219 case GIMPLE_OMP_FOR:
2220 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2221 gimple_omp_for_set_pre_body (copy, new_seq);
2222 t = unshare_expr (gimple_omp_for_clauses (stmt));
2223 gimple_omp_for_set_clauses (copy, t);
2224 copy->gimple_omp_for.iter
2225 = ggc_alloc_vec_gimple_omp_for_iter
2226 (gimple_omp_for_collapse (stmt));
2227 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2228 {
2229 gimple_omp_for_set_cond (copy, i,
2230 gimple_omp_for_cond (stmt, i));
2231 gimple_omp_for_set_index (copy, i,
2232 gimple_omp_for_index (stmt, i));
2233 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2234 gimple_omp_for_set_initial (copy, i, t);
2235 t = unshare_expr (gimple_omp_for_final (stmt, i));
2236 gimple_omp_for_set_final (copy, i, t);
2237 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2238 gimple_omp_for_set_incr (copy, i, t);
2239 }
2240 goto copy_omp_body;
2241
2242 case GIMPLE_OMP_PARALLEL:
2243 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2244 gimple_omp_parallel_set_clauses (copy, t);
2245 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2246 gimple_omp_parallel_set_child_fn (copy, t);
2247 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2248 gimple_omp_parallel_set_data_arg (copy, t);
2249 goto copy_omp_body;
2250
2251 case GIMPLE_OMP_TASK:
2252 t = unshare_expr (gimple_omp_task_clauses (stmt));
2253 gimple_omp_task_set_clauses (copy, t);
2254 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2255 gimple_omp_task_set_child_fn (copy, t);
2256 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2257 gimple_omp_task_set_data_arg (copy, t);
2258 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2259 gimple_omp_task_set_copy_fn (copy, t);
2260 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2261 gimple_omp_task_set_arg_size (copy, t);
2262 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2263 gimple_omp_task_set_arg_align (copy, t);
2264 goto copy_omp_body;
2265
2266 case GIMPLE_OMP_CRITICAL:
2267 t = unshare_expr (gimple_omp_critical_name (stmt));
2268 gimple_omp_critical_set_name (copy, t);
2269 goto copy_omp_body;
2270
2271 case GIMPLE_OMP_SECTIONS:
2272 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2273 gimple_omp_sections_set_clauses (copy, t);
2274 t = unshare_expr (gimple_omp_sections_control (stmt));
2275 gimple_omp_sections_set_control (copy, t);
2276 /* FALLTHRU */
2277
2278 case GIMPLE_OMP_SINGLE:
2279 case GIMPLE_OMP_SECTION:
2280 case GIMPLE_OMP_MASTER:
2281 case GIMPLE_OMP_ORDERED:
2282 copy_omp_body:
2283 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2284 gimple_omp_set_body (copy, new_seq);
2285 break;
2286
2287 case GIMPLE_WITH_CLEANUP_EXPR:
2288 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2289 gimple_wce_set_cleanup (copy, new_seq);
2290 break;
2291
2292 default:
2293 gcc_unreachable ();
2294 }
2295 }
2296
2297 /* Make copy of operands. */
2298 if (num_ops > 0)
2299 {
2300 for (i = 0; i < num_ops; i++)
2301 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2302
2303 /* Clear out SSA operand vectors on COPY. */
2304 if (gimple_has_ops (stmt))
2305 {
2306 gimple_set_def_ops (copy, NULL);
2307 gimple_set_use_ops (copy, NULL);
2308 }
2309
2310 if (gimple_has_mem_ops (stmt))
2311 {
2312 gimple_set_vdef (copy, gimple_vdef (stmt));
2313 gimple_set_vuse (copy, gimple_vuse (stmt));
2314 }
2315
2316 /* SSA operands need to be updated. */
2317 gimple_set_modified (copy, true);
2318 }
2319
2320 return copy;
2321 }
2322
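/* Illustrative usage sketch, assuming GSI points at STMT, the copy is an
   assignment, and NEW_LHS is a fresh definition (re-using the original
   SSA LHS as-is would create a duplicate definition):

     gimple copy = gimple_copy (stmt);
     gimple_assign_set_lhs (copy, new_lhs);
     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
     update_stmt (copy);

   The final update_stmt rescans operands, since gimple_copy clears the
   SSA operand vectors and marks the copy modified.  */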
2323
2324 /* Set the MODIFIED flag to MODIFIEDP, if the gimple statement S has
2325 a MODIFIED field. */
2326
2327 void
2328 gimple_set_modified (gimple s, bool modifiedp)
2329 {
2330 if (gimple_has_ops (s))
2331 s->gsbase.modified = (unsigned) modifiedp;
2332 }
2333
2334
2335 /* Return true if statement S has side-effects. We consider a
2336 statement to have side effects if:
2337
2338 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2339 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2340
2341 bool
2342 gimple_has_side_effects (const_gimple s)
2343 {
2344 unsigned i;
2345
2346 if (is_gimple_debug (s))
2347 return false;
2348
2349 /* We don't have to scan the arguments to check for
2350 volatile arguments; at present, though, we still
2351 do a scan to check for TREE_SIDE_EFFECTS. */
2352 if (gimple_has_volatile_ops (s))
2353 return true;
2354
2355 if (is_gimple_call (s))
2356 {
2357 unsigned nargs = gimple_call_num_args (s);
2358 tree fn;
2359
2360 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2361 return true;
2362 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2363 /* An infinite loop is considered a side effect. */
2364 return true;
2365
2366 if (gimple_call_lhs (s)
2367 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2368 {
2369 gcc_assert (gimple_has_volatile_ops (s));
2370 return true;
2371 }
2372
2373 fn = gimple_call_fn (s);
2374 if (fn && TREE_SIDE_EFFECTS (fn))
2375 return true;
2376
2377 for (i = 0; i < nargs; i++)
2378 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2379 {
2380 gcc_assert (gimple_has_volatile_ops (s));
2381 return true;
2382 }
2383
2384 return false;
2385 }
2386 else
2387 {
2388 for (i = 0; i < gimple_num_ops (s); i++)
2389 if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
2390 {
2391 gcc_assert (gimple_has_volatile_ops (s));
2392 return true;
2393 }
2394 }
2395
2396 return false;
2397 }
2398
2399 /* Return true if the RHS of statement S has side effects.
2400 We may use it to determine if it is admissible to replace
2401 an assignment or call with a copy of a previously-computed
2402 value. In such cases, side-effects due to the LHS are
2403 preserved. */
2404
2405 bool
2406 gimple_rhs_has_side_effects (const_gimple s)
2407 {
2408 unsigned i;
2409
2410 if (is_gimple_call (s))
2411 {
2412 unsigned nargs = gimple_call_num_args (s);
2413 tree fn;
2414
2415 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2416 return true;
2417
2418 /* We cannot use gimple_has_volatile_ops here,
2419 because we must ignore a volatile LHS. */
2420 fn = gimple_call_fn (s);
2421 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2422 {
2423 gcc_assert (gimple_has_volatile_ops (s));
2424 return true;
2425 }
2426
2427 for (i = 0; i < nargs; i++)
2428 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2429 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2430 return true;
2431
2432 return false;
2433 }
2434 else if (is_gimple_assign (s))
2435 {
2436 /* Skip the first operand, the LHS. */
2437 for (i = 1; i < gimple_num_ops (s); i++)
2438 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2439 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2440 {
2441 gcc_assert (gimple_has_volatile_ops (s));
2442 return true;
2443 }
2444 }
2445 else if (is_gimple_debug (s))
2446 return false;
2447 else
2448 {
2449 /* For statements without an LHS, examine all arguments. */
2450 for (i = 0; i < gimple_num_ops (s); i++)
2451 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2452 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2453 {
2454 gcc_assert (gimple_has_volatile_ops (s));
2455 return true;
2456 }
2457 }
2458
2459 return false;
2460 }
2461
2462 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2463 Return true if S can trap. When INCLUDE_MEM is true, check whether
2464 the memory operations could trap. When INCLUDE_STORES is true and
2465 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2466
2467 bool
2468 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2469 {
2470 tree t, div = NULL_TREE;
2471 enum tree_code op;
2472
2473 if (include_mem)
2474 {
2475 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2476
2477 for (i = start; i < gimple_num_ops (s); i++)
2478 if (tree_could_trap_p (gimple_op (s, i)))
2479 return true;
2480 }
2481
2482 switch (gimple_code (s))
2483 {
2484 case GIMPLE_ASM:
2485 return gimple_asm_volatile_p (s);
2486
2487 case GIMPLE_CALL:
2488 t = gimple_call_fndecl (s);
2489 /* Assume that calls to weak functions may trap. */
2490 if (!t || !DECL_P (t) || DECL_WEAK (t))
2491 return true;
2492 return false;
2493
2494 case GIMPLE_ASSIGN:
2495 t = gimple_expr_type (s);
2496 op = gimple_assign_rhs_code (s);
2497 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2498 div = gimple_assign_rhs2 (s);
2499 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2500 (INTEGRAL_TYPE_P (t)
2501 && TYPE_OVERFLOW_TRAPS (t)),
2502 div));
2503
2504 default:
2505 break;
2506 }
2507
2508 return false;
2509 }
2510
2511 /* Return true if statement S can trap. */
2512
2513 bool
2514 gimple_could_trap_p (gimple s)
2515 {
2516 return gimple_could_trap_p_1 (s, true, true);
2517 }
2518
2519 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2520
2521 bool
2522 gimple_assign_rhs_could_trap_p (gimple s)
2523 {
2524 gcc_assert (is_gimple_assign (s));
2525 return gimple_could_trap_p_1 (s, true, false);
2526 }
2527
2528
2529 /* Print debugging information for gimple stmts generated. */
2530
2531 void
2532 dump_gimple_statistics (void)
2533 {
2534 #ifdef GATHER_STATISTICS
2535 int i, total_tuples = 0, total_bytes = 0;
2536
2537 fprintf (stderr, "\nGIMPLE statements\n");
2538 fprintf (stderr, "Kind Stmts Bytes\n");
2539 fprintf (stderr, "---------------------------------------\n");
2540 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2541 {
2542 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2543 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2544 total_tuples += gimple_alloc_counts[i];
2545 total_bytes += gimple_alloc_sizes[i];
2546 }
2547 fprintf (stderr, "---------------------------------------\n");
2548 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2549 fprintf (stderr, "---------------------------------------\n");
2550 #else
2551 fprintf (stderr, "No gimple statistics\n");
2552 #endif
2553 }
2554
2555
2556 /* Return the number of operands needed on the RHS of a GIMPLE
2557 assignment for an expression with tree code CODE. */
2558
2559 unsigned
2560 get_gimple_rhs_num_ops (enum tree_code code)
2561 {
2562 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2563
2564 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2565 return 1;
2566 else if (rhs_class == GIMPLE_BINARY_RHS)
2567 return 2;
2568 else if (rhs_class == GIMPLE_TERNARY_RHS)
2569 return 3;
2570 else
2571 gcc_unreachable ();
2572 }
2573
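/* For example, using the classification table below: an SSA_NAME or a
   NEGATE_EXPR on the RHS needs one operand slot (GIMPLE_SINGLE_RHS and
   GIMPLE_UNARY_RHS respectively), a PLUS_EXPR needs two
   (GIMPLE_BINARY_RHS), and an FMA_EXPR needs three (GIMPLE_TERNARY_RHS).  */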
2574 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2575 (unsigned char) \
2576 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2577 : ((TYPE) == tcc_binary \
2578 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2579 : ((TYPE) == tcc_constant \
2580 || (TYPE) == tcc_declaration \
2581 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2582 : ((SYM) == TRUTH_AND_EXPR \
2583 || (SYM) == TRUTH_OR_EXPR \
2584 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2585 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2586 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2587 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2588 || (SYM) == DOT_PROD_EXPR \
2589 || (SYM) == REALIGN_LOAD_EXPR \
2590 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2591 : ((SYM) == COND_EXPR \
2592 || (SYM) == CONSTRUCTOR \
2593 || (SYM) == OBJ_TYPE_REF \
2594 || (SYM) == ASSERT_EXPR \
2595 || (SYM) == ADDR_EXPR \
2596 || (SYM) == WITH_SIZE_EXPR \
2597 || (SYM) == SSA_NAME \
2598 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2599 : GIMPLE_INVALID_RHS),
2600 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2601
2602 const unsigned char gimple_rhs_class_table[] = {
2603 #include "all-tree.def"
2604 };
2605
2606 #undef DEFTREECODE
2607 #undef END_OF_BASE_TREE_CODES
2608
2609 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2610
2611 /* Validation of GIMPLE expressions. */
2612
2613 /* Returns true iff T is a valid RHS for an assignment to a renamed
2614 user -- or front-end generated artificial -- variable. */
2615
2616 bool
2617 is_gimple_reg_rhs (tree t)
2618 {
2619 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2620 }
2621
2622 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2623 LHS, or for a call argument. */
2624
2625 bool
2626 is_gimple_mem_rhs (tree t)
2627 {
2628 /* If we're dealing with a renamable type, either source or dest must be
2629 a renamed variable. */
2630 if (is_gimple_reg_type (TREE_TYPE (t)))
2631 return is_gimple_val (t);
2632 else
2633 return is_gimple_val (t) || is_gimple_lvalue (t);
2634 }
2635
2636 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2637
2638 bool
2639 is_gimple_lvalue (tree t)
2640 {
2641 return (is_gimple_addressable (t)
2642 || TREE_CODE (t) == WITH_SIZE_EXPR
2643 /* These are complex lvalues, but don't have addresses, so they
2644 go here. */
2645 || TREE_CODE (t) == BIT_FIELD_REF);
2646 }
2647
2648 /* Return true if T is a GIMPLE condition. */
2649
2650 bool
2651 is_gimple_condexpr (tree t)
2652 {
2653 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2654 && !tree_could_throw_p (t)
2655 && is_gimple_val (TREE_OPERAND (t, 0))
2656 && is_gimple_val (TREE_OPERAND (t, 1))));
2657 }
2658
2659 /* Return true if T is something whose address can be taken. */
2660
2661 bool
2662 is_gimple_addressable (tree t)
2663 {
2664 return (is_gimple_id (t) || handled_component_p (t)
2665 || TREE_CODE (t) == MEM_REF);
2666 }
2667
2668 /* Return true if T is a valid gimple constant. */
2669
2670 bool
2671 is_gimple_constant (const_tree t)
2672 {
2673 switch (TREE_CODE (t))
2674 {
2675 case INTEGER_CST:
2676 case REAL_CST:
2677 case FIXED_CST:
2678 case STRING_CST:
2679 case COMPLEX_CST:
2680 case VECTOR_CST:
2681 return true;
2682
2683 /* Vector constant constructors are gimple invariant. */
2684 case CONSTRUCTOR:
2685 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2686 return TREE_CONSTANT (t);
2687 else
2688 return false;
2689
2690 default:
2691 return false;
2692 }
2693 }
2694
2695 /* Return true if T is a gimple address. */
2696
2697 bool
2698 is_gimple_address (const_tree t)
2699 {
2700 tree op;
2701
2702 if (TREE_CODE (t) != ADDR_EXPR)
2703 return false;
2704
2705 op = TREE_OPERAND (t, 0);
2706 while (handled_component_p (op))
2707 {
2708 if ((TREE_CODE (op) == ARRAY_REF
2709 || TREE_CODE (op) == ARRAY_RANGE_REF)
2710 && !is_gimple_val (TREE_OPERAND (op, 1)))
2711 return false;
2712
2713 op = TREE_OPERAND (op, 0);
2714 }
2715
2716 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2717 return true;
2718
2719 switch (TREE_CODE (op))
2720 {
2721 case PARM_DECL:
2722 case RESULT_DECL:
2723 case LABEL_DECL:
2724 case FUNCTION_DECL:
2725 case VAR_DECL:
2726 case CONST_DECL:
2727 return true;
2728
2729 default:
2730 return false;
2731 }
2732 }
2733
2734 /* Strip out all handled components that produce invariant
2735 offsets. */
2736
2737 static const_tree
2738 strip_invariant_refs (const_tree op)
2739 {
2740 while (handled_component_p (op))
2741 {
2742 switch (TREE_CODE (op))
2743 {
2744 case ARRAY_REF:
2745 case ARRAY_RANGE_REF:
2746 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2747 || TREE_OPERAND (op, 2) != NULL_TREE
2748 || TREE_OPERAND (op, 3) != NULL_TREE)
2749 return NULL;
2750 break;
2751
2752 case COMPONENT_REF:
2753 if (TREE_OPERAND (op, 2) != NULL_TREE)
2754 return NULL;
2755 break;
2756
2757 default:;
2758 }
2759 op = TREE_OPERAND (op, 0);
2760 }
2761
2762 return op;
2763 }
2764
2765 /* Return true if T is a gimple invariant address. */
2766
2767 bool
2768 is_gimple_invariant_address (const_tree t)
2769 {
2770 const_tree op;
2771
2772 if (TREE_CODE (t) != ADDR_EXPR)
2773 return false;
2774
2775 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2776 if (!op)
2777 return false;
2778
2779 if (TREE_CODE (op) == MEM_REF)
2780 {
2781 const_tree op0 = TREE_OPERAND (op, 0);
2782 return (TREE_CODE (op0) == ADDR_EXPR
2783 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2784 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2785 }
2786
2787 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2788 }
2789
2790 /* Return true if T is a gimple invariant address at the IPA level
2791 (so addresses of variables on the stack are not allowed). */
2792
2793 bool
2794 is_gimple_ip_invariant_address (const_tree t)
2795 {
2796 const_tree op;
2797
2798 if (TREE_CODE (t) != ADDR_EXPR)
2799 return false;
2800
2801 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2802
2803 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2804 }
2805
2806 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2807 form of function invariant. */
2808
2809 bool
2810 is_gimple_min_invariant (const_tree t)
2811 {
2812 if (TREE_CODE (t) == ADDR_EXPR)
2813 return is_gimple_invariant_address (t);
2814
2815 return is_gimple_constant (t);
2816 }
2817
2818 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2819 form of gimple minimal invariant. */
2820
2821 bool
2822 is_gimple_ip_invariant (const_tree t)
2823 {
2824 if (TREE_CODE (t) == ADDR_EXPR)
2825 return is_gimple_ip_invariant_address (t);
2826
2827 return is_gimple_constant (t);
2828 }
2829
2830 /* Return true if T looks like a valid GIMPLE statement. */
2831
2832 bool
2833 is_gimple_stmt (tree t)
2834 {
2835 const enum tree_code code = TREE_CODE (t);
2836
2837 switch (code)
2838 {
2839 case NOP_EXPR:
2840 /* The only valid NOP_EXPR is the empty statement. */
2841 return IS_EMPTY_STMT (t);
2842
2843 case BIND_EXPR:
2844 case COND_EXPR:
2845 /* These are only valid if they're void. */
2846 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2847
2848 case SWITCH_EXPR:
2849 case GOTO_EXPR:
2850 case RETURN_EXPR:
2851 case LABEL_EXPR:
2852 case CASE_LABEL_EXPR:
2853 case TRY_CATCH_EXPR:
2854 case TRY_FINALLY_EXPR:
2855 case EH_FILTER_EXPR:
2856 case CATCH_EXPR:
2857 case ASM_EXPR:
2858 case STATEMENT_LIST:
2859 case OMP_PARALLEL:
2860 case OMP_FOR:
2861 case OMP_SECTIONS:
2862 case OMP_SECTION:
2863 case OMP_SINGLE:
2864 case OMP_MASTER:
2865 case OMP_ORDERED:
2866 case OMP_CRITICAL:
2867 case OMP_TASK:
2868 /* These are always void. */
2869 return true;
2870
2871 case CALL_EXPR:
2872 case MODIFY_EXPR:
2873 case PREDICT_EXPR:
2874 /* These are valid regardless of their type. */
2875 return true;
2876
2877 default:
2878 return false;
2879 }
2880 }
2881
2882 /* Return true if T is a variable. */
2883
2884 bool
2885 is_gimple_variable (tree t)
2886 {
2887 return (TREE_CODE (t) == VAR_DECL
2888 || TREE_CODE (t) == PARM_DECL
2889 || TREE_CODE (t) == RESULT_DECL
2890 || TREE_CODE (t) == SSA_NAME);
2891 }
2892
2893 /* Return true if T is a GIMPLE identifier (something with an address). */
2894
2895 bool
2896 is_gimple_id (tree t)
2897 {
2898 return (is_gimple_variable (t)
2899 || TREE_CODE (t) == FUNCTION_DECL
2900 || TREE_CODE (t) == LABEL_DECL
2901 || TREE_CODE (t) == CONST_DECL
2902 /* Allow string constants, since they are addressable. */
2903 || TREE_CODE (t) == STRING_CST);
2904 }
2905
2906 /* Return true if TYPE is a suitable type for a scalar register variable. */
2907
2908 bool
2909 is_gimple_reg_type (tree type)
2910 {
2911 return !AGGREGATE_TYPE_P (type);
2912 }
2913
2914 /* Return true if T is a non-aggregate register variable. */
2915
2916 bool
2917 is_gimple_reg (tree t)
2918 {
2919 if (TREE_CODE (t) == SSA_NAME)
2920 t = SSA_NAME_VAR (t);
2921
2922 if (!is_gimple_variable (t))
2923 return false;
2924
2925 if (!is_gimple_reg_type (TREE_TYPE (t)))
2926 return false;
2927
2928 /* A volatile decl is not acceptable because we can't reuse it as
2929 needed. We need to copy it into a temp first. */
2930 if (TREE_THIS_VOLATILE (t))
2931 return false;
2932
2933 /* We define "registers" as things that can be renamed as needed,
2934 which with our infrastructure does not apply to memory. */
2935 if (needs_to_live_in_memory (t))
2936 return false;
2937
2938 /* Hard register variables are an interesting case. For those that
2939 are call-clobbered, we don't know where all the calls are, since
2940 we don't (want to) take into account which operations will turn
2941 into libcalls at the rtl level. For those that are call-saved,
2942 we don't currently model the fact that calls may in fact change
2943 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2944 level, and so miss variable changes they might imply. All around,
2945 it seems safest to not do too much optimization with these at the
2946 tree level at all. We'll have to rely on the rtl optimizers to
2947 clean this up, as there we've got all the appropriate bits exposed. */
2948 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2949 return false;
2950
2951 /* Complex and vector values must have been put into SSA-like form.
2952 That is, no assignments to the individual components. */
2953 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2954 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2955 return DECL_GIMPLE_REG_P (t);
2956
2957 return true;
2958 }
2959
2960
2961 /* Return true if T is a GIMPLE variable whose address is not needed. */
2962
2963 bool
2964 is_gimple_non_addressable (tree t)
2965 {
2966 if (TREE_CODE (t) == SSA_NAME)
2967 t = SSA_NAME_VAR (t);
2968
2969 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2970 }
2971
2972 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2973
2974 bool
2975 is_gimple_val (tree t)
2976 {
2977 /* Make loads from volatiles and memory vars explicit. */
2978 if (is_gimple_variable (t)
2979 && is_gimple_reg_type (TREE_TYPE (t))
2980 && !is_gimple_reg (t))
2981 return false;
2982
2983 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2984 }
2985
2986 /* Similarly, but accept hard registers as inputs to asm statements. */
2987
2988 bool
2989 is_gimple_asm_val (tree t)
2990 {
2991 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2992 return true;
2993
2994 return is_gimple_val (t);
2995 }
2996
2997 /* Return true if T is a GIMPLE minimal lvalue. */
2998
2999 bool
3000 is_gimple_min_lval (tree t)
3001 {
3002 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3003 return false;
3004 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3005 }
3006
3007 /* Return true if T is a valid function operand of a CALL_EXPR. */
3008
3009 bool
3010 is_gimple_call_addr (tree t)
3011 {
3012 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3013 }
3014
3015 /* Return true if T is a valid address operand of a MEM_REF. */
3016
3017 bool
3018 is_gimple_mem_ref_addr (tree t)
3019 {
3020 return (is_gimple_reg (t)
3021 || TREE_CODE (t) == INTEGER_CST
3022 || (TREE_CODE (t) == ADDR_EXPR
3023 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3024 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3025 }
3026
3027 /* If T makes a function call, return the corresponding CALL_EXPR operand.
3028 Otherwise, return NULL_TREE. */
3029
3030 tree
3031 get_call_expr_in (tree t)
3032 {
3033 if (TREE_CODE (t) == MODIFY_EXPR)
3034 t = TREE_OPERAND (t, 1);
3035 if (TREE_CODE (t) == WITH_SIZE_EXPR)
3036 t = TREE_OPERAND (t, 0);
3037 if (TREE_CODE (t) == CALL_EXPR)
3038 return t;
3039 return NULL_TREE;
3040 }
3041
3042
3043 /* Given a memory reference expression T, return its base address.
3044 The base address of a memory reference expression is the main
3045 object being referenced. For instance, the base address for
3046 'array[i].fld[j]' is 'array'. You can think of this as stripping
3047 away the offset part from a memory address.
3048
3049 This function calls handled_component_p to strip away all the inner
3050 parts of the memory reference until it reaches the base object. */
3051
3052 tree
3053 get_base_address (tree t)
3054 {
3055 while (handled_component_p (t))
3056 t = TREE_OPERAND (t, 0);
3057
3058 if ((TREE_CODE (t) == MEM_REF
3059 || TREE_CODE (t) == TARGET_MEM_REF)
3060 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3061 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3062
3063 if (TREE_CODE (t) == SSA_NAME
3064 || DECL_P (t)
3065 || TREE_CODE (t) == STRING_CST
3066 || TREE_CODE (t) == CONSTRUCTOR
3067 || INDIRECT_REF_P (t)
3068 || TREE_CODE (t) == MEM_REF
3069 || TREE_CODE (t) == TARGET_MEM_REF)
3070 return t;
3071 else
3072 return NULL_TREE;
3073 }
3074
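/* For example, for the reference a.b[i].c built on top of a VAR_DECL a,
   the handled-component loop strips the COMPONENT_REFs and the ARRAY_REF
   and returns the VAR_DECL a.  For MEM[&a + 8] the address is looked
   through and a is returned as well, while for MEM[p_1] (a dereference of
   the SSA pointer p_1) the MEM_REF itself is returned.  */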
3075 void
3076 recalculate_side_effects (tree t)
3077 {
3078 enum tree_code code = TREE_CODE (t);
3079 int len = TREE_OPERAND_LENGTH (t);
3080 int i;
3081
3082 switch (TREE_CODE_CLASS (code))
3083 {
3084 case tcc_expression:
3085 switch (code)
3086 {
3087 case INIT_EXPR:
3088 case MODIFY_EXPR:
3089 case VA_ARG_EXPR:
3090 case PREDECREMENT_EXPR:
3091 case PREINCREMENT_EXPR:
3092 case POSTDECREMENT_EXPR:
3093 case POSTINCREMENT_EXPR:
3094 /* All of these have side-effects, no matter what their
3095 operands are. */
3096 return;
3097
3098 default:
3099 break;
3100 }
3101 /* Fall through. */
3102
3103 case tcc_comparison: /* a comparison expression */
3104 case tcc_unary: /* a unary arithmetic expression */
3105 case tcc_binary: /* a binary arithmetic expression */
3106 case tcc_reference: /* a reference */
3107 case tcc_vl_exp: /* a function call */
3108 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3109 for (i = 0; i < len; ++i)
3110 {
3111 tree op = TREE_OPERAND (t, i);
3112 if (op && TREE_SIDE_EFFECTS (op))
3113 TREE_SIDE_EFFECTS (t) = 1;
3114 }
3115 break;
3116
3117 case tcc_constant:
3118 /* No side-effects. */
3119 return;
3120
3121 default:
3122 gcc_unreachable ();
3123 }
3124 }
3125
3126 /* Canonicalize a tree T for use as the condition of a COND_EXPR. Returns
3127 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3128 we failed to create one. */
3129
3130 tree
3131 canonicalize_cond_expr_cond (tree t)
3132 {
3133 /* Strip conversions around boolean operations. */
3134 if (CONVERT_EXPR_P (t)
3135 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3136 t = TREE_OPERAND (t, 0);
3137
3138 /* For (bool)x use x != 0. */
3139 if (CONVERT_EXPR_P (t)
3140 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3141 {
3142 tree top0 = TREE_OPERAND (t, 0);
3143 t = build2 (NE_EXPR, TREE_TYPE (t),
3144 top0, build_int_cst (TREE_TYPE (top0), 0));
3145 }
3146 /* For !x use x == 0. */
3147 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3148 {
3149 tree top0 = TREE_OPERAND (t, 0);
3150 t = build2 (EQ_EXPR, TREE_TYPE (t),
3151 top0, build_int_cst (TREE_TYPE (top0), 0));
3152 }
3153 /* For cmp ? 1 : 0 use cmp. */
3154 else if (TREE_CODE (t) == COND_EXPR
3155 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3156 && integer_onep (TREE_OPERAND (t, 1))
3157 && integer_zerop (TREE_OPERAND (t, 2)))
3158 {
3159 tree top0 = TREE_OPERAND (t, 0);
3160 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3161 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3162 }
3163
3164 if (is_gimple_condexpr (t))
3165 return t;
3166
3167 return NULL_TREE;
3168 }
3169
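/* Illustrative examples of the canonicalizations performed above
   (types elided):

     (_Bool) x           becomes   x != 0
     !x                  becomes   x == 0
     a < b ? 1 : 0       becomes   a < b

   If the result is still not a valid GIMPLE condition, for example
   because it could throw or its operands are not GIMPLE values,
   NULL_TREE is returned.  */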
3170 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3171 the positions marked by the set ARGS_TO_SKIP. */
3172
3173 gimple
3174 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3175 {
3176 int i;
3177 int nargs = gimple_call_num_args (stmt);
3178 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3179 gimple new_stmt;
3180
3181 for (i = 0; i < nargs; i++)
3182 if (!bitmap_bit_p (args_to_skip, i))
3183 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3184
3185 if (gimple_call_internal_p (stmt))
3186 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3187 vargs);
3188 else
3189 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3190 VEC_free (tree, heap, vargs);
3191 if (gimple_call_lhs (stmt))
3192 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3193
3194 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3195 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3196
3197 gimple_set_block (new_stmt, gimple_block (stmt));
3198 if (gimple_has_location (stmt))
3199 gimple_set_location (new_stmt, gimple_location (stmt));
3200 gimple_call_copy_flags (new_stmt, stmt);
3201 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3202
3203 gimple_set_modified (new_stmt, true);
3204
3205 return new_stmt;
3206 }
3207
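/* Illustrative usage sketch, assuming CALL is the original statement,
   GSI points at it, and the usual bitmap helpers are used: build a copy
   of CALL that drops its second argument (argument number 1).

     bitmap to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (to_skip, 1);
     new_call = gimple_call_copy_skip_args (call, to_skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (to_skip);

   The new call is marked modified, so its SSA operands still need to be
   rescanned by the caller (e.g. with update_stmt).  */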
3208
3209 static hashval_t gimple_type_hash_1 (const void *, enum gtc_mode);
3210
3211 /* Structure used to maintain a cache of some type pairs compared by
3212 gimple_types_compatible_p when comparing aggregate types. There are
3213 three possible values for SAME_P:
3214
3215 -2: The pair (T1, T2) has just been inserted in the table.
3216 0: T1 and T2 are different types.
3217 1: T1 and T2 are the same type.
3218
3219 The two elements in the SAME_P array are indexed by the comparison
3220 mode gtc_mode. */
3221
3222 struct type_pair_d
3223 {
3224 unsigned int uid1;
3225 unsigned int uid2;
3226 signed char same_p[2];
3227 };
3228 typedef struct type_pair_d *type_pair_t;
3229
3230 DEF_VEC_P(type_pair_t);
3231 DEF_VEC_ALLOC_P(type_pair_t,heap);
3232
3233 /* Return a hash value for the type pair pointed-to by P. */
3234
3235 static hashval_t
3236 type_pair_hash (const void *p)
3237 {
3238 const struct type_pair_d *pair = (const struct type_pair_d *) p;
3239 hashval_t val1 = pair->uid1;
3240 hashval_t val2 = pair->uid2;
3241 return (iterative_hash_hashval_t (val2, val1)
3242 ^ iterative_hash_hashval_t (val1, val2));
3243 }
3244
3245 /* Compare two type pairs pointed-to by P1 and P2. */
3246
3247 static int
3248 type_pair_eq (const void *p1, const void *p2)
3249 {
3250 const struct type_pair_d *pair1 = (const struct type_pair_d *) p1;
3251 const struct type_pair_d *pair2 = (const struct type_pair_d *) p2;
3252 return ((pair1->uid1 == pair2->uid1 && pair1->uid2 == pair2->uid2)
3253 || (pair1->uid1 == pair2->uid2 && pair1->uid2 == pair2->uid1));
3254 }
3255
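/* Note on the two helpers above: the hash is deliberately symmetric in
   its operands, so the pair (T1, T2) hashes and compares equal to
   (T2, T1).  With TYPE_UIDs u1 and u2 the hash is

     iterative_hash_hashval_t (u2, u1) ^ iterative_hash_hashval_t (u1, u2)

   which is unchanged when u1 and u2 are swapped, matching the two
   argument orders accepted by type_pair_eq.  */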
3256 /* Lookup the pair of types T1 and T2 in *VISITED_P. Insert a new
3257 entry if none existed. */
3258
3259 static type_pair_t
3260 lookup_type_pair (tree t1, tree t2, htab_t *visited_p, struct obstack *ob_p)
3261 {
3262 struct type_pair_d pair;
3263 type_pair_t p;
3264 void **slot;
3265
3266 if (*visited_p == NULL)
3267 {
3268 *visited_p = htab_create (251, type_pair_hash, type_pair_eq, NULL);
3269 gcc_obstack_init (ob_p);
3270 }
3271
3272 pair.uid1 = TYPE_UID (t1);
3273 pair.uid2 = TYPE_UID (t2);
3274 slot = htab_find_slot (*visited_p, &pair, INSERT);
3275
3276 if (*slot)
3277 p = *((type_pair_t *) slot);
3278 else
3279 {
3280 p = XOBNEW (ob_p, struct type_pair_d);
3281 p->uid1 = TYPE_UID (t1);
3282 p->uid2 = TYPE_UID (t2);
3283 p->same_p[0] = -2;
3284 p->same_p[1] = -2;
3285 *slot = (void *) p;
3286 }
3287
3288 return p;
3289 }
3290
3291 /* Per-pointer state for the SCC finding. The on_sccstack flag
3292 is not strictly required: it is true when there is no hash value
3293 recorded for the type and false otherwise. But querying that
3294 is slower. */
3295
3296 struct sccs
3297 {
3298 unsigned int dfsnum;
3299 unsigned int low;
3300 bool on_sccstack;
3301 union {
3302 hashval_t hash;
3303 signed char same_p;
3304 } u;
3305 };
3306
3307 static unsigned int next_dfs_num;
3308 static unsigned int gtc_next_dfs_num;
3309
3310
3311 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3312
3313 typedef struct GTY(()) gimple_type_leader_entry_s {
3314 tree type;
3315 tree leader;
3316 } gimple_type_leader_entry;
3317
3318 #define GIMPLE_TYPE_LEADER_SIZE 16381
3319 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3320 gimple_type_leader_entry *gimple_type_leader;
3321
3322 /* Lookup an existing leader for T and return it or NULL_TREE, if
3323 there is none in the cache. */
3324
3325 static tree
3326 gimple_lookup_type_leader (tree t)
3327 {
3328 gimple_type_leader_entry *leader;
3329
3330 if (!gimple_type_leader)
3331 return NULL_TREE;
3332
3333 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3334 if (leader->type != t)
3335 return NULL_TREE;
3336
3337 return leader->leader;
3338 }
3339
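/* Illustrative sketch of the direct-mapped cache above: a lookup for
   type T consults slot TYPE_UID (T) % GIMPLE_TYPE_LEADER_SIZE (16381
   slots) and returns the recorded leader only if that slot still names T
   itself.  Because the cache is direct-mapped, registering another type
   whose UID maps to the same slot simply evicts the earlier entry, and a
   later lookup for the evicted type misses and returns NULL_TREE.  */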
3340 /* Return true if T1 and T2 have the same name. If FOR_COMPLETION_P is
3341 true, return false when either type has no name; otherwise two
3342 nameless types are considered to have the same name. */
3343
3344 static bool
3345 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3346 {
3347 tree name1 = TYPE_NAME (t1);
3348 tree name2 = TYPE_NAME (t2);
3349
3350 /* Consider anonymous types all unique for completion. */
3351 if (for_completion_p
3352 && (!name1 || !name2))
3353 return false;
3354
3355 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3356 {
3357 name1 = DECL_NAME (name1);
3358 if (for_completion_p
3359 && !name1)
3360 return false;
3361 }
3362 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3363
3364 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3365 {
3366 name2 = DECL_NAME (name2);
3367 if (for_completion_p
3368 && !name2)
3369 return false;
3370 }
3371 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3372
3373 /* Identifiers can be compared with pointer equality rather
3374 than a string comparison. */
3375 if (name1 == name2)
3376 return true;
3377
3378 return false;
3379 }
3380
3381 /* Return true if the field decls F1 and F2 are at the same offset.
3382
3383 This is intended to be used on GIMPLE types only. */
3384
3385 bool
3386 gimple_compare_field_offset (tree f1, tree f2)
3387 {
3388 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3389 {
3390 tree offset1 = DECL_FIELD_OFFSET (f1);
3391 tree offset2 = DECL_FIELD_OFFSET (f2);
3392 return ((offset1 == offset2
3393 /* Once gimplification is done, self-referential offsets are
3394 instantiated as operand #2 of the COMPONENT_REF built for
3395 each access and reset. Therefore, they are not relevant
3396 anymore and fields are interchangeable provided that they
3397 represent the same access. */
3398 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3399 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3400 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3401 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3402 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3403 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3404 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3405 || operand_equal_p (offset1, offset2, 0))
3406 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3407 DECL_FIELD_BIT_OFFSET (f2)));
3408 }
3409
3410 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3411 should be, so handle differing ones specially by decomposing
3412 the offset into a byte and bit offset manually. */
3413 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3414 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3415 {
3416 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3417 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3418 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3419 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3420 + bit_offset1 / BITS_PER_UNIT);
3421 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3422 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3423 + bit_offset2 / BITS_PER_UNIT);
3424 if (byte_offset1 != byte_offset2)
3425 return false;
3426 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3427 }
3428
3429 return false;
3430 }
3431
3432 /* Return true if T1 and T2 are a complete and an incomplete
3433 variant of the same type. */
3434
3435 static bool
3436 gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
3437 {
3438 /* If one pointer points to an incomplete type variant of
3439 the other pointed-to type they are the same. */
3440 if (TREE_CODE (t1) == TREE_CODE (t2)
3441 && RECORD_OR_UNION_TYPE_P (t1)
3442 && (!COMPLETE_TYPE_P (t1)
3443 || !COMPLETE_TYPE_P (t2))
3444 && TYPE_QUALS (t1) == TYPE_QUALS (t2)
3445 && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3446 TYPE_MAIN_VARIANT (t2), true))
3447 return true;
3448 return false;
3449 }
3450
3451 static bool
3452 gimple_types_compatible_p_1 (tree, tree, enum gtc_mode, type_pair_t,
3453 VEC(type_pair_t, heap) **,
3454 struct pointer_map_t *, struct obstack *);
3455
3456 /* DFS visit the edge from the caller's type pair with state *STATE to
3457 the pair T1, T2 while operating in comparison mode MODE.
3458 Update the merging status if it is not part of the SCC containing the
3459 caller's pair and return it.
3460 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3461
3462 static bool
3463 gtc_visit (tree t1, tree t2, enum gtc_mode mode,
3464 struct sccs *state,
3465 VEC(type_pair_t, heap) **sccstack,
3466 struct pointer_map_t *sccstate,
3467 struct obstack *sccstate_obstack)
3468 {
3469 struct sccs *cstate = NULL;
3470 type_pair_t p;
3471 void **slot;
3472
3473 /* Check first for the obvious case of pointer identity. */
3474 if (t1 == t2)
3475 return true;
3476
3477 /* Check that we have two types to compare. */
3478 if (t1 == NULL_TREE || t2 == NULL_TREE)
3479 return false;
3480
3481 /* If the types have been previously registered and found equal
3482 they still are. */
3483 if (mode == GTC_MERGE)
3484 {
3485 tree leader1 = gimple_lookup_type_leader (t1);
3486 tree leader2 = gimple_lookup_type_leader (t2);
3487 if (leader1 == t2
3488 || t1 == leader2
3489 || (leader1 && leader1 == leader2))
3490 return true;
3491 }
3492 else if (mode == GTC_DIAG)
3493 {
3494 if (TYPE_CANONICAL (t1)
3495 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3496 return true;
3497 }
3498
3499 /* Can't be the same type if the types don't have the same code. */
3500 if (TREE_CODE (t1) != TREE_CODE (t2))
3501 return false;
3502
3503 /* Can't be the same type if they have different CV qualifiers. */
3504 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3505 return false;
3506
3507 /* Void types are always the same. */
3508 if (TREE_CODE (t1) == VOID_TYPE)
3509 return true;
3510
3511 /* Do some simple checks before doing three hashtable queries. */
3512 if (INTEGRAL_TYPE_P (t1)
3513 || SCALAR_FLOAT_TYPE_P (t1)
3514 || FIXED_POINT_TYPE_P (t1)
3515 || TREE_CODE (t1) == VECTOR_TYPE
3516 || TREE_CODE (t1) == COMPLEX_TYPE
3517 || TREE_CODE (t1) == OFFSET_TYPE)
3518 {
3519 /* Can't be the same type if they have different alignment,
3520 sign, precision or mode. */
3521 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3522 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3523 || TYPE_MODE (t1) != TYPE_MODE (t2)
3524 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3525 return false;
3526
3527 if (TREE_CODE (t1) == INTEGER_TYPE
3528 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3529 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3530 return false;
3531
3532 /* That's all we need to check for float and fixed-point types. */
3533 if (SCALAR_FLOAT_TYPE_P (t1)
3534 || FIXED_POINT_TYPE_P (t1))
3535 return true;
3536
3537 /* For integral types fall thru to more complex checks. */
3538 }
3539
3540 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3541 {
3542 /* Can't be the same type if they have different alignment or mode. */
3543 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3544 || TYPE_MODE (t1) != TYPE_MODE (t2))
3545 return false;
3546 }
3547
3548 /* If the hash values of t1 and t2 are different the types can't
3549 possibly be the same. This helps keep the type-pair hashtable
3550 small, only tracking comparisons for hash collisions. */
3551 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3552 return false;
3553
3554 /* Allocate a new cache entry for this comparison. */
3555 p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
3556 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3557 {
3558 /* We have already decided whether T1 and T2 are the
3559 same, return the cached result. */
3560 return p->same_p[mode] == 1;
3561 }
3562
3563 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3564 cstate = (struct sccs *)*slot;
3565 /* Not yet visited. DFS recurse. */
3566 if (!cstate)
3567 {
3568 gimple_types_compatible_p_1 (t1, t2, mode, p,
3569 sccstack, sccstate, sccstate_obstack);
3570 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3571 state->low = MIN (state->low, cstate->low);
3572 }
3573 /* If the type pair is still on the SCC stack, adjust the parent's low. */
3574 if (cstate->dfsnum < state->dfsnum
3575 && cstate->on_sccstack)
3576 state->low = MIN (cstate->dfsnum, state->low);
3577
3578 /* Return the current lattice value. We start with an equality
3579 assumption, so types that are part of an SCC will be optimistically
3580 treated as equal unless proven otherwise. */
3581 return cstate->u.same_p;
3582 }
3583
3584 /* Worker for gimple_types_compatible_p.
3585 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3586
3587 static bool
3588 gimple_types_compatible_p_1 (tree t1, tree t2, enum gtc_mode mode,
3589 type_pair_t p,
3590 VEC(type_pair_t, heap) **sccstack,
3591 struct pointer_map_t *sccstate,
3592 struct obstack *sccstate_obstack)
3593 {
3594 struct sccs *state;
3595
3596 gcc_assert (p->same_p[mode] == -2);
3597
3598 state = XOBNEW (sccstate_obstack, struct sccs);
3599 *pointer_map_insert (sccstate, p) = state;
3600
3601 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3602 state->dfsnum = gtc_next_dfs_num++;
3603 state->low = state->dfsnum;
3604 state->on_sccstack = true;
3605 /* Start with an equality assumption. As we DFS recurse into child
3606 SCCs this assumption may get revisited. */
3607 state->u.same_p = 1;
3608
3609 /* If their attributes are not the same they can't be the same type. */
3610 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3611 goto different_types;
3612
3613 /* Do type-specific comparisons. */
3614 switch (TREE_CODE (t1))
3615 {
3616 case VECTOR_TYPE:
3617 case COMPLEX_TYPE:
3618 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3619 state, sccstack, sccstate, sccstate_obstack))
3620 goto different_types;
3621 goto same_types;
3622
3623 case ARRAY_TYPE:
3624 /* Array types are the same if the element types are the same and
3625 the number of elements is the same. */
3626 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3627 state, sccstack, sccstate, sccstate_obstack)
3628 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3629 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3630 goto different_types;
3631 else
3632 {
3633 tree i1 = TYPE_DOMAIN (t1);
3634 tree i2 = TYPE_DOMAIN (t2);
3635
3636 /* For an incomplete external array, the type domain can be
3637 NULL_TREE. Check this condition also. */
3638 if (i1 == NULL_TREE && i2 == NULL_TREE)
3639 goto same_types;
3640 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3641 goto different_types;
3642 /* If, for a complete array type, the possibly gimplified sizes
3643 differ, the types are different. */
3644 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3645 || (TYPE_SIZE (i1)
3646 && TYPE_SIZE (i2)
3647 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3648 goto different_types;
3649 else
3650 {
3651 tree min1 = TYPE_MIN_VALUE (i1);
3652 tree min2 = TYPE_MIN_VALUE (i2);
3653 tree max1 = TYPE_MAX_VALUE (i1);
3654 tree max2 = TYPE_MAX_VALUE (i2);
3655
3656 /* The minimum/maximum values have to be the same. */
3657 if ((min1 == min2
3658 || (min1 && min2
3659 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3660 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3661 || operand_equal_p (min1, min2, 0))))
3662 && (max1 == max2
3663 || (max1 && max2
3664 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3665 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3666 || operand_equal_p (max1, max2, 0)))))
3667 goto same_types;
3668 else
3669 goto different_types;
3670 }
3671 }
3672
3673 case METHOD_TYPE:
3674 /* Method types should belong to the same class. */
3675 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3676 mode, state, sccstack, sccstate, sccstate_obstack))
3677 goto different_types;
3678
3679 /* Fallthru */
3680
3681 case FUNCTION_TYPE:
3682 /* Function types are the same if the return type and argument types
3683 are the same. */
3684 if ((mode != GTC_DIAG
3685 || !gimple_compatible_complete_and_incomplete_subtype_p
3686 (TREE_TYPE (t1), TREE_TYPE (t2)))
3687 && !gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3688 state, sccstack, sccstate, sccstate_obstack))
3689 goto different_types;
3690
3691 if (!comp_type_attributes (t1, t2))
3692 goto different_types;
3693
3694 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3695 goto same_types;
3696 else
3697 {
3698 tree parms1, parms2;
3699
3700 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3701 parms1 && parms2;
3702 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3703 {
3704 if ((mode == GTC_MERGE
3705 || !gimple_compatible_complete_and_incomplete_subtype_p
3706 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3707 && !gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2), mode,
3708 state, sccstack, sccstate, sccstate_obstack))
3709 goto different_types;
3710 }
3711
3712 if (parms1 || parms2)
3713 goto different_types;
3714
3715 goto same_types;
3716 }
3717
3718 case OFFSET_TYPE:
3719 {
3720 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3721 state, sccstack, sccstate, sccstate_obstack)
3722 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3723 TYPE_OFFSET_BASETYPE (t2), mode,
3724 state, sccstack, sccstate, sccstate_obstack))
3725 goto different_types;
3726
3727 goto same_types;
3728 }
3729
3730 case POINTER_TYPE:
3731 case REFERENCE_TYPE:
3732 {
3733 /* If the two pointers have different ref-all attributes,
3734 they can't be the same type. */
3735 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3736 goto different_types;
3737
3738 /* If one pointer points to an incomplete type variant of
3739 the other pointed-to type they are the same. */
3740 if (mode == GTC_DIAG
3741 && gimple_compatible_complete_and_incomplete_subtype_p
3742 (TREE_TYPE (t1), TREE_TYPE (t2)))
3743 goto same_types;
3744
3745 /* Otherwise, pointer and reference types are the same if the
3746 pointed-to types are the same. */
3747 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2), mode,
3748 state, sccstack, sccstate, sccstate_obstack))
3749 goto same_types;
3750
3751 goto different_types;
3752 }
3753
3754 case NULLPTR_TYPE:
3755 /* There is only one decltype(nullptr). */
3756 goto same_types;
3757
3758 case INTEGER_TYPE:
3759 case BOOLEAN_TYPE:
3760 {
3761 tree min1 = TYPE_MIN_VALUE (t1);
3762 tree max1 = TYPE_MAX_VALUE (t1);
3763 tree min2 = TYPE_MIN_VALUE (t2);
3764 tree max2 = TYPE_MAX_VALUE (t2);
3765 bool min_equal_p = false;
3766 bool max_equal_p = false;
3767
3768 /* If either type has a minimum value, the other type must
3769 have the same. */
3770 if (min1 == NULL_TREE && min2 == NULL_TREE)
3771 min_equal_p = true;
3772 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3773 min_equal_p = true;
3774
3775 /* Likewise, if either type has a maximum value, the other
3776 type must have the same. */
3777 if (max1 == NULL_TREE && max2 == NULL_TREE)
3778 max_equal_p = true;
3779 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3780 max_equal_p = true;
3781
3782 if (!min_equal_p || !max_equal_p)
3783 goto different_types;
3784
3785 goto same_types;
3786 }
3787
3788 case ENUMERAL_TYPE:
3789 {
3790 /* FIXME lto, we cannot check bounds on enumeral types because
3791 different front ends will produce different values.
3792 In C, enumeral types are integers, while in C++ each element
3793 will have its own symbolic value. We should decide how enums
3794 are to be represented in GIMPLE and have each front end lower
3795 to that. */
3796 tree v1, v2;
3797
3798 /* For enumeral types, all the values must be the same. */
3799 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3800 goto same_types;
3801
3802 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3803 v1 && v2;
3804 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3805 {
3806 tree c1 = TREE_VALUE (v1);
3807 tree c2 = TREE_VALUE (v2);
3808
3809 if (TREE_CODE (c1) == CONST_DECL)
3810 c1 = DECL_INITIAL (c1);
3811
3812 if (TREE_CODE (c2) == CONST_DECL)
3813 c2 = DECL_INITIAL (c2);
3814
3815 if (tree_int_cst_equal (c1, c2) != 1)
3816 goto different_types;
3817
3818 if (mode == GTC_MERGE && TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3819 goto different_types;
3820 }
3821
3822 /* If one enumeration has more values than the other, they
3823 are not the same. */
3824 if (v1 || v2)
3825 goto different_types;
3826
3827 goto same_types;
3828 }
3829
3830 case RECORD_TYPE:
3831 case UNION_TYPE:
3832 case QUAL_UNION_TYPE:
3833 {
3834 tree f1, f2;
3835
3836 /* The struct tags shall compare equal. */
3837 if (mode == GTC_MERGE
3838 && !compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3839 TYPE_MAIN_VARIANT (t2), false))
3840 goto different_types;
3841
3842 /* For aggregate types, all the fields must be the same. */
3843 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3844 f1 && f2;
3845 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3846 {
3847 /* The fields must have the same name, offset and type. */
3848 if ((mode == GTC_MERGE
3849 && DECL_NAME (f1) != DECL_NAME (f2))
3850 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3851 || !gimple_compare_field_offset (f1, f2)
3852 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2), mode,
3853 state, sccstack, sccstate, sccstate_obstack))
3854 goto different_types;
3855 }
3856
3857 /* If one aggregate has more fields than the other, they
3858 are not the same. */
3859 if (f1 || f2)
3860 goto different_types;
3861
3862 goto same_types;
3863 }
3864
3865 default:
3866 gcc_unreachable ();
3867 }
3868
3869 /* Common exit path for types that are not compatible. */
3870 different_types:
3871 state->u.same_p = 0;
3872 goto pop;
3873
3874 /* Common exit path for types that are compatible. */
3875 same_types:
3876 gcc_assert (state->u.same_p == 1);
3877
3878 pop:
3879 if (state->low == state->dfsnum)
3880 {
3881 type_pair_t x;
3882
3883 /* Pop off the SCC and set its cache values to the final
3884 comparison result. */
3885 do
3886 {
3887 struct sccs *cstate;
3888 x = VEC_pop (type_pair_t, *sccstack);
3889 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3890 cstate->on_sccstack = false;
3891 x->same_p[mode] = state->u.same_p;
3892 }
3893 while (x != p);
3894 }
3895
3896 return state->u.same_p;
3897 }
3898
3899 /* Return true iff T1 and T2 are structurally identical. When
3900 comparing for merging (MODE == GTC_MERGE), an incomplete type and a
3901 complete type are considered different; otherwise they are considered compatible. */
3902
3903 bool
3904 gimple_types_compatible_p (tree t1, tree t2, enum gtc_mode mode)
3905 {
3906 VEC(type_pair_t, heap) *sccstack = NULL;
3907 struct pointer_map_t *sccstate;
3908 struct obstack sccstate_obstack;
3909 type_pair_t p = NULL;
3910 bool res;
3911
3912 /* Before starting to set up the SCC machinery, handle simple cases. */
3913
3914 /* Check first for the obvious case of pointer identity. */
3915 if (t1 == t2)
3916 return true;
3917
3918 /* Check that we have two types to compare. */
3919 if (t1 == NULL_TREE || t2 == NULL_TREE)
3920 return false;
3921
3922 /* If the types have been previously registered and found equal
3923 they still are. */
3924 if (mode == GTC_MERGE)
3925 {
3926 tree leader1 = gimple_lookup_type_leader (t1);
3927 tree leader2 = gimple_lookup_type_leader (t2);
3928 if (leader1 == t2
3929 || t1 == leader2
3930 || (leader1 && leader1 == leader2))
3931 return true;
3932 }
3933 else if (mode == GTC_DIAG)
3934 {
3935 if (TYPE_CANONICAL (t1)
3936 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3937 return true;
3938 }
3939
3940 /* Can't be the same type if the types don't have the same code. */
3941 if (TREE_CODE (t1) != TREE_CODE (t2))
3942 return false;
3943
3944 /* Can't be the same type if they have different CV qualifiers. */
3945 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3946 return false;
3947
3948 /* Void types are always the same. */
3949 if (TREE_CODE (t1) == VOID_TYPE)
3950 return true;
3951
3952 /* Do some simple checks before doing three hashtable queries. */
3953 if (INTEGRAL_TYPE_P (t1)
3954 || SCALAR_FLOAT_TYPE_P (t1)
3955 || FIXED_POINT_TYPE_P (t1)
3956 || TREE_CODE (t1) == VECTOR_TYPE
3957 || TREE_CODE (t1) == COMPLEX_TYPE
3958 || TREE_CODE (t1) == OFFSET_TYPE)
3959 {
3960 /* Can't be the same type if they have different alignment,
3961 sign, precision or mode. */
3962 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3963 || TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3964 || TYPE_MODE (t1) != TYPE_MODE (t2)
3965 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3966 return false;
3967
3968 if (TREE_CODE (t1) == INTEGER_TYPE
3969 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3970 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3971 return false;
3972
3973 /* That's all we need to check for float and fixed-point types. */
3974 if (SCALAR_FLOAT_TYPE_P (t1)
3975 || FIXED_POINT_TYPE_P (t1))
3976 return true;
3977
3978 /* For integral types fall thru to more complex checks. */
3979 }
3980
3981 else if (AGGREGATE_TYPE_P (t1) || POINTER_TYPE_P (t1))
3982 {
3983 /* Can't be the same type if they have different alignment or mode. */
3984 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3985 || TYPE_MODE (t1) != TYPE_MODE (t2))
3986 return false;
3987 }
3988
3989 /* If the hash values of t1 and t2 are different the types can't
3990 possibly be the same. This helps keep the type-pair hashtable
3991 small, only tracking comparisons for hash collisions. */
3992 if (gimple_type_hash_1 (t1, mode) != gimple_type_hash_1 (t2, mode))
3993 return false;
3994
3995 /* If we've visited this type pair before (in the case of aggregates
3996 with self-referential types), and we made a decision, return it. */
3997 p = lookup_type_pair (t1, t2, &gtc_visited, &gtc_ob);
3998 if (p->same_p[mode] == 0 || p->same_p[mode] == 1)
3999 {
4000 /* We have already decided whether T1 and T2 are the
4001 same, return the cached result. */
4002 return p->same_p[mode] == 1;
4003 }
4004
4005 /* Now set up the SCC machinery for the comparison. */
4006 gtc_next_dfs_num = 1;
4007 sccstate = pointer_map_create ();
4008 gcc_obstack_init (&sccstate_obstack);
4009 res = gimple_types_compatible_p_1 (t1, t2, mode, p,
4010 &sccstack, sccstate, &sccstate_obstack);
4011 VEC_free (type_pair_t, heap, sccstack);
4012 pointer_map_destroy (sccstate);
4013 obstack_free (&sccstate_obstack, NULL);
4014
4015 return res;
4016 }
4017
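/* Illustrative sketch, not part of the original file: a hypothetical
   wrapper showing how a caller would query structural compatibility of
   two types.  GTC_MERGE selects the strict comparison used when merging
   types across translation units; GTC_DIAG is the other mode, used
   below when building the canonical type table.  */

static bool
types_structurally_same_sketch (tree t1, tree t2)
{
  return gimple_types_compatible_p (t1, t2, GTC_MERGE);
}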
4018
4019 static hashval_t
4020 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
4021 struct pointer_map_t *, struct obstack *,
4022 enum gtc_mode);
4023
4024 /* DFS visit the edge from the caller's type with state *STATE to T.
4025 Update the caller's type hash V with the hash for T if it is not part
4026 of the SCC containing the caller's type and return it.
4027 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK hold the state of the DFS walk. */
4028
4029 static hashval_t
4030 visit (tree t, struct sccs *state, hashval_t v,
4031 VEC (tree, heap) **sccstack,
4032 struct pointer_map_t *sccstate,
4033 struct obstack *sccstate_obstack, enum gtc_mode mode)
4034 {
4035 struct sccs *cstate = NULL;
4036 struct tree_int_map m;
4037 void **slot;
4038
4039 /* If there is a hash value recorded for this type then it can't
4040 possibly be part of our parent SCC. Simply mix in its hash. */
4041 m.base.from = t;
4042 if ((slot = htab_find_slot (mode == GTC_MERGE
4043 ? type_hash_cache : canonical_type_hash_cache,
4044 &m, NO_INSERT))
4045 && *slot)
4046 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
4047
4048 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
4049 cstate = (struct sccs *)*slot;
4050 if (!cstate)
4051 {
4052 hashval_t tem;
4053 /* Not yet visited. DFS recurse. */
4054 tem = iterative_hash_gimple_type (t, v,
4055 sccstack, sccstate, sccstate_obstack,
4056 mode);
4057 if (!cstate)
4058 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
4059 state->low = MIN (state->low, cstate->low);
4060 /* If the type is no longer on the SCC stack and thus is not part
4061 of the parent's SCC, mix in its hash value. Otherwise we will
4062 ignore the type for hashing purposes and return the unaltered
4063 hash value. */
4064 if (!cstate->on_sccstack)
4065 return tem;
4066 }
4067 if (cstate->dfsnum < state->dfsnum
4068 && cstate->on_sccstack)
4069 state->low = MIN (cstate->dfsnum, state->low);
4070
4071 /* We are part of our parent's SCC, skip this type during hashing
4072 and return the unaltered hash value. */
4073 return v;
4074 }
4075
4076 /* Hash NAME with the previous hash value V and return it. */
4077
4078 static hashval_t
4079 iterative_hash_name (tree name, hashval_t v)
4080 {
4081 if (!name)
4082 return v;
4083 if (TREE_CODE (name) == TYPE_DECL)
4084 name = DECL_NAME (name);
4085 if (!name)
4086 return v;
4087 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4088 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4089 }
4090
4091 /* Return a hash value for gimple type TYPE combined with VAL.
4092 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4093
4094 To hash a type we end up hashing in types that are reachable.
4095 Through pointers we can end up with cycles, which mess up the
4096 required property that we need to compute the same hash value
4097 for structurally equivalent types. To avoid this we have to
4098 hash all types in a cycle (the SCC) in a commutative way. The
4099 easiest way is to not mix in the hashes of the SCC members at
4100 all. To make this work we have to delay setting the hash
4101 values of the SCC until it is complete. */
4102
4103 static hashval_t
4104 iterative_hash_gimple_type (tree type, hashval_t val,
4105 VEC(tree, heap) **sccstack,
4106 struct pointer_map_t *sccstate,
4107 struct obstack *sccstate_obstack,
4108 enum gtc_mode mode)
4109 {
4110 hashval_t v;
4111 void **slot;
4112 struct sccs *state;
4113
4114 /* Not visited during this DFS walk. */
4115 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4116 state = XOBNEW (sccstate_obstack, struct sccs);
4117 *pointer_map_insert (sccstate, type) = state;
4118
4119 VEC_safe_push (tree, heap, *sccstack, type);
4120 state->dfsnum = next_dfs_num++;
4121 state->low = state->dfsnum;
4122 state->on_sccstack = true;
4123
4124 /* Combine a few common features of types so that types are grouped into
4125 smaller sets; when searching for existing matching types to merge,
4126 only existing types having the same features as the new type will be
4127 checked. */
4128 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4129 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4130 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4131
4132 /* Do not hash the type's size as this will cause differences in
4133 hash values for the complete vs. the incomplete type variant. */
4134
4135 /* Incorporate common features of numerical types. */
4136 if (INTEGRAL_TYPE_P (type)
4137 || SCALAR_FLOAT_TYPE_P (type)
4138 || FIXED_POINT_TYPE_P (type))
4139 {
4140 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4141 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4142 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4143 }
4144
4145 /* For pointer and reference types, fold in information about the type
4146 pointed to but do not recurse into possibly incomplete types to
4147 avoid hash differences for complete vs. incomplete types. */
4148 if (POINTER_TYPE_P (type))
4149 {
4150 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4151 {
4152 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4153 v = iterative_hash_name
4154 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4155 }
4156 else
4157 v = visit (TREE_TYPE (type), state, v,
4158 sccstack, sccstate, sccstate_obstack, mode);
4159 }
4160
4161 /* For integer types hash the type's min/max values and the string flag. */
4162 if (TREE_CODE (type) == INTEGER_TYPE)
4163 {
4164 /* OMP lowering can introduce error_mark_node in place of
4165 random local decls in types. */
4166 if (TYPE_MIN_VALUE (type) != error_mark_node)
4167 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4168 if (TYPE_MAX_VALUE (type) != error_mark_node)
4169 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4170 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4171 }
4172
4173 /* For array types hash their domain and the string flag. */
4174 if (TREE_CODE (type) == ARRAY_TYPE
4175 && TYPE_DOMAIN (type))
4176 {
4177 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4178 v = visit (TYPE_DOMAIN (type), state, v,
4179 sccstack, sccstate, sccstate_obstack, mode);
4180 }
4181
4182 /* Recurse for aggregates with a single element type. */
4183 if (TREE_CODE (type) == ARRAY_TYPE
4184 || TREE_CODE (type) == COMPLEX_TYPE
4185 || TREE_CODE (type) == VECTOR_TYPE)
4186 v = visit (TREE_TYPE (type), state, v,
4187 sccstack, sccstate, sccstate_obstack, mode);
4188
4189 /* Incorporate function return and argument types. */
4190 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4191 {
4192 unsigned na;
4193 tree p;
4194
4195 /* For method types also incorporate their parent class. */
4196 if (TREE_CODE (type) == METHOD_TYPE)
4197 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4198 sccstack, sccstate, sccstate_obstack, mode);
4199
4200 /* For result types allow mismatch in completeness. */
4201 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4202 {
4203 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4204 v = iterative_hash_name
4205 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4206 }
4207 else
4208 v = visit (TREE_TYPE (type), state, v,
4209 sccstack, sccstate, sccstate_obstack, mode);
4210
4211 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4212 {
4213 /* For argument types allow mismatch in completeness. */
4214 if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
4215 {
4216 v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
4217 v = iterative_hash_name
4218 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
4219 }
4220 else
4221 v = visit (TREE_VALUE (p), state, v,
4222 sccstack, sccstate, sccstate_obstack, mode);
4223 na++;
4224 }
4225
4226 v = iterative_hash_hashval_t (na, v);
4227 }
4228
4229 if (TREE_CODE (type) == RECORD_TYPE
4230 || TREE_CODE (type) == UNION_TYPE
4231 || TREE_CODE (type) == QUAL_UNION_TYPE)
4232 {
4233 unsigned nf;
4234 tree f;
4235
4236 if (mode == GTC_MERGE)
4237 v = iterative_hash_name (TYPE_NAME (TYPE_MAIN_VARIANT (type)), v);
4238
4239 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4240 {
4241 if (mode == GTC_MERGE)
4242 v = iterative_hash_name (DECL_NAME (f), v);
4243 v = visit (TREE_TYPE (f), state, v,
4244 sccstack, sccstate, sccstate_obstack, mode);
4245 nf++;
4246 }
4247
4248 v = iterative_hash_hashval_t (nf, v);
4249 }
4250
4251 /* Record hash for us. */
4252 state->u.hash = v;
4253
4254 /* See if we found an SCC. */
4255 if (state->low == state->dfsnum)
4256 {
4257 tree x;
4258
4259 /* Pop off the SCC and set its hash values. */
4260 do
4261 {
4262 struct sccs *cstate;
4263 struct tree_int_map *m = ggc_alloc_cleared_tree_int_map ();
4264 x = VEC_pop (tree, *sccstack);
4265 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4266 cstate->on_sccstack = false;
4267 m->base.from = x;
4268 m->to = cstate->u.hash;
4269 slot = htab_find_slot (mode == GTC_MERGE
4270 ? type_hash_cache : canonical_type_hash_cache,
4271 m, INSERT);
4272 gcc_assert (!*slot);
4273 *slot = (void *) m;
4274 }
4275 while (x != type);
4276 }
4277
4278 return iterative_hash_hashval_t (v, val);
4279 }
4280
4281
4282 /* Returns a hash value for P (assumed to be a type). The hash value
4283 is computed using some distinguishing features of the type. Note
4284 that we cannot use pointer hashing here as we may be dealing with
4285 two distinct instances of the same type.
4286
4287 This function should produce the same hash value for two compatible
4288 types according to gimple_types_compatible_p. */
4289
4290 static hashval_t
4291 gimple_type_hash_1 (const void *p, enum gtc_mode mode)
4292 {
4293 const_tree t = (const_tree) p;
4294 VEC(tree, heap) *sccstack = NULL;
4295 struct pointer_map_t *sccstate;
4296 struct obstack sccstate_obstack;
4297 hashval_t val;
4298 void **slot;
4299 struct tree_int_map m;
4300
4301 if (mode == GTC_MERGE
4302 && type_hash_cache == NULL)
4303 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4304 tree_int_map_eq, NULL);
4305 else if (mode == GTC_DIAG
4306 && canonical_type_hash_cache == NULL)
4307 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4308 tree_int_map_eq, NULL);
4309
4310 m.base.from = CONST_CAST_TREE (t);
4311 if ((slot = htab_find_slot (mode == GTC_MERGE
4312 ? type_hash_cache : canonical_type_hash_cache,
4313 &m, NO_INSERT))
4314 && *slot)
4315 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4316
4317 /* Perform a DFS walk and pre-hash all reachable types. */
4318 next_dfs_num = 1;
4319 sccstate = pointer_map_create ();
4320 gcc_obstack_init (&sccstate_obstack);
4321 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4322 &sccstack, sccstate, &sccstate_obstack,
4323 mode);
4324 VEC_free (tree, heap, sccstack);
4325 pointer_map_destroy (sccstate);
4326 obstack_free (&sccstate_obstack, NULL);
4327
4328 return val;
4329 }
4330
4331 static hashval_t
4332 gimple_type_hash (const void *p)
4333 {
4334 return gimple_type_hash_1 (p, GTC_MERGE);
4335 }
4336
4337 static hashval_t
4338 gimple_canonical_type_hash (const void *p)
4339 {
4340 return gimple_type_hash_1 (p, GTC_DIAG);
4341 }
4342
4343
4344 /* Returns nonzero if P1 and P2 are equal. */
4345
4346 static int
4347 gimple_type_eq (const void *p1, const void *p2)
4348 {
4349 const_tree t1 = (const_tree) p1;
4350 const_tree t2 = (const_tree) p2;
4351 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4352 CONST_CAST_TREE (t2), GTC_MERGE);
4353 }
4354
4355
4356 /* Register type T in the global type table gimple_types.
4357 If another type T', compatible with T, already existed in
4358 gimple_types then return T', otherwise return T. This is used by
4359 LTO to merge identical types read from different TUs. */
4360
4361 tree
4362 gimple_register_type (tree t)
4363 {
4364 void **slot;
4365 gimple_type_leader_entry *leader;
4366 tree mv_leader = NULL_TREE;
4367
4368 gcc_assert (TYPE_P (t));
4369
4370 if (!gimple_type_leader)
4371 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4372 (GIMPLE_TYPE_LEADER_SIZE);
4373 /* If we registered this type before, return the cached result. */
4374 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4375 if (leader->type == t)
4376 return leader->leader;
4377
4378 /* Always register the main variant first. This is important so we
4379 pick up the non-typedef variants as canonical, otherwise we'll end
4380 up taking typedef ids for structure tags during comparison. */
4381 if (TYPE_MAIN_VARIANT (t) != t)
4382 mv_leader = gimple_register_type (TYPE_MAIN_VARIANT (t));
4383
4384 if (gimple_types == NULL)
4385 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4386
4387 slot = htab_find_slot (gimple_types, t, INSERT);
4388 if (*slot
4389 && *(tree *)slot != t)
4390 {
4391 tree new_type = (tree) *((tree *) slot);
4392
4393 /* Do not merge types with different addressability. */
4394 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
4395
4396 /* If t is not its main variant then make t unreachable from its
4397 main variant list. Otherwise we'd queue up a lot of duplicates
4398 there. */
4399 if (t != TYPE_MAIN_VARIANT (t))
4400 {
4401 tree tem = TYPE_MAIN_VARIANT (t);
4402 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4403 tem = TYPE_NEXT_VARIANT (tem);
4404 if (tem)
4405 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4406 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4407 }
4408
4409 /* If we are a pointer then remove us from the pointer-to or
4410 reference-to chain. Otherwise we'd queue up a lot of duplicates
4411 there. */
4412 if (TREE_CODE (t) == POINTER_TYPE)
4413 {
4414 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
4415 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
4416 else
4417 {
4418 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
4419 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
4420 tem = TYPE_NEXT_PTR_TO (tem);
4421 if (tem)
4422 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
4423 }
4424 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
4425 }
4426 else if (TREE_CODE (t) == REFERENCE_TYPE)
4427 {
4428 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
4429 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
4430 else
4431 {
4432 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
4433 while (tem && TYPE_NEXT_REF_TO (tem) != t)
4434 tem = TYPE_NEXT_REF_TO (tem);
4435 if (tem)
4436 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
4437 }
4438 TYPE_NEXT_REF_TO (t) = NULL_TREE;
4439 }
4440
4441 leader->type = t;
4442 leader->leader = new_type;
4443 t = new_type;
4444 }
4445 else
4446 {
4447 leader->type = t;
4448 leader->leader = t;
4449 /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
4450 if (TYPE_MAIN_VARIANT (t) != t
4451 && TYPE_MAIN_VARIANT (t) != mv_leader)
4452 {
4453 /* Remove us from our main variant list as we are not the variant
4454 leader and the variant leader will change. */
4455 tree tem = TYPE_MAIN_VARIANT (t);
4456 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4457 tem = TYPE_NEXT_VARIANT (tem);
4458 if (tem)
4459 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4460 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4461 /* Adjust our main variant. Linking us into its variant list
4462 will happen at fixup time. */
4463 TYPE_MAIN_VARIANT (t) = mv_leader;
4464 }
4465 *slot = (void *) t;
4466 }
4467
4468 return t;
4469 }
4470
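/* Illustrative sketch, not part of the original file: how a reader of
   streamed types might fold a freshly built type into the global table.
   The helper name and its parameter are hypothetical; only
   gimple_register_type above is real.  */

static tree
register_streamed_type_sketch (tree streamed_type)
{
  /* Either STREAMED_TYPE itself becomes the leader, or a previously
     registered, structurally identical type is returned instead.  */
  tree leader = gimple_register_type (streamed_type);
  gcc_assert (TYPE_P (leader));
  return leader;
}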
4471
4472 /* Returns nonzero if P1 and P2 are equal. */
4473
4474 static int
4475 gimple_canonical_type_eq (const void *p1, const void *p2)
4476 {
4477 const_tree t1 = (const_tree) p1;
4478 const_tree t2 = (const_tree) p2;
4479 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4480 CONST_CAST_TREE (t2), GTC_DIAG);
4481 }
4482
4483 /* Register type T in the global canonical type table
4484 gimple_canonical_types. If another type T', compatible with T, already
4485 existed there then return T', otherwise return T. This is used by LTO
4486 to compute TYPE_CANONICAL for types read from different TUs. */
4487
4488 tree
4489 gimple_register_canonical_type (tree t)
4490 {
4491 void **slot;
4492 tree orig_t = t;
4493
4494 gcc_assert (TYPE_P (t));
4495
4496 if (TYPE_CANONICAL (t))
4497 return TYPE_CANONICAL (t);
4498
4499 /* Always register the type itself first so that if it turns out
4500 to be the canonical type it will be the one we merge to as well. */
4501 t = gimple_register_type (t);
4502
4503 /* Always register the main variant first. This is important so we
4504 pick up the non-typedef variants as canonical, otherwise we'll end
4505 up taking typedef ids for structure tags during comparison. */
4506 if (TYPE_MAIN_VARIANT (t) != t)
4507 gimple_register_canonical_type (TYPE_MAIN_VARIANT (t));
4508
4509 if (gimple_canonical_types == NULL)
4510 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4511 gimple_canonical_type_eq, 0);
4512
4513 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4514 if (*slot
4515 && *(tree *)slot != t)
4516 {
4517 tree new_type = (tree) *((tree *) slot);
4518
4519 TYPE_CANONICAL (t) = new_type;
4520 t = new_type;
4521 }
4522 else
4523 {
4524 TYPE_CANONICAL (t) = t;
4525 *slot = (void *) t;
4526 }
4527
4528 /* Also cache the canonical type in the non-leader ORIG_T. */
4529 TYPE_CANONICAL (orig_t) = t;
4530
4531 return t;
4532 }
4533
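/* Illustrative sketch, not part of the original file: after canonical
   registration, two structurally equivalent types end up with the same
   TYPE_CANONICAL, which is what alias analysis compares.  The helper
   name is hypothetical.  */

static bool
share_canonical_type_sketch (tree t1, tree t2)
{
  return (gimple_register_canonical_type (t1)
          == gimple_register_canonical_type (t2));
}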
4534
4535 /* Show statistics on references to the global type table gimple_types. */
4536
4537 void
4538 print_gimple_types_stats (void)
4539 {
4540 if (gimple_types)
4541 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4542 "%ld searches, %ld collisions (ratio: %f)\n",
4543 (long) htab_size (gimple_types),
4544 (long) htab_elements (gimple_types),
4545 (long) gimple_types->searches,
4546 (long) gimple_types->collisions,
4547 htab_collisions (gimple_types));
4548 else
4549 fprintf (stderr, "GIMPLE type table is empty\n");
4550 if (type_hash_cache)
4551 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4552 "%ld searches, %ld collisions (ratio: %f)\n",
4553 (long) htab_size (type_hash_cache),
4554 (long) htab_elements (type_hash_cache),
4555 (long) type_hash_cache->searches,
4556 (long) type_hash_cache->collisions,
4557 htab_collisions (type_hash_cache));
4558 else
4559 fprintf (stderr, "GIMPLE type hash table is empty\n");
4560 if (gimple_canonical_types)
4561 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4562 "%ld searches, %ld collisions (ratio: %f)\n",
4563 (long) htab_size (gimple_canonical_types),
4564 (long) htab_elements (gimple_canonical_types),
4565 (long) gimple_canonical_types->searches,
4566 (long) gimple_canonical_types->collisions,
4567 htab_collisions (gimple_canonical_types));
4568 else
4569 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4570 if (canonical_type_hash_cache)
4571 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4572 "%ld searches, %ld collisions (ratio: %f)\n",
4573 (long) htab_size (canonical_type_hash_cache),
4574 (long) htab_elements (canonical_type_hash_cache),
4575 (long) canonical_type_hash_cache->searches,
4576 (long) canonical_type_hash_cache->collisions,
4577 htab_collisions (canonical_type_hash_cache));
4578 else
4579 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4580 if (gtc_visited)
4581 fprintf (stderr, "GIMPLE type comparison table: size %ld, %ld "
4582 "elements, %ld searches, %ld collisions (ratio: %f)\n",
4583 (long) htab_size (gtc_visited),
4584 (long) htab_elements (gtc_visited),
4585 (long) gtc_visited->searches,
4586 (long) gtc_visited->collisions,
4587 htab_collisions (gtc_visited));
4588 else
4589 fprintf (stderr, "GIMPLE type comparison table is empty\n");
4590 }
4591
4592 /* Free the gimple type hashtables used for LTO type merging. */
4593
4594 void
4595 free_gimple_type_tables (void)
4596 {
4597 /* Last chance to print stats for the tables. */
4598 if (flag_lto_report)
4599 print_gimple_types_stats ();
4600
4601 if (gimple_types)
4602 {
4603 htab_delete (gimple_types);
4604 gimple_types = NULL;
4605 }
4606 if (gimple_canonical_types)
4607 {
4608 htab_delete (gimple_canonical_types);
4609 gimple_canonical_types = NULL;
4610 }
4611 if (type_hash_cache)
4612 {
4613 htab_delete (type_hash_cache);
4614 type_hash_cache = NULL;
4615 }
4616 if (canonical_type_hash_cache)
4617 {
4618 htab_delete (canonical_type_hash_cache);
4619 canonical_type_hash_cache = NULL;
4620 }
4621 if (gtc_visited)
4622 {
4623 htab_delete (gtc_visited);
4624 obstack_free (&gtc_ob, NULL);
4625 gtc_visited = NULL;
4626 }
4627 gimple_type_leader = NULL;
4628 }
4629
4630
4631 /* Return a type the same as TYPE except unsigned or
4632 signed according to UNSIGNEDP. */
4633
4634 static tree
4635 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4636 {
4637 tree type1;
4638
4639 type1 = TYPE_MAIN_VARIANT (type);
4640 if (type1 == signed_char_type_node
4641 || type1 == char_type_node
4642 || type1 == unsigned_char_type_node)
4643 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4644 if (type1 == integer_type_node || type1 == unsigned_type_node)
4645 return unsignedp ? unsigned_type_node : integer_type_node;
4646 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4647 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4648 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4649 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4650 if (type1 == long_long_integer_type_node
4651 || type1 == long_long_unsigned_type_node)
4652 return unsignedp
4653 ? long_long_unsigned_type_node
4654 : long_long_integer_type_node;
4655 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
4656 return unsignedp
4657 ? int128_unsigned_type_node
4658 : int128_integer_type_node;
4659 #if HOST_BITS_PER_WIDE_INT >= 64
4660 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4661 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4662 #endif
4663 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4664 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4665 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4666 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4667 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4668 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4669 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4670 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4671
4672 #define GIMPLE_FIXED_TYPES(NAME) \
4673 if (type1 == short_ ## NAME ## _type_node \
4674 || type1 == unsigned_short_ ## NAME ## _type_node) \
4675 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4676 : short_ ## NAME ## _type_node; \
4677 if (type1 == NAME ## _type_node \
4678 || type1 == unsigned_ ## NAME ## _type_node) \
4679 return unsignedp ? unsigned_ ## NAME ## _type_node \
4680 : NAME ## _type_node; \
4681 if (type1 == long_ ## NAME ## _type_node \
4682 || type1 == unsigned_long_ ## NAME ## _type_node) \
4683 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4684 : long_ ## NAME ## _type_node; \
4685 if (type1 == long_long_ ## NAME ## _type_node \
4686 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4687 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4688 : long_long_ ## NAME ## _type_node;
4689
4690 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4691 if (type1 == NAME ## _type_node \
4692 || type1 == u ## NAME ## _type_node) \
4693 return unsignedp ? u ## NAME ## _type_node \
4694 : NAME ## _type_node;
4695
4696 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4697 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4698 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4699 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4700 : sat_ ## short_ ## NAME ## _type_node; \
4701 if (type1 == sat_ ## NAME ## _type_node \
4702 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4703 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4704 : sat_ ## NAME ## _type_node; \
4705 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4706 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4707 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4708 : sat_ ## long_ ## NAME ## _type_node; \
4709 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4710 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4711 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4712 : sat_ ## long_long_ ## NAME ## _type_node;
4713
4714 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4715 if (type1 == sat_ ## NAME ## _type_node \
4716 || type1 == sat_ ## u ## NAME ## _type_node) \
4717 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4718 : sat_ ## NAME ## _type_node;
4719
4720 GIMPLE_FIXED_TYPES (fract);
4721 GIMPLE_FIXED_TYPES_SAT (fract);
4722 GIMPLE_FIXED_TYPES (accum);
4723 GIMPLE_FIXED_TYPES_SAT (accum);
4724
4725 GIMPLE_FIXED_MODE_TYPES (qq);
4726 GIMPLE_FIXED_MODE_TYPES (hq);
4727 GIMPLE_FIXED_MODE_TYPES (sq);
4728 GIMPLE_FIXED_MODE_TYPES (dq);
4729 GIMPLE_FIXED_MODE_TYPES (tq);
4730 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4731 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4732 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4733 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4734 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4735 GIMPLE_FIXED_MODE_TYPES (ha);
4736 GIMPLE_FIXED_MODE_TYPES (sa);
4737 GIMPLE_FIXED_MODE_TYPES (da);
4738 GIMPLE_FIXED_MODE_TYPES (ta);
4739 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4740 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4741 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4742 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4743
4744 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4745 the precision; they have precision set to match their range, but
4746 may use a wider mode to match an ABI. If we change modes, we may
4747 wind up with bad conversions. For INTEGER_TYPEs in C, must check
4748 the precision as well, so as to yield correct results for
4749 bit-field types. C++ does not have these separate bit-field
4750 types, and producing a signed or unsigned variant of an
4751 ENUMERAL_TYPE may cause other problems as well. */
4752 if (!INTEGRAL_TYPE_P (type)
4753 || TYPE_UNSIGNED (type) == unsignedp)
4754 return type;
4755
4756 #define TYPE_OK(node) \
4757 (TYPE_MODE (type) == TYPE_MODE (node) \
4758 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4759 if (TYPE_OK (signed_char_type_node))
4760 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4761 if (TYPE_OK (integer_type_node))
4762 return unsignedp ? unsigned_type_node : integer_type_node;
4763 if (TYPE_OK (short_integer_type_node))
4764 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4765 if (TYPE_OK (long_integer_type_node))
4766 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4767 if (TYPE_OK (long_long_integer_type_node))
4768 return (unsignedp
4769 ? long_long_unsigned_type_node
4770 : long_long_integer_type_node);
4771 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4772 return (unsignedp
4773 ? int128_unsigned_type_node
4774 : int128_integer_type_node);
4775
4776 #if HOST_BITS_PER_WIDE_INT >= 64
4777 if (TYPE_OK (intTI_type_node))
4778 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4779 #endif
4780 if (TYPE_OK (intDI_type_node))
4781 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4782 if (TYPE_OK (intSI_type_node))
4783 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4784 if (TYPE_OK (intHI_type_node))
4785 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4786 if (TYPE_OK (intQI_type_node))
4787 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4788
4789 #undef GIMPLE_FIXED_TYPES
4790 #undef GIMPLE_FIXED_MODE_TYPES
4791 #undef GIMPLE_FIXED_TYPES_SAT
4792 #undef GIMPLE_FIXED_MODE_TYPES_SAT
4793 #undef TYPE_OK
4794
4795 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
4796 }
4797
4798
4799 /* Return an unsigned type the same as TYPE in other respects. */
4800
4801 tree
4802 gimple_unsigned_type (tree type)
4803 {
4804 return gimple_signed_or_unsigned_type (true, type);
4805 }
4806
4807
4808 /* Return a signed type the same as TYPE in other respects. */
4809
4810 tree
4811 gimple_signed_type (tree type)
4812 {
4813 return gimple_signed_or_unsigned_type (false, type);
4814 }
4815
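/* Illustrative sketch, not part of the original file: for an
   INTEGER_TYPE the unsigned variant keeps the precision, only the sign
   changes.  The helper name is hypothetical.  */

static tree
unsigned_variant_sketch (tree type)
{
  tree utype = gimple_unsigned_type (type);
  if (TREE_CODE (type) == INTEGER_TYPE)
    gcc_assert (TYPE_UNSIGNED (utype)
                && TYPE_PRECISION (utype) == TYPE_PRECISION (type));
  return utype;
}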
4816
4817 /* Return the type-based alias set for T, which may be an expression
4818 or a type. Return -1 if we don't do anything special. */
4819
4820 alias_set_type
4821 gimple_get_alias_set (tree t)
4822 {
4823 tree u;
4824
4825 /* Permit type-punning when accessing a union, provided the access
4826 is directly through the union. For example, this code does not
4827 permit taking the address of a union member and then storing
4828 through it. Even the type-punning allowed here is a GCC
4829 extension, albeit a common and useful one; the C standard says
4830 that such accesses have implementation-defined behavior. */
4831 for (u = t;
4832 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
4833 u = TREE_OPERAND (u, 0))
4834 if (TREE_CODE (u) == COMPONENT_REF
4835 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
4836 return 0;
4837
4838 /* That's all the expressions we handle specially. */
4839 if (!TYPE_P (t))
4840 return -1;
4841
4842 /* For convenience, follow the C standard when dealing with
4843 character types. Any object may be accessed via an lvalue that
4844 has character type. */
4845 if (t == char_type_node
4846 || t == signed_char_type_node
4847 || t == unsigned_char_type_node)
4848 return 0;
4849
4850 /* Allow aliasing between signed and unsigned variants of the same
4851 type. We treat the signed variant as canonical. */
4852 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
4853 {
4854 tree t1 = gimple_signed_type (t);
4855
4856 /* t1 == t can happen for boolean nodes which are always unsigned. */
4857 if (t1 != t)
4858 return get_alias_set (t1);
4859 }
4860
4861 return -1;
4862 }
4863
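/* Illustrative sketch, not part of the original file: alias set zero
   conflicts with every other alias set, so the cases gimple_get_alias_set
   maps to zero (character types and direct union accesses) may alias
   any other access.  The helper name is hypothetical.  */

static bool
alias_set_forced_to_zero_sketch (tree t)
{
  return gimple_get_alias_set (t) == 0;
}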
4864
4865 /* Data structure used to count the number of dereferences to PTR
4866 inside an expression. */
4867 struct count_ptr_d
4868 {
4869 tree ptr;
4870 unsigned num_stores;
4871 unsigned num_loads;
4872 };
4873
4874 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
4875 MEM_REF nodes whose base is the pointer passed in DATA. */
4876
4877 static tree
4878 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4879 {
4880 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4881 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4882
4883 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
4884 pointer 'ptr' is *not* dereferenced, it is simply used to compute
4885 the address of 'fld' as 'ptr + offsetof(fld)'. */
4886 if (TREE_CODE (*tp) == ADDR_EXPR)
4887 {
4888 *walk_subtrees = 0;
4889 return NULL_TREE;
4890 }
4891
4892 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
4893 {
4894 if (wi_p->is_lhs)
4895 count_p->num_stores++;
4896 else
4897 count_p->num_loads++;
4898 }
4899
4900 return NULL_TREE;
4901 }
4902
4903 /* Count the number of direct and indirect uses for pointer PTR in
4904 statement STMT. The number of direct uses is stored in
4905 *NUM_USES_P. Indirect references are counted separately depending
4906 on whether they are store or load operations. The counts are
4907 stored in *NUM_STORES_P and *NUM_LOADS_P. */
4908
4909 void
4910 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4911 unsigned *num_loads_p, unsigned *num_stores_p)
4912 {
4913 ssa_op_iter i;
4914 tree use;
4915
4916 *num_uses_p = 0;
4917 *num_loads_p = 0;
4918 *num_stores_p = 0;
4919
4920 /* Find out the total number of uses of PTR in STMT. */
4921 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4922 if (use == ptr)
4923 (*num_uses_p)++;
4924
4925 /* Now count the number of indirect references to PTR. This is
4926 truly awful, but we don't have much choice. There are no parent
4927 pointers inside INDIRECT_REFs, so an expression like
4928 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
4929 find all the indirect and direct uses of x_1 inside. The only
4930 shortcut we can take is the fact that GIMPLE only allows
4931 INDIRECT_REFs inside the expressions below. */
4932 if (is_gimple_assign (stmt)
4933 || gimple_code (stmt) == GIMPLE_RETURN
4934 || gimple_code (stmt) == GIMPLE_ASM
4935 || is_gimple_call (stmt))
4936 {
4937 struct walk_stmt_info wi;
4938 struct count_ptr_d count;
4939
4940 count.ptr = ptr;
4941 count.num_stores = 0;
4942 count.num_loads = 0;
4943
4944 memset (&wi, 0, sizeof (wi));
4945 wi.info = &count;
4946 walk_gimple_op (stmt, count_ptr_derefs, &wi);
4947
4948 *num_stores_p = count.num_stores;
4949 *num_loads_p = count.num_loads;
4950 }
4951
4952 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
4953 }
4954
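/* Illustrative sketch, not part of the original file: a hypothetical
   predicate built on count_uses_and_derefs that checks whether PTR is
   actually dereferenced (loaded from or stored through) by STMT.  */

static bool
ptr_is_dereferenced_by_sketch (tree ptr, gimple stmt)
{
  unsigned num_uses, num_loads, num_stores;
  count_uses_and_derefs (ptr, stmt, &num_uses, &num_loads, &num_stores);
  return num_loads + num_stores > 0;
}
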
4955 /* From a tree operand OP return the base of a load or store operation
4956 or NULL_TREE if OP is not a load or a store. */
4957
4958 static tree
4959 get_base_loadstore (tree op)
4960 {
4961 while (handled_component_p (op))
4962 op = TREE_OPERAND (op, 0);
4963 if (DECL_P (op)
4964 || INDIRECT_REF_P (op)
4965 || TREE_CODE (op) == MEM_REF
4966 || TREE_CODE (op) == TARGET_MEM_REF)
4967 return op;
4968 return NULL_TREE;
4969 }
4970
4971 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
4972 VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
4973 passing the STMT, the base of the operand and DATA to it. The base
4974 will be either a decl, an indirect reference (including TARGET_MEM_REF)
4975 or the argument of an address expression.
4976 Returns the results of these callbacks or'ed. */
4977
4978 bool
4979 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
4980 bool (*visit_load)(gimple, tree, void *),
4981 bool (*visit_store)(gimple, tree, void *),
4982 bool (*visit_addr)(gimple, tree, void *))
4983 {
4984 bool ret = false;
4985 unsigned i;
4986 if (gimple_assign_single_p (stmt))
4987 {
4988 tree lhs, rhs;
4989 if (visit_store)
4990 {
4991 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
4992 if (lhs)
4993 ret |= visit_store (stmt, lhs, data);
4994 }
4995 rhs = gimple_assign_rhs1 (stmt);
4996 while (handled_component_p (rhs))
4997 rhs = TREE_OPERAND (rhs, 0);
4998 if (visit_addr)
4999 {
5000 if (TREE_CODE (rhs) == ADDR_EXPR)
5001 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5002 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5003 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5004 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5005 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5006 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5007 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5008 0), data);
5009 lhs = gimple_assign_lhs (stmt);
5010 if (TREE_CODE (lhs) == TARGET_MEM_REF
5011 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5012 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5013 }
5014 if (visit_load)
5015 {
5016 rhs = get_base_loadstore (rhs);
5017 if (rhs)
5018 ret |= visit_load (stmt, rhs, data);
5019 }
5020 }
5021 else if (visit_addr
5022 && (is_gimple_assign (stmt)
5023 || gimple_code (stmt) == GIMPLE_COND))
5024 {
5025 for (i = 0; i < gimple_num_ops (stmt); ++i)
5026 if (gimple_op (stmt, i)
5027 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
5028 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
5029 }
5030 else if (is_gimple_call (stmt))
5031 {
5032 if (visit_store)
5033 {
5034 tree lhs = gimple_call_lhs (stmt);
5035 if (lhs)
5036 {
5037 lhs = get_base_loadstore (lhs);
5038 if (lhs)
5039 ret |= visit_store (stmt, lhs, data);
5040 }
5041 }
5042 if (visit_load || visit_addr)
5043 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5044 {
5045 tree rhs = gimple_call_arg (stmt, i);
5046 if (visit_addr
5047 && TREE_CODE (rhs) == ADDR_EXPR)
5048 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5049 else if (visit_load)
5050 {
5051 rhs = get_base_loadstore (rhs);
5052 if (rhs)
5053 ret |= visit_load (stmt, rhs, data);
5054 }
5055 }
5056 if (visit_addr
5057 && gimple_call_chain (stmt)
5058 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5059 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5060 data);
5061 if (visit_addr
5062 && gimple_call_return_slot_opt_p (stmt)
5063 && gimple_call_lhs (stmt) != NULL_TREE
5064 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5065 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5066 }
5067 else if (gimple_code (stmt) == GIMPLE_ASM)
5068 {
5069 unsigned noutputs;
5070 const char *constraint;
5071 const char **oconstraints;
5072 bool allows_mem, allows_reg, is_inout;
5073 noutputs = gimple_asm_noutputs (stmt);
5074 oconstraints = XALLOCAVEC (const char *, noutputs);
5075 if (visit_store || visit_addr)
5076 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5077 {
5078 tree link = gimple_asm_output_op (stmt, i);
5079 tree op = get_base_loadstore (TREE_VALUE (link));
5080 if (op && visit_store)
5081 ret |= visit_store (stmt, op, data);
5082 if (visit_addr)
5083 {
5084 constraint = TREE_STRING_POINTER
5085 (TREE_VALUE (TREE_PURPOSE (link)));
5086 oconstraints[i] = constraint;
5087 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5088 &allows_reg, &is_inout);
5089 if (op && !allows_reg && allows_mem)
5090 ret |= visit_addr (stmt, op, data);
5091 }
5092 }
5093 if (visit_load || visit_addr)
5094 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5095 {
5096 tree link = gimple_asm_input_op (stmt, i);
5097 tree op = TREE_VALUE (link);
5098 if (visit_addr
5099 && TREE_CODE (op) == ADDR_EXPR)
5100 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5101 else if (visit_load || visit_addr)
5102 {
5103 op = get_base_loadstore (op);
5104 if (op)
5105 {
5106 if (visit_load)
5107 ret |= visit_load (stmt, op, data);
5108 if (visit_addr)
5109 {
5110 constraint = TREE_STRING_POINTER
5111 (TREE_VALUE (TREE_PURPOSE (link)));
5112 parse_input_constraint (&constraint, 0, 0, noutputs,
5113 0, oconstraints,
5114 &allows_mem, &allows_reg);
5115 if (!allows_reg && allows_mem)
5116 ret |= visit_addr (stmt, op, data);
5117 }
5118 }
5119 }
5120 }
5121 }
5122 else if (gimple_code (stmt) == GIMPLE_RETURN)
5123 {
5124 tree op = gimple_return_retval (stmt);
5125 if (op)
5126 {
5127 if (visit_addr
5128 && TREE_CODE (op) == ADDR_EXPR)
5129 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5130 else if (visit_load)
5131 {
5132 op = get_base_loadstore (op);
5133 if (op)
5134 ret |= visit_load (stmt, op, data);
5135 }
5136 }
5137 }
5138 else if (visit_addr
5139 && gimple_code (stmt) == GIMPLE_PHI)
5140 {
5141 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5142 {
5143 tree op = PHI_ARG_DEF (stmt, i);
5144 if (TREE_CODE (op) == ADDR_EXPR)
5145 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5146 }
5147 }
5148
5149 return ret;
5150 }
5151
5152 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5153 should make a faster clone for this case. */
5154
5155 bool
5156 walk_stmt_load_store_ops (gimple stmt, void *data,
5157 bool (*visit_load)(gimple, tree, void *),
5158 bool (*visit_store)(gimple, tree, void *))
5159 {
5160 return walk_stmt_load_store_addr_ops (stmt, data,
5161 visit_load, visit_store, NULL);
5162 }
5163
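/* Illustrative sketch, not part of the original file: a minimal
   visit_load callback counting the memory loads of a statement via
   walk_stmt_load_store_ops.  Both helper names are hypothetical.  */

static bool
count_load_sketch (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                   void *data)
{
  ++*(unsigned *) data;
  return false;
}

static unsigned
count_loads_in_stmt_sketch (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, count_load_sketch, NULL);
  return n;
}
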
5164 /* Helper for gimple_ior_addresses_taken. */
5165
5166 static bool
5167 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5168 tree addr, void *data)
5169 {
5170 bitmap addresses_taken = (bitmap)data;
5171 addr = get_base_address (addr);
5172 if (addr
5173 && DECL_P (addr))
5174 {
5175 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5176 return true;
5177 }
5178 return false;
5179 }
5180
5181 /* Set the bit for the uid of all decls that have their address taken
5182 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5183 were any in this stmt. */
5184
5185 bool
5186 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5187 {
5188 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5189 gimple_ior_addresses_taken_1);
5190 }
5191
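/* Illustrative sketch, not part of the original file: collecting the
   address-taken decls of a whole basic block into a single bitmap,
   using the gimple statement iterators declared in gimple.h.  The
   helper name is hypothetical.  */

static bitmap
bb_addresses_taken_sketch (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));

  return addresses_taken;
}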
5192
5193 /* Return a printable name for symbol DECL. */
5194
5195 const char *
5196 gimple_decl_printable_name (tree decl, int verbosity)
5197 {
5198 if (!DECL_NAME (decl))
5199 return NULL;
5200
5201 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5202 {
5203 const char *str, *mangled_str;
5204 int dmgl_opts = DMGL_NO_OPTS;
5205
5206 if (verbosity >= 2)
5207 {
5208 dmgl_opts = DMGL_VERBOSE
5209 | DMGL_ANSI
5210 | DMGL_GNU_V3
5211 | DMGL_RET_POSTFIX;
5212 if (TREE_CODE (decl) == FUNCTION_DECL)
5213 dmgl_opts |= DMGL_PARAMS;
5214 }
5215
5216 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5217 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5218 return (str) ? str : mangled_str;
5219 }
5220
5221 return IDENTIFIER_POINTER (DECL_NAME (decl));
5222 }
5223
5224 /* Return true when STMT is a builtin call to CODE. */
5225
5226 bool
5227 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5228 {
5229 tree fndecl;
5230 return (is_gimple_call (stmt)
5231 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5232 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5233 && DECL_FUNCTION_CODE (fndecl) == code);
5234 }
5235
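/* Illustrative sketch, not part of the original file: typical use of
   gimple_call_builtin_p when a pass wants to special-case calls to a
   particular builtin such as memcpy.  The helper name is hypothetical.  */

static bool
stmt_is_memcpy_sketch (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}
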
5236 /* Return true if STMT clobbers memory. STMT is required to be a
5237 GIMPLE_ASM. */
5238
5239 bool
5240 gimple_asm_clobbers_memory_p (const_gimple stmt)
5241 {
5242 unsigned i;
5243
5244 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5245 {
5246 tree op = gimple_asm_clobber_op (stmt, i);
5247 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5248 return true;
5249 }
5250
5251 return false;
5252 }
5253 #include "gt-gimple.h"