/* Gimple IR support functions.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"


/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
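
/* Worked example (illustrative note, not from the original sources):
   for a two-operand tuple such as a simple GIMPLE_ASSIGN, the size
   computed above is

     gimple_size (GIMPLE_ASSIGN) + sizeof (tree) * (2 - 1)

   i.e. the base structure plus one extra operand slot; the first slot
   is already provided by the trailing one-element operand array that
   gimple_ops_offset_ accounts for.  */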

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, vec<tree> args)
{
  unsigned i;
  unsigned nargs = args.length ();
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
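
/* Usage sketch (illustrative, not part of the original file): emitting
   a call `tmp = foo (a, b)' and appending it to a sequence, where
   FOO_DECL, TMP, A, B and SEQ are hypothetical locals built elsewhere:

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);
     gimple_seq_add_stmt (&seq, call);

   gimple_build_call_vec and gimple_build_call_valist take the same
   route through gimple_build_call_1.  */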


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
  unsigned i, nargs;
  gimple call;

  nargs = args.length ();
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
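
/* Illustrative example (not in the original sources): for EXPR
   `a + b' this yields *SUBCODE_P == PLUS_EXPR, *OP1_P == a,
   *OP2_P == b and *OP3_P == NULL_TREE (class GIMPLE_BINARY_RHS);
   for a bare SSA name or constant, the whole expression lands in
   *OP1_P (class GIMPLE_SINGLE_RHS).  */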


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side; it may be a unary, binary or ternary
   expression, or a single operand.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
                                       PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
                              tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
                              tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
                                       PASS_MEM_STAT);
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
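
/* Usage sketch (illustrative; LHS, A, B and SEQ are hypothetical
   trees/locals): two common ways to create `lhs = a + b':

     gimple s = gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);

   or, starting from a GENERIC expression and gimplifying on the fly:

     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), a, b);
     gimple s2 = gimplify_assign (lhs, sum, &seq);
*/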


/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
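
/* Usage sketch (illustrative; LHS, RHS and SEQ are hypothetical):
   lowering `if (lhs < rhs)' into explicit control flow:

     tree t_label = create_artificial_label (UNKNOWN_LOCATION);
     tree f_label = create_artificial_label (UNKNOWN_LOCATION);
     gimple cond = gimple_build_cond (LT_EXPR, lhs, rhs, t_label, f_label);
     gimple_seq_add_stmt (&seq, cond);
     gimple_seq_add_stmt (&seq, gimple_build_label (t_label));
*/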


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
                      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
                      vec<tree, va_gc> *labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          vec_safe_length (inputs),
                          vec_safe_length (outputs),
                          vec_safe_length (clobbers),
                          vec_safe_length (labels));

  for (i = 0; i < vec_safe_length (inputs); i++)
    gimple_asm_set_input_op (p, i, (*inputs)[i]);

  for (i = 0; i < vec_safe_length (outputs); i++)
    gimple_asm_set_output_op (p, i, (*outputs)[i]);

  for (i = 0; i < vec_safe_length (clobbers); i++)
    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);

  for (i = 0; i < vec_safe_length (labels); i++)
    gimple_asm_set_label_op (p, i, (*labels)[i]);

  return p;
}
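
/* Usage sketch (illustrative, under the assumption that clobbers are
   plain tree lists holding a STRING_CST): building the equivalent of a
   clobber-only `__asm__ __volatile__ ("" : : : "memory")':

     vec<tree, va_gc> *clobbers = NULL;
     vec_safe_push (clobbers,
                    build_tree_list (NULL_TREE, build_string (6, "memory")));
     gimple s = gimple_build_asm_vec ("", NULL, NULL, clobbers, NULL);
     gimple_asm_set_volatile (s, true);
*/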

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + 1 + nlabels);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
  unsigned i, nlabels = args.length ();

  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + 1, args[i]);

  return p;
}
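
/* Usage sketch (illustrative; IDX, DEFAULT_CASE and the CASE_* trees
   are hypothetical CASE_LABEL_EXPRs built elsewhere, e.g. with
   build_case_label):

     vec<tree> labels;
     labels.create (2);
     labels.quick_push (case_one);
     labels.quick_push (case_two);
     gimple s = gimple_build_switch (idx, default_case, labels);
     labels.release ();
*/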

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES are any of the OMP loop construct's clauses: private,
   firstprivate, lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are the size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct.  */

gimple
gimple_build_omp_taskgroup (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_TARGET statement.

   BODY is the sequence of statements that will be executed.
   KIND is the kind of the target region.
   CLAUSES are any of the OMP target construct's clauses.  */

gimple
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_target_set_clauses (p, clauses);
  gimple_omp_target_set_kind (p, kind);

  return p;
}


/* Build a GIMPLE_OMP_TEAMS statement.

   BODY is the sequence of statements that will be executed.
   CLAUSES are any of the OMP teams construct's clauses.  */

gimple
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_teams_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def; OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  get_tree_code_name (subcode),
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
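
/* Usage sketch (illustrative): sequences start out as NULL and are
   allocated lazily by the adders above, so a typical build loop is

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_seq (&seq, other_seq);

   where OTHER_SEQ is a hypothetical second sequence (possibly NULL,
   in which case the append is a no-op).  */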


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
                     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
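
/* Usage sketch (illustrative, not part of the original file): a
   minimal statement callback that counts GIMPLE_CALLs in a sequence.
   COUNT_CALLS_CB and the caller below are hypothetical:

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops,
                     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops = true;
       return NULL_TREE;
     }

     unsigned n = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);
*/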


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS is of a non-renamable type or is a register variable,
             we may use a COMPONENT_REF on the LHS.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TARGET:
      ret = walk_tree (gimple_omp_target_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
                                 wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body in either unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
            || (gimple_call_fndecl (c1)
                && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
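
/* Illustrative reading of a fnspec string (sketch, not from the
   original sources): for the hypothetical string "1rW",
   gimple_call_return_flags yields ERF_RETURNS_ARG with index 0 (the
   call returns its first argument); gimple_call_arg_flags (stmt, 0)
   yields EAF_NOCLOBBER | EAF_NOESCAPE ('r': read-only, non-escaping);
   and gimple_call_arg_flags (stmt, 1) yields
   EAF_DIRECT | EAF_NOESCAPE ('W': written to directly, but does not
   escape).  */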
2045 }
2046
2047
2048 /* Return true if GS is a copy assignment. */
2049
2050 bool
2051 gimple_assign_copy_p (gimple gs)
2052 {
2053 return (gimple_assign_single_p (gs)
2054 && is_gimple_val (gimple_op (gs, 1)));
2055 }
2056
2057
2058 /* Return true if GS is an SSA_NAME copy assignment. */
2059
2060 bool
2061 gimple_assign_ssa_name_copy_p (gimple gs)
2062 {
2063 return (gimple_assign_single_p (gs)
2064 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2065 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2066 }
2067
2068
2069 /* Return true if GS is an assignment with a unary RHS, but the
2070 operator has no effect on the assigned value. The logic is adapted
2071 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2072 instances in which STRIP_NOPS was previously applied to the RHS of
2073 an assignment.
2074
2075 NOTE: In the use cases that led to the creation of this function
2076 and of gimple_assign_single_p, it is typical to test for either
2077 condition and to proceed in the same manner. In each case, the
2078 assigned value is represented by the single RHS operand of the
2079 assignment. I suspect there may be cases where gimple_assign_copy_p,
2080 gimple_assign_single_p, or equivalent logic is used where a similar
2081 treatment of unary NOPs is appropriate. */
2082
2083 bool
2084 gimple_assign_unary_nop_p (gimple gs)
2085 {
2086 return (is_gimple_assign (gs)
2087 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2088 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2089 && gimple_assign_rhs1 (gs) != error_mark_node
2090 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2091 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2092 }
2093
2094 /* Set BB to be the basic block holding G. */
2095
2096 void
2097 gimple_set_bb (gimple stmt, basic_block bb)
2098 {
2099 stmt->gsbase.bb = bb;
2100
2101 /* If the statement is a label, add the label to the block-to-labels map
2102 so that we can speed up edge creation for GIMPLE_GOTOs. */
2103 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2104 {
2105 tree t;
2106 int uid;
2107
2108 t = gimple_label_label (stmt);
2109 uid = LABEL_DECL_UID (t);
2110 if (uid == -1)
2111 {
2112 unsigned old_len = vec_safe_length (label_to_block_map);
2113 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2114 if (old_len <= (unsigned) uid)
2115 {
2116 unsigned new_len = 3 * uid / 2 + 1;
2117
2118 vec_safe_grow_cleared (label_to_block_map, new_len);
2119 }
2120 }
2121
2122 (*label_to_block_map)[uid] = bb;
2123 }
2124 }
2125
2126
2127 /* Modify the RHS of the assignment pointed-to by GSI using the
2128 operands in the expression tree EXPR.
2129
2130 NOTE: The statement pointed-to by GSI may be reallocated if it
2131 did not have enough operand slots.
2132
2133 This function is useful to convert an existing tree expression into
2134 the flat representation used for the RHS of a GIMPLE assignment.
2135 It will reallocate memory as needed to expand or shrink the number
2136 of operand slots needed to represent EXPR.
2137
2138 NOTE: If you find yourself building a tree and then calling this
2139 function, you are most certainly doing it the slow way. It is much
2140 better to build a new assignment or to use the function
2141 gimple_assign_set_rhs_with_ops, which does not require an
2142 expression tree to be built. */
2143
2144 void
2145 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2146 {
2147 enum tree_code subcode;
2148 tree op1, op2, op3;
2149
2150 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2151 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2152 }
2153
2154
2155 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2156 operands OP1, OP2 and OP3.
2157
2158 NOTE: The statement pointed-to by GSI may be reallocated if it
2159 did not have enough operand slots. */
2160
2161 void
2162 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2163 tree op1, tree op2, tree op3)
2164 {
2165 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2166 gimple stmt = gsi_stmt (*gsi);
2167
2168 /* If the new CODE needs more operands, allocate a new statement. */
2169 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2170 {
2171 tree lhs = gimple_assign_lhs (stmt);
2172 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2173 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2174 gimple_init_singleton (new_stmt);
2175 gsi_replace (gsi, new_stmt, true);
2176 stmt = new_stmt;
2177
2178 /* The LHS needs to be reset as this also changes the SSA name
2179 on the LHS. */
2180 gimple_assign_set_lhs (stmt, lhs);
2181 }
2182
2183 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2184 gimple_set_subcode (stmt, code);
2185 gimple_assign_set_rhs1 (stmt, op1);
2186 if (new_rhs_ops > 1)
2187 gimple_assign_set_rhs2 (stmt, op2);
2188 if (new_rhs_ops > 2)
2189 gimple_assign_set_rhs3 (stmt, op3);
2190 }
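
/* Because of the possible reallocation, callers must reload the
   statement from the iterator afterwards.  A minimal sketch, assuming
   GSI points at an assignment and OP1/OP2 are valid GIMPLE operands:

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, op1, op2);
     stmt = gsi_stmt (gsi);     /* May differ from the old STMT.  */
     update_stmt (stmt);
*/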
2191
2192
2193 /* Return the LHS of a statement that performs an assignment,
2194 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2195 for a call to a function that returns no value, or for a
2196 statement other than an assignment or a call. */
2197
2198 tree
2199 gimple_get_lhs (const_gimple stmt)
2200 {
2201 enum gimple_code code = gimple_code (stmt);
2202
2203 if (code == GIMPLE_ASSIGN)
2204 return gimple_assign_lhs (stmt);
2205 else if (code == GIMPLE_CALL)
2206 return gimple_call_lhs (stmt);
2207 else
2208 return NULL_TREE;
2209 }
2210
2211
2212 /* Set the LHS of a statement that performs an assignment,
2213 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2214
2215 void
2216 gimple_set_lhs (gimple stmt, tree lhs)
2217 {
2218 enum gimple_code code = gimple_code (stmt);
2219
2220 if (code == GIMPLE_ASSIGN)
2221 gimple_assign_set_lhs (stmt, lhs);
2222 else if (code == GIMPLE_CALL)
2223 gimple_call_set_lhs (stmt, lhs);
2224 else
2225 gcc_unreachable ();
2226 }
2227
2228
2229 /* Return a deep copy of statement STMT. All the operands from STMT
2230 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2231 and VUSE operand arrays are set to empty in the new copy. The new
2232 copy isn't part of any sequence. */
2233
2234 gimple
2235 gimple_copy (gimple stmt)
2236 {
2237 enum gimple_code code = gimple_code (stmt);
2238 unsigned num_ops = gimple_num_ops (stmt);
2239 gimple copy = gimple_alloc (code, num_ops);
2240 unsigned i;
2241
2242 /* Shallow copy all the fields from STMT. */
2243 memcpy (copy, stmt, gimple_size (code));
2244 gimple_init_singleton (copy);
2245
2246 /* If STMT has sub-statements, deep-copy them as well. */
2247 if (gimple_has_substatements (stmt))
2248 {
2249 gimple_seq new_seq;
2250 tree t;
2251
2252 switch (gimple_code (stmt))
2253 {
2254 case GIMPLE_BIND:
2255 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2256 gimple_bind_set_body (copy, new_seq);
2257 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2258 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2259 break;
2260
2261 case GIMPLE_CATCH:
2262 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2263 gimple_catch_set_handler (copy, new_seq);
2264 t = unshare_expr (gimple_catch_types (stmt));
2265 gimple_catch_set_types (copy, t);
2266 break;
2267
2268 case GIMPLE_EH_FILTER:
2269 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2270 gimple_eh_filter_set_failure (copy, new_seq);
2271 t = unshare_expr (gimple_eh_filter_types (stmt));
2272 gimple_eh_filter_set_types (copy, t);
2273 break;
2274
2275 case GIMPLE_EH_ELSE:
2276 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2277 gimple_eh_else_set_n_body (copy, new_seq);
2278 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2279 gimple_eh_else_set_e_body (copy, new_seq);
2280 break;
2281
2282 case GIMPLE_TRY:
2283 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2284 gimple_try_set_eval (copy, new_seq);
2285 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2286 gimple_try_set_cleanup (copy, new_seq);
2287 break;
2288
2289 case GIMPLE_OMP_FOR:
2290 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2291 gimple_omp_for_set_pre_body (copy, new_seq);
2292 t = unshare_expr (gimple_omp_for_clauses (stmt));
2293 gimple_omp_for_set_clauses (copy, t);
2294 copy->gimple_omp_for.iter
2295 = ggc_alloc_vec_gimple_omp_for_iter
2296 (gimple_omp_for_collapse (stmt));
2297 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2298 {
2299 gimple_omp_for_set_cond (copy, i,
2300 gimple_omp_for_cond (stmt, i));
2301 gimple_omp_for_set_index (copy, i,
2302 gimple_omp_for_index (stmt, i));
2303 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2304 gimple_omp_for_set_initial (copy, i, t);
2305 t = unshare_expr (gimple_omp_for_final (stmt, i));
2306 gimple_omp_for_set_final (copy, i, t);
2307 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2308 gimple_omp_for_set_incr (copy, i, t);
2309 }
2310 goto copy_omp_body;
2311
2312 case GIMPLE_OMP_PARALLEL:
2313 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2314 gimple_omp_parallel_set_clauses (copy, t);
2315 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2316 gimple_omp_parallel_set_child_fn (copy, t);
2317 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2318 gimple_omp_parallel_set_data_arg (copy, t);
2319 goto copy_omp_body;
2320
2321 case GIMPLE_OMP_TASK:
2322 t = unshare_expr (gimple_omp_task_clauses (stmt));
2323 gimple_omp_task_set_clauses (copy, t);
2324 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2325 gimple_omp_task_set_child_fn (copy, t);
2326 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2327 gimple_omp_task_set_data_arg (copy, t);
2328 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2329 gimple_omp_task_set_copy_fn (copy, t);
2330 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2331 gimple_omp_task_set_arg_size (copy, t);
2332 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2333 gimple_omp_task_set_arg_align (copy, t);
2334 goto copy_omp_body;
2335
2336 case GIMPLE_OMP_CRITICAL:
2337 t = unshare_expr (gimple_omp_critical_name (stmt));
2338 gimple_omp_critical_set_name (copy, t);
2339 goto copy_omp_body;
2340
2341 case GIMPLE_OMP_SECTIONS:
2342 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2343 gimple_omp_sections_set_clauses (copy, t);
2344 t = unshare_expr (gimple_omp_sections_control (stmt));
2345 gimple_omp_sections_set_control (copy, t);
2346 /* FALLTHRU */
2347
2348 case GIMPLE_OMP_SINGLE:
2349 case GIMPLE_OMP_TARGET:
2350 case GIMPLE_OMP_TEAMS:
2351 case GIMPLE_OMP_SECTION:
2352 case GIMPLE_OMP_MASTER:
2353 case GIMPLE_OMP_TASKGROUP:
2354 case GIMPLE_OMP_ORDERED:
2355 copy_omp_body:
2356 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2357 gimple_omp_set_body (copy, new_seq);
2358 break;
2359
2360 case GIMPLE_TRANSACTION:
2361 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2362 gimple_transaction_set_body (copy, new_seq);
2363 break;
2364
2365 case GIMPLE_WITH_CLEANUP_EXPR:
2366 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2367 gimple_wce_set_cleanup (copy, new_seq);
2368 break;
2369
2370 default:
2371 gcc_unreachable ();
2372 }
2373 }
2374
2375 /* Make copy of operands. */
2376 for (i = 0; i < num_ops; i++)
2377 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2378
2379 if (gimple_has_mem_ops (stmt))
2380 {
2381 gimple_set_vdef (copy, gimple_vdef (stmt));
2382 gimple_set_vuse (copy, gimple_vuse (stmt));
2383 }
2384
2385 /* Clear out SSA operand vectors on COPY. */
2386 if (gimple_has_ops (stmt))
2387 {
2388 gimple_set_use_ops (copy, NULL);
2389
2390 /* SSA operands need to be updated. */
2391 gimple_set_modified (copy, true);
2392 }
2393
2394 return copy;
2395 }
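
/* A sketch of typical use (hypothetical placement): duplicate a
   statement for insertion elsewhere.  The copy shares no operand
   trees with the original and carries no SSA operand caches, so it
   must be fed to the operand scanner before use:

     gimple dup = gimple_copy (stmt);
     gsi_insert_before (&gsi, dup, GSI_SAME_STMT);
     update_stmt (dup);
*/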
2396
2397
2398 /* Return true if statement S has side-effects. We consider a
2399 statement to have side effects if:
2400
2401 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2402 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2403
2404 bool
2405 gimple_has_side_effects (const_gimple s)
2406 {
2407 if (is_gimple_debug (s))
2408 return false;
2409
2410 /* We don't have to scan the arguments to check for
2411 volatile arguments, though, at present, we still
2412 do a scan to check for TREE_SIDE_EFFECTS. */
2413 if (gimple_has_volatile_ops (s))
2414 return true;
2415
2416 if (gimple_code (s) == GIMPLE_ASM
2417 && gimple_asm_volatile_p (s))
2418 return true;
2419
2420 if (is_gimple_call (s))
2421 {
2422 int flags = gimple_call_flags (s);
2423
2424 /* An infinite loop is considered a side effect. */
2425 if (!(flags & (ECF_CONST | ECF_PURE))
2426 || (flags & ECF_LOOPING_CONST_OR_PURE))
2427 return true;
2428
2429 return false;
2430 }
2431
2432 return false;
2433 }
2434
2435 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2436 Return true if S can trap. When INCLUDE_MEM is true, check whether
2437 the memory operations could trap. When INCLUDE_STORES is true and
2438 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2439
2440 bool
2441 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2442 {
2443 tree t, div = NULL_TREE;
2444 enum tree_code op;
2445
2446 if (include_mem)
2447 {
2448 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2449
2450 for (i = start; i < gimple_num_ops (s); i++)
2451 if (tree_could_trap_p (gimple_op (s, i)))
2452 return true;
2453 }
2454
2455 switch (gimple_code (s))
2456 {
2457 case GIMPLE_ASM:
2458 return gimple_asm_volatile_p (s);
2459
2460 case GIMPLE_CALL:
2461 t = gimple_call_fndecl (s);
2462 /* Assume that calls to weak functions may trap. */
2463 if (!t || !DECL_P (t) || DECL_WEAK (t))
2464 return true;
2465 return false;
2466
2467 case GIMPLE_ASSIGN:
2468 t = gimple_expr_type (s);
2469 op = gimple_assign_rhs_code (s);
2470 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2471 div = gimple_assign_rhs2 (s);
2472 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2473 (INTEGRAL_TYPE_P (t)
2474 && TYPE_OVERFLOW_TRAPS (t)),
2475 div));
2476
2477 default:
2478 break;
2479 }
2480
2481 return false;
2482 }
2483
2484 /* Return true if statement S can trap. */
2485
2486 bool
2487 gimple_could_trap_p (gimple s)
2488 {
2489 return gimple_could_trap_p_1 (s, true, true);
2490 }
2491
2492 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2493
2494 bool
2495 gimple_assign_rhs_could_trap_p (gimple s)
2496 {
2497 gcc_assert (is_gimple_assign (s));
2498 return gimple_could_trap_p_1 (s, true, false);
2499 }
2500
2501
2502 /* Print allocation statistics for the GIMPLE statements generated. */
2503
2504 void
2505 dump_gimple_statistics (void)
2506 {
2507 int i, total_tuples = 0, total_bytes = 0;
2508
2509 if (! GATHER_STATISTICS)
2510 {
2511 fprintf (stderr, "No gimple statistics\n");
2512 return;
2513 }
2514
2515 fprintf (stderr, "\nGIMPLE statements\n");
2516 fprintf (stderr, "Kind Stmts Bytes\n");
2517 fprintf (stderr, "---------------------------------------\n");
2518 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2519 {
2520 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2521 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2522 total_tuples += gimple_alloc_counts[i];
2523 total_bytes += gimple_alloc_sizes[i];
2524 }
2525 fprintf (stderr, "---------------------------------------\n");
2526 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2527 fprintf (stderr, "---------------------------------------\n");
2528 }
2529
2530
2531 /* Return the number of operands needed on the RHS of a GIMPLE
2532 assignment for an expression with tree code CODE. */
2533
2534 unsigned
2535 get_gimple_rhs_num_ops (enum tree_code code)
2536 {
2537 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2538
2539 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2540 return 1;
2541 else if (rhs_class == GIMPLE_BINARY_RHS)
2542 return 2;
2543 else if (rhs_class == GIMPLE_TERNARY_RHS)
2544 return 3;
2545 else
2546 gcc_unreachable ();
2547 }
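
/* For instance, NEGATE_EXPR (tcc_unary) maps to GIMPLE_UNARY_RHS and
   needs one operand, PLUS_EXPR (tcc_binary) maps to GIMPLE_BINARY_RHS
   and needs two, and FMA_EXPR maps to GIMPLE_TERNARY_RHS and needs
   three, per the table built below.  */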
2548
2549 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2550 (unsigned char) \
2551 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2552 : ((TYPE) == tcc_binary \
2553 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2554 : ((TYPE) == tcc_constant \
2555 || (TYPE) == tcc_declaration \
2556 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2557 : ((SYM) == TRUTH_AND_EXPR \
2558 || (SYM) == TRUTH_OR_EXPR \
2559 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2560 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2561 : ((SYM) == COND_EXPR \
2562 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2563 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2564 || (SYM) == DOT_PROD_EXPR \
2565 || (SYM) == REALIGN_LOAD_EXPR \
2566 || (SYM) == VEC_COND_EXPR \
2567 || (SYM) == VEC_PERM_EXPR \
2568 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2569 : ((SYM) == CONSTRUCTOR \
2570 || (SYM) == OBJ_TYPE_REF \
2571 || (SYM) == ASSERT_EXPR \
2572 || (SYM) == ADDR_EXPR \
2573 || (SYM) == WITH_SIZE_EXPR \
2574 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2575 : GIMPLE_INVALID_RHS),
2576 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2577
2578 const unsigned char gimple_rhs_class_table[] = {
2579 #include "all-tree.def"
2580 };
2581
2582 #undef DEFTREECODE
2583 #undef END_OF_BASE_TREE_CODES
2584
2585 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2586
2587 /* Validation of GIMPLE expressions. */
2588
2589 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2590
2591 bool
2592 is_gimple_lvalue (tree t)
2593 {
2594 return (is_gimple_addressable (t)
2595 || TREE_CODE (t) == WITH_SIZE_EXPR
2596 /* These are complex lvalues, but don't have addresses, so they
2597 go here. */
2598 || TREE_CODE (t) == BIT_FIELD_REF);
2599 }
2600
2601 /* Return true if T is a GIMPLE condition. */
2602
2603 bool
2604 is_gimple_condexpr (tree t)
2605 {
2606 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2607 && !tree_could_throw_p (t)
2608 && is_gimple_val (TREE_OPERAND (t, 0))
2609 && is_gimple_val (TREE_OPERAND (t, 1))));
2610 }
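
/* For example, 'a_1' and 'a_1 < b_2' are valid GIMPLE conditions,
   whereas '(a_1 < b_2) < c_3' is not, since the operands of the
   comparison must themselves be GIMPLE values.  */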
2611
2612 /* Return true if T is something whose address can be taken. */
2613
2614 bool
2615 is_gimple_addressable (tree t)
2616 {
2617 return (is_gimple_id (t) || handled_component_p (t)
2618 || TREE_CODE (t) == MEM_REF);
2619 }
2620
2621 /* Return true if T is a valid gimple constant. */
2622
2623 bool
2624 is_gimple_constant (const_tree t)
2625 {
2626 switch (TREE_CODE (t))
2627 {
2628 case INTEGER_CST:
2629 case REAL_CST:
2630 case FIXED_CST:
2631 case STRING_CST:
2632 case COMPLEX_CST:
2633 case VECTOR_CST:
2634 return true;
2635
2636 default:
2637 return false;
2638 }
2639 }
2640
2641 /* Return true if T is a gimple address. */
2642
2643 bool
2644 is_gimple_address (const_tree t)
2645 {
2646 tree op;
2647
2648 if (TREE_CODE (t) != ADDR_EXPR)
2649 return false;
2650
2651 op = TREE_OPERAND (t, 0);
2652 while (handled_component_p (op))
2653 {
2654 if ((TREE_CODE (op) == ARRAY_REF
2655 || TREE_CODE (op) == ARRAY_RANGE_REF)
2656 && !is_gimple_val (TREE_OPERAND (op, 1)))
2657 return false;
2658
2659 op = TREE_OPERAND (op, 0);
2660 }
2661
2662 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2663 return true;
2664
2665 switch (TREE_CODE (op))
2666 {
2667 case PARM_DECL:
2668 case RESULT_DECL:
2669 case LABEL_DECL:
2670 case FUNCTION_DECL:
2671 case VAR_DECL:
2672 case CONST_DECL:
2673 return true;
2674
2675 default:
2676 return false;
2677 }
2678 }
2679
2680 /* Return true if T is a gimple invariant address. */
2681
2682 bool
2683 is_gimple_invariant_address (const_tree t)
2684 {
2685 const_tree op;
2686
2687 if (TREE_CODE (t) != ADDR_EXPR)
2688 return false;
2689
2690 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2691 if (!op)
2692 return false;
2693
2694 if (TREE_CODE (op) == MEM_REF)
2695 {
2696 const_tree op0 = TREE_OPERAND (op, 0);
2697 return (TREE_CODE (op0) == ADDR_EXPR
2698 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2699 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2700 }
2701
2702 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2703 }
2704
2705 /* Return true if T is a gimple invariant address at IPA level
2706 (so addresses of variables on stack are not allowed). */
2707
2708 bool
2709 is_gimple_ip_invariant_address (const_tree t)
2710 {
2711 const_tree op;
2712
2713 if (TREE_CODE (t) != ADDR_EXPR)
2714 return false;
2715
2716 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2717 if (!op)
2718 return false;
2719
2720 if (TREE_CODE (op) == MEM_REF)
2721 {
2722 const_tree op0 = TREE_OPERAND (op, 0);
2723 return (TREE_CODE (op0) == ADDR_EXPR
2724 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2725 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2726 }
2727
2728 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2729 }
2730
2731 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2732 form of function invariant. */
2733
2734 bool
2735 is_gimple_min_invariant (const_tree t)
2736 {
2737 if (TREE_CODE (t) == ADDR_EXPR)
2738 return is_gimple_invariant_address (t);
2739
2740 return is_gimple_constant (t);
2741 }
2742
2743 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2744 form of gimple minimal invariant. */
2745
2746 bool
2747 is_gimple_ip_invariant (const_tree t)
2748 {
2749 if (TREE_CODE (t) == ADDR_EXPR)
2750 return is_gimple_ip_invariant_address (t);
2751
2752 return is_gimple_constant (t);
2753 }
2754
2755 /* Return true if T is a variable. */
2756
2757 bool
2758 is_gimple_variable (tree t)
2759 {
2760 return (TREE_CODE (t) == VAR_DECL
2761 || TREE_CODE (t) == PARM_DECL
2762 || TREE_CODE (t) == RESULT_DECL
2763 || TREE_CODE (t) == SSA_NAME);
2764 }
2765
2766 /* Return true if T is a GIMPLE identifier (something with an address). */
2767
2768 bool
2769 is_gimple_id (tree t)
2770 {
2771 return (is_gimple_variable (t)
2772 || TREE_CODE (t) == FUNCTION_DECL
2773 || TREE_CODE (t) == LABEL_DECL
2774 || TREE_CODE (t) == CONST_DECL
2775 /* Allow string constants, since they are addressable. */
2776 || TREE_CODE (t) == STRING_CST);
2777 }
2778
2779 /* Return true if T is a non-aggregate register variable. */
2780
2781 bool
2782 is_gimple_reg (tree t)
2783 {
2784 if (virtual_operand_p (t))
2785 return false;
2786
2787 if (TREE_CODE (t) == SSA_NAME)
2788 return true;
2789
2790 if (!is_gimple_variable (t))
2791 return false;
2792
2793 if (!is_gimple_reg_type (TREE_TYPE (t)))
2794 return false;
2795
2796 /* A volatile decl is not acceptable because we can't reuse it as
2797 needed. We need to copy it into a temp first. */
2798 if (TREE_THIS_VOLATILE (t))
2799 return false;
2800
2801 /* We define "registers" as things that can be renamed as needed,
2802 which with our infrastructure does not apply to memory. */
2803 if (needs_to_live_in_memory (t))
2804 return false;
2805
2806 /* Hard register variables are an interesting case. For those that
2807 are call-clobbered, we don't know where all the calls are, since
2808 we don't (want to) take into account which operations will turn
2809 into libcalls at the rtl level. For those that are call-saved,
2810 we don't currently model the fact that calls may in fact change
2811 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2812 level, and so miss variable changes they might imply. All around,
2813 it seems safest to not do too much optimization with these at the
2814 tree level at all. We'll have to rely on the rtl optimizers to
2815 clean this up, as there we've got all the appropriate bits exposed. */
2816 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2817 return false;
2818
2819 /* Complex and vector values must have been put into SSA-like form.
2820 That is, no assignments to the individual components. */
2821 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2822 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2823 return DECL_GIMPLE_REG_P (t);
2824
2825 return true;
2826 }
2827
2828
2829 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2830
2831 bool
2832 is_gimple_val (tree t)
2833 {
2834 /* Make loads from volatiles and memory vars explicit. */
2835 if (is_gimple_variable (t)
2836 && is_gimple_reg_type (TREE_TYPE (t))
2837 && !is_gimple_reg (t))
2838 return false;
2839
2840 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2841 }
2842
2843 /* Similarly, but accept hard registers as inputs to asm statements. */
2844
2845 bool
2846 is_gimple_asm_val (tree t)
2847 {
2848 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2849 return true;
2850
2851 return is_gimple_val (t);
2852 }
2853
2854 /* Return true if T is a GIMPLE minimal lvalue. */
2855
2856 bool
2857 is_gimple_min_lval (tree t)
2858 {
2859 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2860 return false;
2861 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2862 }
2863
2864 /* Return true if T is a valid function operand of a CALL_EXPR. */
2865
2866 bool
2867 is_gimple_call_addr (tree t)
2868 {
2869 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2870 }
2871
2872 /* Return true if T is a valid address operand of a MEM_REF. */
2873
2874 bool
2875 is_gimple_mem_ref_addr (tree t)
2876 {
2877 return (is_gimple_reg (t)
2878 || TREE_CODE (t) == INTEGER_CST
2879 || (TREE_CODE (t) == ADDR_EXPR
2880 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2881 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2882 }
2883
2884
2885 /* Given a memory reference expression T, return its base address.
2886 The base address of a memory reference expression is the main
2887 object being referenced. For instance, the base address for
2888 'array[i].fld[j]' is 'array'. You can think of this as stripping
2889 away the offset part from a memory address.
2890
2891 This function calls handled_component_p to strip away all the inner
2892 parts of the memory reference until it reaches the base object. */
2893
2894 tree
2895 get_base_address (tree t)
2896 {
2897 while (handled_component_p (t))
2898 t = TREE_OPERAND (t, 0);
2899
2900 if ((TREE_CODE (t) == MEM_REF
2901 || TREE_CODE (t) == TARGET_MEM_REF)
2902 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2903 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2904
2905 /* ??? Either the alias oracle or all callers need to properly deal
2906 with WITH_SIZE_EXPRs before we can look through those. */
2907 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2908 return NULL_TREE;
2909
2910 return t;
2911 }
2912
2913 void
2914 recalculate_side_effects (tree t)
2915 {
2916 enum tree_code code = TREE_CODE (t);
2917 int len = TREE_OPERAND_LENGTH (t);
2918 int i;
2919
2920 switch (TREE_CODE_CLASS (code))
2921 {
2922 case tcc_expression:
2923 switch (code)
2924 {
2925 case INIT_EXPR:
2926 case MODIFY_EXPR:
2927 case VA_ARG_EXPR:
2928 case PREDECREMENT_EXPR:
2929 case PREINCREMENT_EXPR:
2930 case POSTDECREMENT_EXPR:
2931 case POSTINCREMENT_EXPR:
2932 /* All of these have side-effects, no matter what their
2933 operands are. */
2934 return;
2935
2936 default:
2937 break;
2938 }
2939 /* Fall through. */
2940
2941 case tcc_comparison: /* a comparison expression */
2942 case tcc_unary: /* a unary arithmetic expression */
2943 case tcc_binary: /* a binary arithmetic expression */
2944 case tcc_reference: /* a reference */
2945 case tcc_vl_exp: /* a function call */
2946 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2947 for (i = 0; i < len; ++i)
2948 {
2949 tree op = TREE_OPERAND (t, i);
2950 if (op && TREE_SIDE_EFFECTS (op))
2951 TREE_SIDE_EFFECTS (t) = 1;
2952 }
2953 break;
2954
2955 case tcc_constant:
2956 /* No side-effects. */
2957 return;
2958
2959 default:
2960 gcc_unreachable ();
2961 }
2962 }
2963
2964 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2965 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2966 we failed to create one. */
2967
2968 tree
2969 canonicalize_cond_expr_cond (tree t)
2970 {
2971 /* Strip conversions around boolean operations. */
2972 if (CONVERT_EXPR_P (t)
2973 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2974 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2975 == BOOLEAN_TYPE))
2976 t = TREE_OPERAND (t, 0);
2977
2978 /* For !x use x == 0. */
2979 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2980 {
2981 tree top0 = TREE_OPERAND (t, 0);
2982 t = build2 (EQ_EXPR, TREE_TYPE (t),
2983 top0, build_int_cst (TREE_TYPE (top0), 0));
2984 }
2985 /* For cmp ? 1 : 0 use cmp. */
2986 else if (TREE_CODE (t) == COND_EXPR
2987 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2988 && integer_onep (TREE_OPERAND (t, 1))
2989 && integer_zerop (TREE_OPERAND (t, 2)))
2990 {
2991 tree top0 = TREE_OPERAND (t, 0);
2992 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2993 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2994 }
2995 /* For x ^ y use x != y. */
2996 else if (TREE_CODE (t) == BIT_XOR_EXPR)
2997 t = build2 (NE_EXPR, TREE_TYPE (t),
2998 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2999
3000 if (is_gimple_condexpr (t))
3001 return t;
3002
3003 return NULL_TREE;
3004 }
3005
3006 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3007 the positions marked by the set ARGS_TO_SKIP. */
3008
3009 gimple
3010 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3011 {
3012 int i;
3013 int nargs = gimple_call_num_args (stmt);
3014 vec<tree> vargs;
3015 vargs.create (nargs);
3016 gimple new_stmt;
3017
3018 for (i = 0; i < nargs; i++)
3019 if (!bitmap_bit_p (args_to_skip, i))
3020 vargs.quick_push (gimple_call_arg (stmt, i));
3021
3022 if (gimple_call_internal_p (stmt))
3023 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3024 vargs);
3025 else
3026 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3027 vargs.release ();
3028 if (gimple_call_lhs (stmt))
3029 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3030
3031 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3032 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3033
3034 if (gimple_has_location (stmt))
3035 gimple_set_location (new_stmt, gimple_location (stmt));
3036 gimple_call_copy_flags (new_stmt, stmt);
3037 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3038
3039 gimple_set_modified (new_stmt, true);
3040
3041 return new_stmt;
3042 }
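
/* A minimal sketch, assuming STMT is a call with at least two
   arguments: drop argument 1 when building the copy.

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple new_call = gimple_call_copy_skip_args (stmt, skip);
     BITMAP_FREE (skip);
*/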
3043
3044
3045
3046 /* Return true if the field decls F1 and F2 are at the same offset.
3047
3048 This is intended to be used on GIMPLE types only. */
3049
3050 bool
3051 gimple_compare_field_offset (tree f1, tree f2)
3052 {
3053 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3054 {
3055 tree offset1 = DECL_FIELD_OFFSET (f1);
3056 tree offset2 = DECL_FIELD_OFFSET (f2);
3057 return ((offset1 == offset2
3058 /* Once gimplification is done, self-referential offsets are
3059 instantiated as operand #2 of the COMPONENT_REF built for
3060 each access and reset. Therefore, they are not relevant
3061 anymore and fields are interchangeable provided that they
3062 represent the same access. */
3063 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3064 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3065 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3066 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3067 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3068 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3069 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3070 || operand_equal_p (offset1, offset2, 0))
3071 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3072 DECL_FIELD_BIT_OFFSET (f2)));
3073 }
3074
3075 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3076 should be, so handle differing ones specially by decomposing
3077 the offset into a byte and bit offset manually. */
3078 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3079 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3080 {
3081 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3082 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3083 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3084 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3085 + bit_offset1 / BITS_PER_UNIT);
3086 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3087 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3088 + bit_offset2 / BITS_PER_UNIT);
3089 if (byte_offset1 != byte_offset2)
3090 return false;
3091 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3092 }
3093
3094 return false;
3095 }
3096
3097
3098 /* Return a type the same as TYPE except unsigned or
3099 signed according to UNSIGNEDP. */
3100
3101 static tree
3102 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3103 {
3104 tree type1;
3105
3106 type1 = TYPE_MAIN_VARIANT (type);
3107 if (type1 == signed_char_type_node
3108 || type1 == char_type_node
3109 || type1 == unsigned_char_type_node)
3110 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3111 if (type1 == integer_type_node || type1 == unsigned_type_node)
3112 return unsignedp ? unsigned_type_node : integer_type_node;
3113 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3114 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3115 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3116 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3117 if (type1 == long_long_integer_type_node
3118 || type1 == long_long_unsigned_type_node)
3119 return unsignedp
3120 ? long_long_unsigned_type_node
3121 : long_long_integer_type_node;
3122 if (int128_integer_type_node
      && (type1 == int128_integer_type_node
          || type1 == int128_unsigned_type_node))
3123 return unsignedp
3124 ? int128_unsigned_type_node
3125 : int128_integer_type_node;
3126 #if HOST_BITS_PER_WIDE_INT >= 64
3127 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3128 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3129 #endif
3130 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3131 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3132 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3133 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3134 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3135 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3136 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3137 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3138
3139 #define GIMPLE_FIXED_TYPES(NAME) \
3140 if (type1 == short_ ## NAME ## _type_node \
3141 || type1 == unsigned_short_ ## NAME ## _type_node) \
3142 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3143 : short_ ## NAME ## _type_node; \
3144 if (type1 == NAME ## _type_node \
3145 || type1 == unsigned_ ## NAME ## _type_node) \
3146 return unsignedp ? unsigned_ ## NAME ## _type_node \
3147 : NAME ## _type_node; \
3148 if (type1 == long_ ## NAME ## _type_node \
3149 || type1 == unsigned_long_ ## NAME ## _type_node) \
3150 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3151 : long_ ## NAME ## _type_node; \
3152 if (type1 == long_long_ ## NAME ## _type_node \
3153 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3154 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3155 : long_long_ ## NAME ## _type_node;
3156
3157 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3158 if (type1 == NAME ## _type_node \
3159 || type1 == u ## NAME ## _type_node) \
3160 return unsignedp ? u ## NAME ## _type_node \
3161 : NAME ## _type_node;
3162
3163 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3164 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3165 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3166 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3167 : sat_ ## short_ ## NAME ## _type_node; \
3168 if (type1 == sat_ ## NAME ## _type_node \
3169 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3170 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3171 : sat_ ## NAME ## _type_node; \
3172 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3173 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3174 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3175 : sat_ ## long_ ## NAME ## _type_node; \
3176 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3177 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3178 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3179 : sat_ ## long_long_ ## NAME ## _type_node;
3180
3181 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3182 if (type1 == sat_ ## NAME ## _type_node \
3183 || type1 == sat_ ## u ## NAME ## _type_node) \
3184 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3185 : sat_ ## NAME ## _type_node;
3186
3187 GIMPLE_FIXED_TYPES (fract);
3188 GIMPLE_FIXED_TYPES_SAT (fract);
3189 GIMPLE_FIXED_TYPES (accum);
3190 GIMPLE_FIXED_TYPES_SAT (accum);
3191
3192 GIMPLE_FIXED_MODE_TYPES (qq);
3193 GIMPLE_FIXED_MODE_TYPES (hq);
3194 GIMPLE_FIXED_MODE_TYPES (sq);
3195 GIMPLE_FIXED_MODE_TYPES (dq);
3196 GIMPLE_FIXED_MODE_TYPES (tq);
3197 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
3198 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
3199 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
3200 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
3201 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
3202 GIMPLE_FIXED_MODE_TYPES (ha);
3203 GIMPLE_FIXED_MODE_TYPES (sa);
3204 GIMPLE_FIXED_MODE_TYPES (da);
3205 GIMPLE_FIXED_MODE_TYPES (ta);
3206 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
3207 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
3208 GIMPLE_FIXED_MODE_TYPES_SAT (da);
3209 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
3210
3211 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
3212 the precision; they have precision set to match their range, but
3213 may use a wider mode to match an ABI. If we change modes, we may
3214 wind up with bad conversions. For INTEGER_TYPEs in C, must check
3215 the precision as well, so as to yield correct results for
3216 bit-field types. C++ does not have these separate bit-field
3217 types, and producing a signed or unsigned variant of an
3218 ENUMERAL_TYPE may cause other problems as well. */
3219 if (!INTEGRAL_TYPE_P (type)
3220 || TYPE_UNSIGNED (type) == unsignedp)
3221 return type;
3222
3223 #define TYPE_OK(node) \
3224 (TYPE_MODE (type) == TYPE_MODE (node) \
3225 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3226 if (TYPE_OK (signed_char_type_node))
3227 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3228 if (TYPE_OK (integer_type_node))
3229 return unsignedp ? unsigned_type_node : integer_type_node;
3230 if (TYPE_OK (short_integer_type_node))
3231 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3232 if (TYPE_OK (long_integer_type_node))
3233 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3234 if (TYPE_OK (long_long_integer_type_node))
3235 return (unsignedp
3236 ? long_long_unsigned_type_node
3237 : long_long_integer_type_node);
3238 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
3239 return (unsignedp
3240 ? int128_unsigned_type_node
3241 : int128_integer_type_node);
3242
3243 #if HOST_BITS_PER_WIDE_INT >= 64
3244 if (TYPE_OK (intTI_type_node))
3245 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3246 #endif
3247 if (TYPE_OK (intDI_type_node))
3248 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3249 if (TYPE_OK (intSI_type_node))
3250 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3251 if (TYPE_OK (intHI_type_node))
3252 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3253 if (TYPE_OK (intQI_type_node))
3254 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3255
3256 #undef GIMPLE_FIXED_TYPES
3257 #undef GIMPLE_FIXED_MODE_TYPES
3258 #undef GIMPLE_FIXED_TYPES_SAT
3259 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3260 #undef TYPE_OK
3261
3262 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
3263 }
3264
3265
3266 /* Return an unsigned type the same as TYPE in other respects. */
3267
3268 tree
3269 gimple_unsigned_type (tree type)
3270 {
3271 return gimple_signed_or_unsigned_type (true, type);
3272 }
3273
3274
3275 /* Return a signed type the same as TYPE in other respects. */
3276
3277 tree
3278 gimple_signed_type (tree type)
3279 {
3280 return gimple_signed_or_unsigned_type (false, type);
3281 }
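
/* For an integral TYPE both entry points preserve the precision, so
   e.g. gimple_unsigned_type (integer_type_node) is
   unsigned_type_node, and a sketch of a sanity check would be:

     tree u = gimple_unsigned_type (type);
     gcc_checking_assert (TYPE_UNSIGNED (u)
                          && TYPE_PRECISION (u) == TYPE_PRECISION (type));
*/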
3282
3283
3284 /* Return the type-based alias set for T, which may be an expression
3285 or a type. Return -1 if we don't do anything special. */
3286
3287 alias_set_type
3288 gimple_get_alias_set (tree t)
3289 {
3290 tree u;
3291
3292 /* Permit type-punning when accessing a union, provided the access
3293 is directly through the union. For example, this code does not
3294 permit taking the address of a union member and then storing
3295 through it. Even the type-punning allowed here is a GCC
3296 extension, albeit a common and useful one; the C standard says
3297 that such accesses have implementation-defined behavior. */
3298 for (u = t;
3299 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3300 u = TREE_OPERAND (u, 0))
3301 if (TREE_CODE (u) == COMPONENT_REF
3302 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3303 return 0;
3304
3305 /* That's all the expressions we handle specially. */
3306 if (!TYPE_P (t))
3307 return -1;
3308
3309 /* For convenience, follow the C standard when dealing with
3310 character types. Any object may be accessed via an lvalue that
3311 has character type. */
3312 if (t == char_type_node
3313 || t == signed_char_type_node
3314 || t == unsigned_char_type_node)
3315 return 0;
3316
3317 /* Allow aliasing between signed and unsigned variants of the same
3318 type. We treat the signed variant as canonical. */
3319 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3320 {
3321 tree t1 = gimple_signed_type (t);
3322
3323 /* t1 == t can happen for boolean nodes which are always unsigned. */
3324 if (t1 != t)
3325 return get_alias_set (t1);
3326 }
3327
3328 return -1;
3329 }
3330
3331
3332 /* From a tree operand OP return the base of a load or store operation
3333 or NULL_TREE if OP is not a load or a store. */
3334
3335 static tree
3336 get_base_loadstore (tree op)
3337 {
3338 while (handled_component_p (op))
3339 op = TREE_OPERAND (op, 0);
3340 if (DECL_P (op)
3341 || INDIRECT_REF_P (op)
3342 || TREE_CODE (op) == MEM_REF
3343 || TREE_CODE (op) == TARGET_MEM_REF)
3344 return op;
3345 return NULL_TREE;
3346 }
3347
3348 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3349 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
3350 passing the STMT, the base of the operand and DATA to it. The base
3351 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3352 or the argument of an address expression.
3353 Returns the results of these callbacks or'ed. */
3354
3355 bool
3356 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3357 bool (*visit_load)(gimple, tree, void *),
3358 bool (*visit_store)(gimple, tree, void *),
3359 bool (*visit_addr)(gimple, tree, void *))
3360 {
3361 bool ret = false;
3362 unsigned i;
3363 if (gimple_assign_single_p (stmt))
3364 {
3365 tree lhs, rhs;
3366 if (visit_store)
3367 {
3368 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3369 if (lhs)
3370 ret |= visit_store (stmt, lhs, data);
3371 }
3372 rhs = gimple_assign_rhs1 (stmt);
3373 while (handled_component_p (rhs))
3374 rhs = TREE_OPERAND (rhs, 0);
3375 if (visit_addr)
3376 {
3377 if (TREE_CODE (rhs) == ADDR_EXPR)
3378 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3379 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3380 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3381 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3382 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3383 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3384 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3385 0), data);
3386 else if (TREE_CODE (rhs) == CONSTRUCTOR)
3387 {
3388 unsigned int ix;
3389 tree val;
3390
3391 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
3392 if (TREE_CODE (val) == ADDR_EXPR)
3393 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
3394 else if (TREE_CODE (val) == OBJ_TYPE_REF
3395 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
3396 ret |= visit_addr (stmt,
3397 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
3398 0), data);
3399 }
3400 lhs = gimple_assign_lhs (stmt);
3401 if (TREE_CODE (lhs) == TARGET_MEM_REF
3402 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3403 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3404 }
3405 if (visit_load)
3406 {
3407 rhs = get_base_loadstore (rhs);
3408 if (rhs)
3409 ret |= visit_load (stmt, rhs, data);
3410 }
3411 }
3412 else if (visit_addr
3413 && (is_gimple_assign (stmt)
3414 || gimple_code (stmt) == GIMPLE_COND))
3415 {
3416 for (i = 0; i < gimple_num_ops (stmt); ++i)
3417 {
3418 tree op = gimple_op (stmt, i);
3419 if (op == NULL_TREE)
3420 ;
3421 else if (TREE_CODE (op) == ADDR_EXPR)
3422 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3423 /* The rhs1 argument of COND_EXPR and VCOND_EXPR is a comparison
3424 tree with two operands. */
3425 else if (i == 1 && COMPARISON_CLASS_P (op))
3426 {
3427 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
3428 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
3429 0), data);
3430 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
3431 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
3432 0), data);
3433 }
3434 }
3435 }
3436 else if (is_gimple_call (stmt))
3437 {
3438 if (visit_store)
3439 {
3440 tree lhs = gimple_call_lhs (stmt);
3441 if (lhs)
3442 {
3443 lhs = get_base_loadstore (lhs);
3444 if (lhs)
3445 ret |= visit_store (stmt, lhs, data);
3446 }
3447 }
3448 if (visit_load || visit_addr)
3449 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3450 {
3451 tree rhs = gimple_call_arg (stmt, i);
3452 if (visit_addr
3453 && TREE_CODE (rhs) == ADDR_EXPR)
3454 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3455 else if (visit_load)
3456 {
3457 rhs = get_base_loadstore (rhs);
3458 if (rhs)
3459 ret |= visit_load (stmt, rhs, data);
3460 }
3461 }
3462 if (visit_addr
3463 && gimple_call_chain (stmt)
3464 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3465 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3466 data);
3467 if (visit_addr
3468 && gimple_call_return_slot_opt_p (stmt)
3469 && gimple_call_lhs (stmt) != NULL_TREE
3470 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3471 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3472 }
3473 else if (gimple_code (stmt) == GIMPLE_ASM)
3474 {
3475 unsigned noutputs;
3476 const char *constraint;
3477 const char **oconstraints;
3478 bool allows_mem, allows_reg, is_inout;
3479 noutputs = gimple_asm_noutputs (stmt);
3480 oconstraints = XALLOCAVEC (const char *, noutputs);
3481 if (visit_store || visit_addr)
3482 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3483 {
3484 tree link = gimple_asm_output_op (stmt, i);
3485 tree op = get_base_loadstore (TREE_VALUE (link));
3486 if (op && visit_store)
3487 ret |= visit_store (stmt, op, data);
3488 if (visit_addr)
3489 {
3490 constraint = TREE_STRING_POINTER
3491 (TREE_VALUE (TREE_PURPOSE (link)));
3492 oconstraints[i] = constraint;
3493 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3494 &allows_reg, &is_inout);
3495 if (op && !allows_reg && allows_mem)
3496 ret |= visit_addr (stmt, op, data);
3497 }
3498 }
3499 if (visit_load || visit_addr)
3500 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3501 {
3502 tree link = gimple_asm_input_op (stmt, i);
3503 tree op = TREE_VALUE (link);
3504 if (visit_addr
3505 && TREE_CODE (op) == ADDR_EXPR)
3506 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3507 else if (visit_load || visit_addr)
3508 {
3509 op = get_base_loadstore (op);
3510 if (op)
3511 {
3512 if (visit_load)
3513 ret |= visit_load (stmt, op, data);
3514 if (visit_addr)
3515 {
3516 constraint = TREE_STRING_POINTER
3517 (TREE_VALUE (TREE_PURPOSE (link)));
3518 parse_input_constraint (&constraint, 0, 0, noutputs,
3519 0, oconstraints,
3520 &allows_mem, &allows_reg);
3521 if (!allows_reg && allows_mem)
3522 ret |= visit_addr (stmt, op, data);
3523 }
3524 }
3525 }
3526 }
3527 }
3528 else if (gimple_code (stmt) == GIMPLE_RETURN)
3529 {
3530 tree op = gimple_return_retval (stmt);
3531 if (op)
3532 {
3533 if (visit_addr
3534 && TREE_CODE (op) == ADDR_EXPR)
3535 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3536 else if (visit_load)
3537 {
3538 op = get_base_loadstore (op);
3539 if (op)
3540 ret |= visit_load (stmt, op, data);
3541 }
3542 }
3543 }
3544 else if (visit_addr
3545 && gimple_code (stmt) == GIMPLE_PHI)
3546 {
3547 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3548 {
3549 tree op = gimple_phi_arg_def (stmt, i);
3550 if (TREE_CODE (op) == ADDR_EXPR)
3551 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3552 }
3553 }
3554 else if (visit_addr
3555 && gimple_code (stmt) == GIMPLE_GOTO)
3556 {
3557 tree op = gimple_goto_dest (stmt);
3558 if (TREE_CODE (op) == ADDR_EXPR)
3559 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3560 }
3561
3562 return ret;
3563 }
3564
3565 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3566 should make a faster clone for this case. */
3567
3568 bool
3569 walk_stmt_load_store_ops (gimple stmt, void *data,
3570 bool (*visit_load)(gimple, tree, void *),
3571 bool (*visit_store)(gimple, tree, void *))
3572 {
3573 return walk_stmt_load_store_addr_ops (stmt, data,
3574 visit_load, visit_store, NULL);
3575 }
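
/* A sketch of a visitor: count the loads in a statement.  The
   callback signature matches the VISIT_* parameters above; the
   counter is a hypothetical caller-provided object passed via DATA.

     static bool
     count_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                 void *data)
     {
       ++*(unsigned *) data;
       return false;
     }

     unsigned count = 0;
     walk_stmt_load_store_ops (stmt, &count, count_load, NULL);
*/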
3576
3577 /* Helper for gimple_ior_addresses_taken_1. */
3578
3579 static bool
3580 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3581 tree addr, void *data)
3582 {
3583 bitmap addresses_taken = (bitmap)data;
3584 addr = get_base_address (addr);
3585 if (addr
3586 && DECL_P (addr))
3587 {
3588 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3589 return true;
3590 }
3591 return false;
3592 }
3593
3594 /* Set the bit for the uid of all decls that have their address taken
3595 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3596 were any in this stmt. */
3597
3598 bool
3599 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3600 {
3601 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3602 gimple_ior_addresses_taken_1);
3603 }
3604
3605
3606 /* Return a printable name for symbol DECL. */
3607
3608 const char *
3609 gimple_decl_printable_name (tree decl, int verbosity)
3610 {
3611 if (!DECL_NAME (decl))
3612 return NULL;
3613
3614 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3615 {
3616 const char *str, *mangled_str;
3617 int dmgl_opts = DMGL_NO_OPTS;
3618
3619 if (verbosity >= 2)
3620 {
3621 dmgl_opts = DMGL_VERBOSE
3622 | DMGL_ANSI
3623 | DMGL_GNU_V3
3624 | DMGL_RET_POSTFIX;
3625 if (TREE_CODE (decl) == FUNCTION_DECL)
3626 dmgl_opts |= DMGL_PARAMS;
3627 }
3628
3629 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3630 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
3631 return (str) ? str : mangled_str;
3632 }
3633
3634 return IDENTIFIER_POINTER (DECL_NAME (decl));
3635 }
3636
3637 /* Return TRUE iff STMT is a call to a built-in function. */
3638
3639 bool
3640 is_gimple_builtin_call (gimple stmt)
3641 {
3642 tree callee;
3643
3644 if (is_gimple_call (stmt)
3645 && (callee = gimple_call_fndecl (stmt))
3646 && is_builtin_fn (callee)
3647 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3648 return true;
3649
3650 return false;
3651 }
3652
3653 /* Return true when STMT's arguments match those of FNDECL. */
3654
3655 static bool
3656 validate_call (gimple stmt, tree fndecl)
3657 {
3658 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3659 unsigned nargs = gimple_call_num_args (stmt);
3660 for (unsigned i = 0; i < nargs; ++i)
3661 {
3662 /* Variadic args follow. */
3663 if (!targs)
3664 return true;
3665 tree arg = gimple_call_arg (stmt, i);
3666 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
3667 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
3668 ;
3669 else if (POINTER_TYPE_P (TREE_TYPE (arg))
3670 && POINTER_TYPE_P (TREE_VALUE (targs)))
3671 ;
3672 else if (TREE_CODE (TREE_TYPE (arg))
3673 != TREE_CODE (TREE_VALUE (targs)))
3674 return false;
3675 targs = TREE_CHAIN (targs);
3676 }
3677 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
3678 return false;
3679 return true;
3680 }
3681
3682 /* Return true when STMT is a built-in call of class KLASS. */
3683
3684 bool
3685 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
3686 {
3687 tree fndecl;
3688 if (is_gimple_call (stmt)
3689 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
3690 && DECL_BUILT_IN_CLASS (fndecl) == klass)
3691 return validate_call (stmt, fndecl);
3692 return false;
3693 }
3694
3695 /* Return true when STMT is a built-in call to function CODE of class BUILT_IN_NORMAL. */
3696
3697 bool
3698 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
3699 {
3700 tree fndecl;
3701 if (is_gimple_call (stmt)
3702 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
3703 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3704 && DECL_FUNCTION_CODE (fndecl) == code)
3705 return validate_call (stmt, fndecl);
3706 return false;
3707 }
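
/* A typical (hypothetical) use, relying on validate_call above to
   reject calls whose argument types no longer match the builtin's
   prototype:

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
         tree dest = gimple_call_arg (stmt, 0);
         tree src = gimple_call_arg (stmt, 1);
         ...
       }
*/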
3708
3709 /* Return true if STMT clobbers memory. STMT is required to be a
3710 GIMPLE_ASM. */
3711
3712 bool
3713 gimple_asm_clobbers_memory_p (const_gimple stmt)
3714 {
3715 unsigned i;
3716
3717 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
3718 {
3719 tree op = gimple_asm_clobber_op (stmt, i);
3720 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
3721 return true;
3722 }
3723
3724 return false;
3725 }
3726
3727
3728 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
3729 useless type conversion, otherwise return false.
3730
3731 This function implicitly defines the middle-end type system. With
3732 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
3733 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
3734 the following invariants shall be fulfilled:
3735
3736 1) useless_type_conversion_p is transitive.
3737 If a < b and b < c then a < c.
3738
3739 2) useless_type_conversion_p is not symmetric.
3740 From a < b does not follow a > b.
3741
3742 3) Types define the available set of operations applicable to values.
3743 A type conversion is useless if the operations for the target type
3744 are a subset of the operations for the source type. For example
3745 casts to void* are useless, casts from void* are not (void* can't
3746 be dereferenced or offsetted, but copied, hence its set of operations
3747 is a strict subset of that of all other data pointer types). Casts
3748 to const T* are useless (can't be written to), casts from const T*
3749 to T* are not. */
3750
3751 bool
3752 useless_type_conversion_p (tree outer_type, tree inner_type)
3753 {
3754 /* Do the following before stripping toplevel qualifiers. */
3755 if (POINTER_TYPE_P (inner_type)
3756 && POINTER_TYPE_P (outer_type))
3757 {
3758 /* Do not lose casts between pointers to different address spaces. */
3759 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
3760 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
3761 return false;
3762 }
3763
3764 /* From now on qualifiers on value types do not matter. */
3765 inner_type = TYPE_MAIN_VARIANT (inner_type);
3766 outer_type = TYPE_MAIN_VARIANT (outer_type);
3767
3768 if (inner_type == outer_type)
3769 return true;
3770
3771 /* If we know the canonical types, compare them. */
3772 if (TYPE_CANONICAL (inner_type)
3773 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
3774 return true;
3775
3776 /* Changes in machine mode are never useless conversions unless we
3777 deal with aggregate types in which case we defer to later checks. */
3778 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
3779 && !AGGREGATE_TYPE_P (inner_type))
3780 return false;
3781
3782 /* If both the inner and outer types are integral types, then the
3783 conversion is not necessary if they have the same mode and
3784 signedness and precision, and both or neither are boolean. */
3785 if (INTEGRAL_TYPE_P (inner_type)
3786 && INTEGRAL_TYPE_P (outer_type))
3787 {
3788 /* Preserve changes in signedness or precision. */
3789 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
3790 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
3791 return false;
3792
3793 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
3794 of precision one. */
3795 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
3796 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
3797 && TYPE_PRECISION (outer_type) != 1)
3798 return false;
3799
3800 /* We don't need to preserve changes in the types' minimum or
3801 maximum value in general as these do not generate code
3802 unless the types' precisions differ. */
3803 return true;
3804 }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
	   && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
	   || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
	  && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
	       || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
	return false;

      /* We do not care about const qualification of the pointed-to types
	 as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }
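
  /* Editor's illustration (not from the original sources): under the
     checks above, any two data pointer types in the same address space
     convert uselessly in both directions, e.g.

	useless_type_conversion_p (ptr_type_node,
				   build_pointer_type (integer_type_node))

     is true, whereas a conversion from 'void *' to a function pointer
     type is preserved by the FUNCTION_TYPE/METHOD_TYPE check.  */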

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
	   && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
	return false;

      /* Conversions from array types with unknown extent to
	 array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type))
	return false;

      /* Nor are conversions from array types with non-constant size to
	 array types with constant size or to arrays of a different size.  */
      if (TYPE_SIZE (outer_type)
	  && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
	  && (!TYPE_SIZE (inner_type)
	      || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
	      || !tree_int_cst_equal (TYPE_SIZE (outer_type),
				      TYPE_SIZE (inner_type))))
	return false;

      /* Check conversions between arrays with partially known extents.
	 If the array min/max values are constant they have to match.
	 Otherwise allow conversions to unknown and variable extents.
	 In particular this declares conversions that may change the
	 mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
	  && TYPE_DOMAIN (outer_type)
	  && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
	{
	  tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
	  tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
	  tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

	  /* After gimplification a variable min/max value carries no
	     additional information compared to a NULL value.  All that
	     matters has been lowered to be part of the IL.  */
	  if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
	    inner_min = NULL_TREE;
	  if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
	    outer_min = NULL_TREE;
	  if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
	    inner_max = NULL_TREE;
	  if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
	    outer_max = NULL_TREE;

	  /* Conversions NULL / variable <- cst are useless, but not
	     the other way around.  */
	  if (outer_min
	      && (!inner_min
		  || !tree_int_cst_equal (inner_min, outer_min)))
	    return false;
	  if (outer_max
	      && (!inner_max
		  || !tree_int_cst_equal (inner_max, outer_max)))
	    return false;
	}

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));
    }
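
  /* Editor's illustration (not from the original sources): a conversion
     from 'char[4]' to 'char[]' only drops extent information and is
     useless, while the reverse direction would invent a known extent and
     must be preserved, matching invariant 2) above.  */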

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
	    || TREE_CODE (inner_type) == METHOD_TYPE)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible, bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
				      TREE_TYPE (inner_type)))
	return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
	  && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
					 TYPE_METHOD_BASETYPE (inner_type)))
	return false;

      /* A conversion to an unprototyped argument list is OK.  */
      if (!prototype_p (outer_type))
	return true;

      /* If the unqualified argument types are compatible, the conversion
	 is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
	return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
	   inner_parm = TYPE_ARG_TYPES (inner_type);
	   outer_parm && inner_parm;
	   outer_parm = TREE_CHAIN (outer_parm),
	   inner_parm = TREE_CHAIN (inner_parm))
	if (!useless_type_conversion_p
	       (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
		TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
	  return false;

      /* If there is a mismatch in the number of arguments, the functions
	 are not compatible.  */
      if (outer_parm || inner_parm)
	return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
	return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions between types that would otherwise need to be
     compared structurally.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
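
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources): a minimal illustration of the asymmetry of
   useless_type_conversion_p for function pointers.  Converting a
   function pointer to 'void *' only discards operations and is useless;
   the reverse direction would gain the call operation and is
   preserved.  */

static bool ATTRIBUTE_UNUSED
example_conversion_asymmetry (void)
{
  tree fn_type = build_function_type_list (void_type_node, NULL_TREE);
  tree fn_ptr_type = build_pointer_type (fn_type);

  /* void (*)(void) -> void *: useless.  */
  bool to_void = useless_type_conversion_p (ptr_type_node, fn_ptr_type);

  /* void * -> void (*)(void): not useless, the cast is preserved.  */
  bool from_void = useless_type_conversion_p (fn_ptr_type, ptr_type_node);

  return to_void && !from_void;
}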

/* Return true if a conversion from either one of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
	  || (useless_type_conversion_p (type1, type2)
	      && useless_type_conversion_p (type2, type1)));
}
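
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources): types_compatible_p is the symmetric closure of
   useless_type_conversion_p.  'int' and 'unsigned int' differ in
   signedness, so neither direction is useless and they are not
   compatible; two data pointer types convert uselessly both ways and
   are therefore compatible in this type system.  */

static bool ATTRIBUTE_UNUSED
example_types_compatible (void)
{
  bool ints = types_compatible_p (integer_type_node, unsigned_type_node);
  bool ptrs = types_compatible_p (ptr_type_node,
				  build_pointer_type (integer_type_node));
  return !ints && ptrs;
}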

/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */

void
dump_decl_set (FILE *file, bitmap set)
{
  if (set)
    {
      bitmap_iterator bi;
      unsigned i;

      fprintf (file, "{ ");

      EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
	{
	  fprintf (file, "D.%u", i);
	  fprintf (file, " ");
	}

      fprintf (file, "}");
    }
  else
    fprintf (file, "NIL");
}
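
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources): feeding dump_decl_set a bitmap with bits 5 and 9 set prints
   "{ D.5 D.9 }"; a NULL bitmap prints "NIL".  */

static void ATTRIBUTE_UNUSED
example_dump_decl_set (void)
{
  bitmap set = BITMAP_ALLOC (NULL);
  bitmap_set_bit (set, 5);
  bitmap_set_bit (set, 9);
  dump_decl_set (stderr, set);
  BITMAP_FREE (set);
}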

/* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
   coalescing together, false otherwise.

   This must stay consistent with var_map_base_init in tree-ssa-live.c.  */

bool
gimple_can_coalesce_p (tree name1, tree name2)
{
  /* First check the SSA_NAMEs' associated DECLs.  We only want to
     coalesce if they have the same DECL or both have no associated DECL.  */
  tree var1 = SSA_NAME_VAR (name1);
  tree var2 = SSA_NAME_VAR (name2);
  var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
  var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
  if (var1 != var2)
    return false;

  /* Now check the types.  If the types are the same, then we should
     try to coalesce NAME1 and NAME2.  */
  tree t1 = TREE_TYPE (name1);
  tree t2 = TREE_TYPE (name2);
  if (t1 == t2)
    return true;

  /* If the types are not the same, check for a canonical type match.  This
     (for example) allows coalescing when the types are fundamentally the
     same, but just have different names.

     Note pointer types with different address spaces may have the same
     canonical type.  Those are rejected for coalescing by the
     types_compatible_p check.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)
      && types_compatible_p (t1, t2))
    return true;

  return false;
}
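
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources; assumes an active function context so SSA names can be
   created): two anonymous SSA names of the same type have no associated
   DECL and identical types, so they are coalescing candidates.  */

static bool ATTRIBUTE_UNUSED
example_can_coalesce (void)
{
  tree n1 = make_ssa_name (integer_type_node, NULL);
  tree n2 = make_ssa_name (integer_type_node, NULL);
  return gimple_can_coalesce_p (n1, n2);
}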

/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or make it unavailable in any other way.  */

bool
nonfreeing_call_p (gimple call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }

  return false;
}
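
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources): a call to the normal malloc builtin never frees memory, so
   nonfreeing_call_p returns true for it, while a call to free is
   rejected either by the explicit BUILT_IN_FREE case or, if free is not
   marked ECF_LEAF, by the conservative fallthrough.  */

static bool ATTRIBUTE_UNUSED
example_nonfreeing_call (void)
{
  gimple malloc_call
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_MALLOC),
			 1, size_int (16));
  gimple free_call
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_FREE),
			 1, null_pointer_node);
  return nonfreeing_call_p (malloc_call) && !nonfreeing_call_p (free_call);
}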

/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}
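
/* Editor's sketch (hypothetical, unused helper; not part of the original
   sources): cloning a variable under a fresh name while keeping its type
   and flags.  */

static tree ATTRIBUTE_UNUSED
example_copy_var_decl (tree var)
{
  return copy_var_decl (var, create_tmp_var_name ("copy"),
			TREE_TYPE (var));
}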