1 /* Gimple IR support functions.
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "gimple-iterator.h"
33 #include "gimple-walk.h"
35 #include "diagnostic.h"
36 #include "value-prof.h"
40 #include "langhooks.h"
44 /* All the tuples have their operand vector (if present) at the very bottom
45 of the structure. Therefore, the offset required to find the
46 operands vector is the size of the structure minus the size of the 1
47 element tree array at the end (see gimple_ops). */
48 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
49 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
50 EXPORTED_CONST
size_t gimple_ops_offset_
[] = {
51 #include "gsstruct.def"
55 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
56 static const size_t gsstruct_code_size
[] = {
57 #include "gsstruct.def"
61 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
62 const char *const gimple_code_name
[] = {
67 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
68 EXPORTED_CONST
enum gimple_statement_structure_enum gss_for_code_
[] = {
75 int gimple_alloc_counts
[(int) gimple_alloc_kind_all
];
76 int gimple_alloc_sizes
[(int) gimple_alloc_kind_all
];
78 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
79 static const char * const gimple_alloc_kind_names
[] = {
86 /* Private API manipulation functions shared only with some
88 extern void gimple_set_stored_syms (gimple
, bitmap
, bitmap_obstack
*);
89 extern void gimple_set_loaded_syms (gimple
, bitmap
, bitmap_obstack
*);
91 /* Gimple tuple constructors.
92 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
93 be passed a NULL to start with an empty sequence. */
95 /* Set the code for statement G to CODE. */
98 gimple_set_code (gimple g
, enum gimple_code code
)
100 g
->gsbase
.code
= code
;
103 /* Return the number of bytes needed to hold a GIMPLE statement with
107 gimple_size (enum gimple_code code
)
109 return gsstruct_code_size
[gss_for_code (code
)];
112 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
116 gimple_alloc_stat (enum gimple_code code
, unsigned num_ops MEM_STAT_DECL
)
121 size
= gimple_size (code
);
123 size
+= sizeof (tree
) * (num_ops
- 1);
125 if (GATHER_STATISTICS
)
127 enum gimple_alloc_kind kind
= gimple_alloc_kind (code
);
128 gimple_alloc_counts
[(int) kind
]++;
129 gimple_alloc_sizes
[(int) kind
] += size
;
132 stmt
= ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT
);
133 gimple_set_code (stmt
, code
);
134 gimple_set_num_ops (stmt
, num_ops
);
136 /* Do not call gimple_set_modified here as it has other side
137 effects and this tuple is still not completely built. */
138 stmt
->gsbase
.modified
= 1;
139 gimple_init_singleton (stmt
);
144 /* Set SUBCODE to be the code of the expression computed by statement G. */
147 gimple_set_subcode (gimple g
, unsigned subcode
)
149 /* We only have 16 bits for the RHS code. Assert that we are not
151 gcc_assert (subcode
< (1 << 16));
152 g
->gsbase
.subcode
= subcode
;
157 /* Build a tuple with operands. CODE is the statement to build (which
158 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
159 for the new tuple. NUM_OPS is the number of operands to allocate. */
161 #define gimple_build_with_ops(c, s, n) \
162 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
165 gimple_build_with_ops_stat (enum gimple_code code
, unsigned subcode
,
166 unsigned num_ops MEM_STAT_DECL
)
168 gimple s
= gimple_alloc_stat (code
, num_ops PASS_MEM_STAT
);
169 gimple_set_subcode (s
, subcode
);
175 /* Build a GIMPLE_RETURN statement returning RETVAL. */
178 gimple_build_return (tree retval
)
180 gimple s
= gimple_build_with_ops (GIMPLE_RETURN
, ERROR_MARK
, 2);
182 gimple_return_set_retval (s
, retval
);
186 /* Reset alias information on call S. */
189 gimple_call_reset_alias_info (gimple s
)
191 if (gimple_call_flags (s
) & ECF_CONST
)
192 memset (gimple_call_use_set (s
), 0, sizeof (struct pt_solution
));
194 pt_solution_reset (gimple_call_use_set (s
));
195 if (gimple_call_flags (s
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
196 memset (gimple_call_clobber_set (s
), 0, sizeof (struct pt_solution
));
198 pt_solution_reset (gimple_call_clobber_set (s
));
201 /* Helper for gimple_build_call, gimple_build_call_valist,
202 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
203 components of a GIMPLE_CALL statement to function FN with NARGS
207 gimple_build_call_1 (tree fn
, unsigned nargs
)
209 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
210 if (TREE_CODE (fn
) == FUNCTION_DECL
)
211 fn
= build_fold_addr_expr (fn
);
212 gimple_set_op (s
, 1, fn
);
213 gimple_call_set_fntype (s
, TREE_TYPE (TREE_TYPE (fn
)));
214 gimple_call_reset_alias_info (s
);
219 /* Build a GIMPLE_CALL statement to function FN with the arguments
220 specified in vector ARGS. */
223 gimple_build_call_vec (tree fn
, vec
<tree
> args
)
226 unsigned nargs
= args
.length ();
227 gimple call
= gimple_build_call_1 (fn
, nargs
);
229 for (i
= 0; i
< nargs
; i
++)
230 gimple_call_set_arg (call
, i
, args
[i
]);
236 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
237 arguments. The ... are the arguments. */
240 gimple_build_call (tree fn
, unsigned nargs
, ...)
246 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
248 call
= gimple_build_call_1 (fn
, nargs
);
250 va_start (ap
, nargs
);
251 for (i
= 0; i
< nargs
; i
++)
252 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
259 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
260 arguments. AP contains the arguments. */
263 gimple_build_call_valist (tree fn
, unsigned nargs
, va_list ap
)
268 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
270 call
= gimple_build_call_1 (fn
, nargs
);
272 for (i
= 0; i
< nargs
; i
++)
273 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
279 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
280 Build the basic components of a GIMPLE_CALL statement to internal
281 function FN with NARGS arguments. */
284 gimple_build_call_internal_1 (enum internal_fn fn
, unsigned nargs
)
286 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
287 s
->gsbase
.subcode
|= GF_CALL_INTERNAL
;
288 gimple_call_set_internal_fn (s
, fn
);
289 gimple_call_reset_alias_info (s
);
294 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
295 the number of arguments. The ... are the arguments. */
298 gimple_build_call_internal (enum internal_fn fn
, unsigned nargs
, ...)
304 call
= gimple_build_call_internal_1 (fn
, nargs
);
305 va_start (ap
, nargs
);
306 for (i
= 0; i
< nargs
; i
++)
307 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
314 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
315 specified in vector ARGS. */
318 gimple_build_call_internal_vec (enum internal_fn fn
, vec
<tree
> args
)
323 nargs
= args
.length ();
324 call
= gimple_build_call_internal_1 (fn
, nargs
);
325 for (i
= 0; i
< nargs
; i
++)
326 gimple_call_set_arg (call
, i
, args
[i
]);
332 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
333 assumed to be in GIMPLE form already. Minimal checking is done of
337 gimple_build_call_from_tree (tree t
)
341 tree fndecl
= get_callee_fndecl (t
);
343 gcc_assert (TREE_CODE (t
) == CALL_EXPR
);
345 nargs
= call_expr_nargs (t
);
346 call
= gimple_build_call_1 (fndecl
? fndecl
: CALL_EXPR_FN (t
), nargs
);
348 for (i
= 0; i
< nargs
; i
++)
349 gimple_call_set_arg (call
, i
, CALL_EXPR_ARG (t
, i
));
351 gimple_set_block (call
, TREE_BLOCK (t
));
353 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
354 gimple_call_set_chain (call
, CALL_EXPR_STATIC_CHAIN (t
));
355 gimple_call_set_tail (call
, CALL_EXPR_TAILCALL (t
));
356 gimple_call_set_return_slot_opt (call
, CALL_EXPR_RETURN_SLOT_OPT (t
));
358 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
359 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
360 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
361 gimple_call_set_alloca_for_var (call
, CALL_ALLOCA_FOR_VAR_P (t
));
363 gimple_call_set_from_thunk (call
, CALL_FROM_THUNK_P (t
));
364 gimple_call_set_va_arg_pack (call
, CALL_EXPR_VA_ARG_PACK (t
));
365 gimple_call_set_nothrow (call
, TREE_NOTHROW (t
));
366 gimple_set_no_warning (call
, TREE_NO_WARNING (t
));
372 /* Return index of INDEX's non bound argument of the call. */
375 gimple_call_get_nobnd_arg_index (const_gimple gs
, unsigned index
)
377 unsigned num_args
= gimple_call_num_args (gs
);
378 for (unsigned n
= 0; n
< num_args
; n
++)
380 if (POINTER_BOUNDS_P (gimple_call_arg (gs
, n
)))
392 /* Build a GIMPLE_ASSIGN statement.
394 LHS of the assignment.
395 RHS of the assignment which can be unary or binary. */
398 gimple_build_assign_stat (tree lhs
, tree rhs MEM_STAT_DECL
)
400 enum tree_code subcode
;
403 extract_ops_from_tree_1 (rhs
, &subcode
, &op1
, &op2
, &op3
);
404 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, op3
409 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
410 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
411 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
414 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
415 tree op2
, tree op3 MEM_STAT_DECL
)
420 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
422 num_ops
= get_gimple_rhs_num_ops (subcode
) + 1;
424 p
= gimple_build_with_ops_stat (GIMPLE_ASSIGN
, (unsigned)subcode
, num_ops
426 gimple_assign_set_lhs (p
, lhs
);
427 gimple_assign_set_rhs1 (p
, op1
);
430 gcc_assert (num_ops
> 2);
431 gimple_assign_set_rhs2 (p
, op2
);
436 gcc_assert (num_ops
> 3);
437 gimple_assign_set_rhs3 (p
, op3
);
444 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
445 tree op2 MEM_STAT_DECL
)
447 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, NULL_TREE
452 /* Build a GIMPLE_COND statement.
454 PRED is the condition used to compare LHS and the RHS.
455 T_LABEL is the label to jump to if the condition is true.
456 F_LABEL is the label to jump to otherwise. */
459 gimple_build_cond (enum tree_code pred_code
, tree lhs
, tree rhs
,
460 tree t_label
, tree f_label
)
464 gcc_assert (TREE_CODE_CLASS (pred_code
) == tcc_comparison
);
465 p
= gimple_build_with_ops (GIMPLE_COND
, pred_code
, 4);
466 gimple_cond_set_lhs (p
, lhs
);
467 gimple_cond_set_rhs (p
, rhs
);
468 gimple_cond_set_true_label (p
, t_label
);
469 gimple_cond_set_false_label (p
, f_label
);
473 /* Build a GIMPLE_COND statement from the conditional expression tree
474 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
477 gimple_build_cond_from_tree (tree cond
, tree t_label
, tree f_label
)
482 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
483 return gimple_build_cond (code
, lhs
, rhs
, t_label
, f_label
);
486 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
487 boolean expression tree COND. */
490 gimple_cond_set_condition_from_tree (gimple stmt
, tree cond
)
495 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
496 gimple_cond_set_condition (stmt
, code
, lhs
, rhs
);
499 /* Build a GIMPLE_LABEL statement for LABEL. */
502 gimple_build_label (tree label
)
504 gimple p
= gimple_build_with_ops (GIMPLE_LABEL
, ERROR_MARK
, 1);
505 gimple_label_set_label (p
, label
);
509 /* Build a GIMPLE_GOTO statement to label DEST. */
512 gimple_build_goto (tree dest
)
514 gimple p
= gimple_build_with_ops (GIMPLE_GOTO
, ERROR_MARK
, 1);
515 gimple_goto_set_dest (p
, dest
);
520 /* Build a GIMPLE_NOP statement. */
523 gimple_build_nop (void)
525 return gimple_alloc (GIMPLE_NOP
, 0);
529 /* Build a GIMPLE_BIND statement.
530 VARS are the variables in BODY.
531 BLOCK is the containing block. */
534 gimple_build_bind (tree vars
, gimple_seq body
, tree block
)
536 gimple p
= gimple_alloc (GIMPLE_BIND
, 0);
537 gimple_bind_set_vars (p
, vars
);
539 gimple_bind_set_body (p
, body
);
541 gimple_bind_set_block (p
, block
);
545 /* Helper function to set the simple fields of a asm stmt.
547 STRING is a pointer to a string that is the asm blocks assembly code.
548 NINPUT is the number of register inputs.
549 NOUTPUT is the number of register outputs.
550 NCLOBBERS is the number of clobbered registers.
554 gimple_build_asm_1 (const char *string
, unsigned ninputs
, unsigned noutputs
,
555 unsigned nclobbers
, unsigned nlabels
)
558 int size
= strlen (string
);
560 /* ASMs with labels cannot have outputs. This should have been
561 enforced by the front end. */
562 gcc_assert (nlabels
== 0 || noutputs
== 0);
564 p
= gimple_build_with_ops (GIMPLE_ASM
, ERROR_MARK
,
565 ninputs
+ noutputs
+ nclobbers
+ nlabels
);
567 p
->gimple_asm
.ni
= ninputs
;
568 p
->gimple_asm
.no
= noutputs
;
569 p
->gimple_asm
.nc
= nclobbers
;
570 p
->gimple_asm
.nl
= nlabels
;
571 p
->gimple_asm
.string
= ggc_alloc_string (string
, size
);
573 if (GATHER_STATISTICS
)
574 gimple_alloc_sizes
[(int) gimple_alloc_kind (GIMPLE_ASM
)] += size
;
579 /* Build a GIMPLE_ASM statement.
581 STRING is the assembly code.
582 NINPUT is the number of register inputs.
583 NOUTPUT is the number of register outputs.
584 NCLOBBERS is the number of clobbered registers.
585 INPUTS is a vector of the input register parameters.
586 OUTPUTS is a vector of the output register parameters.
587 CLOBBERS is a vector of the clobbered register parameters.
588 LABELS is a vector of destination labels. */
591 gimple_build_asm_vec (const char *string
, vec
<tree
, va_gc
> *inputs
,
592 vec
<tree
, va_gc
> *outputs
, vec
<tree
, va_gc
> *clobbers
,
593 vec
<tree
, va_gc
> *labels
)
598 p
= gimple_build_asm_1 (string
,
599 vec_safe_length (inputs
),
600 vec_safe_length (outputs
),
601 vec_safe_length (clobbers
),
602 vec_safe_length (labels
));
604 for (i
= 0; i
< vec_safe_length (inputs
); i
++)
605 gimple_asm_set_input_op (p
, i
, (*inputs
)[i
]);
607 for (i
= 0; i
< vec_safe_length (outputs
); i
++)
608 gimple_asm_set_output_op (p
, i
, (*outputs
)[i
]);
610 for (i
= 0; i
< vec_safe_length (clobbers
); i
++)
611 gimple_asm_set_clobber_op (p
, i
, (*clobbers
)[i
]);
613 for (i
= 0; i
< vec_safe_length (labels
); i
++)
614 gimple_asm_set_label_op (p
, i
, (*labels
)[i
]);
619 /* Build a GIMPLE_CATCH statement.
621 TYPES are the catch types.
622 HANDLER is the exception handler. */
625 gimple_build_catch (tree types
, gimple_seq handler
)
627 gimple p
= gimple_alloc (GIMPLE_CATCH
, 0);
628 gimple_catch_set_types (p
, types
);
630 gimple_catch_set_handler (p
, handler
);
635 /* Build a GIMPLE_EH_FILTER statement.
637 TYPES are the filter's types.
638 FAILURE is the filter's failure action. */
641 gimple_build_eh_filter (tree types
, gimple_seq failure
)
643 gimple p
= gimple_alloc (GIMPLE_EH_FILTER
, 0);
644 gimple_eh_filter_set_types (p
, types
);
646 gimple_eh_filter_set_failure (p
, failure
);
651 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
654 gimple_build_eh_must_not_throw (tree decl
)
656 gimple p
= gimple_alloc (GIMPLE_EH_MUST_NOT_THROW
, 0);
658 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
659 gcc_assert (flags_from_decl_or_type (decl
) & ECF_NORETURN
);
660 gimple_eh_must_not_throw_set_fndecl (p
, decl
);
665 /* Build a GIMPLE_EH_ELSE statement. */
668 gimple_build_eh_else (gimple_seq n_body
, gimple_seq e_body
)
670 gimple p
= gimple_alloc (GIMPLE_EH_ELSE
, 0);
671 gimple_eh_else_set_n_body (p
, n_body
);
672 gimple_eh_else_set_e_body (p
, e_body
);
676 /* Build a GIMPLE_TRY statement.
678 EVAL is the expression to evaluate.
679 CLEANUP is the cleanup expression.
680 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
681 whether this is a try/catch or a try/finally respectively. */
684 gimple_build_try (gimple_seq eval
, gimple_seq cleanup
,
685 enum gimple_try_flags kind
)
689 gcc_assert (kind
== GIMPLE_TRY_CATCH
|| kind
== GIMPLE_TRY_FINALLY
);
690 p
= gimple_alloc (GIMPLE_TRY
, 0);
691 gimple_set_subcode (p
, kind
);
693 gimple_try_set_eval (p
, eval
);
695 gimple_try_set_cleanup (p
, cleanup
);
700 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
702 CLEANUP is the cleanup expression. */
705 gimple_build_wce (gimple_seq cleanup
)
707 gimple p
= gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR
, 0);
709 gimple_wce_set_cleanup (p
, cleanup
);
715 /* Build a GIMPLE_RESX statement. */
718 gimple_build_resx (int region
)
720 gimple p
= gimple_build_with_ops (GIMPLE_RESX
, ERROR_MARK
, 0);
721 p
->gimple_eh_ctrl
.region
= region
;
726 /* The helper for constructing a gimple switch statement.
727 INDEX is the switch's index.
728 NLABELS is the number of labels in the switch excluding the default.
729 DEFAULT_LABEL is the default label for the switch statement. */
732 gimple_build_switch_nlabels (unsigned nlabels
, tree index
, tree default_label
)
734 /* nlabels + 1 default label + 1 index. */
735 gcc_checking_assert (default_label
);
736 gimple p
= gimple_build_with_ops (GIMPLE_SWITCH
, ERROR_MARK
,
738 gimple_switch_set_index (p
, index
);
739 gimple_switch_set_default_label (p
, default_label
);
743 /* Build a GIMPLE_SWITCH statement.
745 INDEX is the switch's index.
746 DEFAULT_LABEL is the default label
747 ARGS is a vector of labels excluding the default. */
750 gimple_build_switch (tree index
, tree default_label
, vec
<tree
> args
)
752 unsigned i
, nlabels
= args
.length ();
754 gimple p
= gimple_build_switch_nlabels (nlabels
, index
, default_label
);
756 /* Copy the labels from the vector to the switch statement. */
757 for (i
= 0; i
< nlabels
; i
++)
758 gimple_switch_set_label (p
, i
+ 1, args
[i
]);
763 /* Build a GIMPLE_EH_DISPATCH statement. */
766 gimple_build_eh_dispatch (int region
)
768 gimple p
= gimple_build_with_ops (GIMPLE_EH_DISPATCH
, ERROR_MARK
, 0);
769 p
->gimple_eh_ctrl
.region
= region
;
773 /* Build a new GIMPLE_DEBUG_BIND statement.
775 VAR is bound to VALUE; block and location are taken from STMT. */
778 gimple_build_debug_bind_stat (tree var
, tree value
, gimple stmt MEM_STAT_DECL
)
780 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
781 (unsigned)GIMPLE_DEBUG_BIND
, 2
784 gimple_debug_bind_set_var (p
, var
);
785 gimple_debug_bind_set_value (p
, value
);
787 gimple_set_location (p
, gimple_location (stmt
));
793 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
795 VAR is bound to VALUE; block and location are taken from STMT. */
798 gimple_build_debug_source_bind_stat (tree var
, tree value
,
799 gimple stmt MEM_STAT_DECL
)
801 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
802 (unsigned)GIMPLE_DEBUG_SOURCE_BIND
, 2
805 gimple_debug_source_bind_set_var (p
, var
);
806 gimple_debug_source_bind_set_value (p
, value
);
808 gimple_set_location (p
, gimple_location (stmt
));
814 /* Build a GIMPLE_OMP_CRITICAL statement.
816 BODY is the sequence of statements for which only one thread can execute.
817 NAME is optional identifier for this critical block. */
820 gimple_build_omp_critical (gimple_seq body
, tree name
)
822 gimple p
= gimple_alloc (GIMPLE_OMP_CRITICAL
, 0);
823 gimple_omp_critical_set_name (p
, name
);
825 gimple_omp_set_body (p
, body
);
830 /* Build a GIMPLE_OMP_FOR statement.
832 BODY is sequence of statements inside the for loop.
833 KIND is the `for' variant.
834 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
835 lastprivate, reductions, ordered, schedule, and nowait.
836 COLLAPSE is the collapse count.
837 PRE_BODY is the sequence of statements that are loop invariant. */
840 gimple_build_omp_for (gimple_seq body
, int kind
, tree clauses
, size_t collapse
,
843 gimple p
= gimple_alloc (GIMPLE_OMP_FOR
, 0);
845 gimple_omp_set_body (p
, body
);
846 gimple_omp_for_set_clauses (p
, clauses
);
847 gimple_omp_for_set_kind (p
, kind
);
848 p
->gimple_omp_for
.collapse
= collapse
;
849 p
->gimple_omp_for
.iter
850 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse
);
852 gimple_omp_for_set_pre_body (p
, pre_body
);
858 /* Build a GIMPLE_OMP_PARALLEL statement.
860 BODY is sequence of statements which are executed in parallel.
861 CLAUSES, are the OMP parallel construct's clauses.
862 CHILD_FN is the function created for the parallel threads to execute.
863 DATA_ARG are the shared data argument(s). */
866 gimple_build_omp_parallel (gimple_seq body
, tree clauses
, tree child_fn
,
869 gimple p
= gimple_alloc (GIMPLE_OMP_PARALLEL
, 0);
871 gimple_omp_set_body (p
, body
);
872 gimple_omp_parallel_set_clauses (p
, clauses
);
873 gimple_omp_parallel_set_child_fn (p
, child_fn
);
874 gimple_omp_parallel_set_data_arg (p
, data_arg
);
880 /* Build a GIMPLE_OMP_TASK statement.
882 BODY is sequence of statements which are executed by the explicit task.
883 CLAUSES, are the OMP parallel construct's clauses.
884 CHILD_FN is the function created for the parallel threads to execute.
885 DATA_ARG are the shared data argument(s).
886 COPY_FN is the optional function for firstprivate initialization.
887 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
890 gimple_build_omp_task (gimple_seq body
, tree clauses
, tree child_fn
,
891 tree data_arg
, tree copy_fn
, tree arg_size
,
894 gimple p
= gimple_alloc (GIMPLE_OMP_TASK
, 0);
896 gimple_omp_set_body (p
, body
);
897 gimple_omp_task_set_clauses (p
, clauses
);
898 gimple_omp_task_set_child_fn (p
, child_fn
);
899 gimple_omp_task_set_data_arg (p
, data_arg
);
900 gimple_omp_task_set_copy_fn (p
, copy_fn
);
901 gimple_omp_task_set_arg_size (p
, arg_size
);
902 gimple_omp_task_set_arg_align (p
, arg_align
);
908 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
910 BODY is the sequence of statements in the section. */
913 gimple_build_omp_section (gimple_seq body
)
915 gimple p
= gimple_alloc (GIMPLE_OMP_SECTION
, 0);
917 gimple_omp_set_body (p
, body
);
923 /* Build a GIMPLE_OMP_MASTER statement.
925 BODY is the sequence of statements to be executed by just the master. */
928 gimple_build_omp_master (gimple_seq body
)
930 gimple p
= gimple_alloc (GIMPLE_OMP_MASTER
, 0);
932 gimple_omp_set_body (p
, body
);
938 /* Build a GIMPLE_OMP_TASKGROUP statement.
940 BODY is the sequence of statements to be executed by the taskgroup
944 gimple_build_omp_taskgroup (gimple_seq body
)
946 gimple p
= gimple_alloc (GIMPLE_OMP_TASKGROUP
, 0);
948 gimple_omp_set_body (p
, body
);
954 /* Build a GIMPLE_OMP_CONTINUE statement.
956 CONTROL_DEF is the definition of the control variable.
957 CONTROL_USE is the use of the control variable. */
960 gimple_build_omp_continue (tree control_def
, tree control_use
)
962 gimple p
= gimple_alloc (GIMPLE_OMP_CONTINUE
, 0);
963 gimple_omp_continue_set_control_def (p
, control_def
);
964 gimple_omp_continue_set_control_use (p
, control_use
);
968 /* Build a GIMPLE_OMP_ORDERED statement.
970 BODY is the sequence of statements inside a loop that will executed in
974 gimple_build_omp_ordered (gimple_seq body
)
976 gimple p
= gimple_alloc (GIMPLE_OMP_ORDERED
, 0);
978 gimple_omp_set_body (p
, body
);
984 /* Build a GIMPLE_OMP_RETURN statement.
985 WAIT_P is true if this is a non-waiting return. */
988 gimple_build_omp_return (bool wait_p
)
990 gimple p
= gimple_alloc (GIMPLE_OMP_RETURN
, 0);
992 gimple_omp_return_set_nowait (p
);
998 /* Build a GIMPLE_OMP_SECTIONS statement.
1000 BODY is a sequence of section statements.
1001 CLAUSES are any of the OMP sections contsruct's clauses: private,
1002 firstprivate, lastprivate, reduction, and nowait. */
1005 gimple_build_omp_sections (gimple_seq body
, tree clauses
)
1007 gimple p
= gimple_alloc (GIMPLE_OMP_SECTIONS
, 0);
1009 gimple_omp_set_body (p
, body
);
1010 gimple_omp_sections_set_clauses (p
, clauses
);
1016 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1019 gimple_build_omp_sections_switch (void)
1021 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH
, 0);
1025 /* Build a GIMPLE_OMP_SINGLE statement.
1027 BODY is the sequence of statements that will be executed once.
1028 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1029 copyprivate, nowait. */
1032 gimple_build_omp_single (gimple_seq body
, tree clauses
)
1034 gimple p
= gimple_alloc (GIMPLE_OMP_SINGLE
, 0);
1036 gimple_omp_set_body (p
, body
);
1037 gimple_omp_single_set_clauses (p
, clauses
);
1043 /* Build a GIMPLE_OMP_TARGET statement.
1045 BODY is the sequence of statements that will be executed.
1046 CLAUSES are any of the OMP target construct's clauses. */
1049 gimple_build_omp_target (gimple_seq body
, int kind
, tree clauses
)
1051 gimple p
= gimple_alloc (GIMPLE_OMP_TARGET
, 0);
1053 gimple_omp_set_body (p
, body
);
1054 gimple_omp_target_set_clauses (p
, clauses
);
1055 gimple_omp_target_set_kind (p
, kind
);
1061 /* Build a GIMPLE_OMP_TEAMS statement.
1063 BODY is the sequence of statements that will be executed.
1064 CLAUSES are any of the OMP teams construct's clauses. */
1067 gimple_build_omp_teams (gimple_seq body
, tree clauses
)
1069 gimple p
= gimple_alloc (GIMPLE_OMP_TEAMS
, 0);
1071 gimple_omp_set_body (p
, body
);
1072 gimple_omp_teams_set_clauses (p
, clauses
);
1078 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1081 gimple_build_omp_atomic_load (tree lhs
, tree rhs
)
1083 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD
, 0);
1084 gimple_omp_atomic_load_set_lhs (p
, lhs
);
1085 gimple_omp_atomic_load_set_rhs (p
, rhs
);
1089 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1091 VAL is the value we are storing. */
1094 gimple_build_omp_atomic_store (tree val
)
1096 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_STORE
, 0);
1097 gimple_omp_atomic_store_set_val (p
, val
);
1101 /* Build a GIMPLE_TRANSACTION statement. */
1104 gimple_build_transaction (gimple_seq body
, tree label
)
1106 gimple p
= gimple_alloc (GIMPLE_TRANSACTION
, 0);
1107 gimple_transaction_set_body (p
, body
);
1108 gimple_transaction_set_label (p
, label
);
1112 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1113 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1116 gimple_build_predict (enum br_predictor predictor
, enum prediction outcome
)
1118 gimple p
= gimple_alloc (GIMPLE_PREDICT
, 0);
1119 /* Ensure all the predictors fit into the lower bits of the subcode. */
1120 gcc_assert ((int) END_PREDICTORS
<= GF_PREDICT_TAKEN
);
1121 gimple_predict_set_predictor (p
, predictor
);
1122 gimple_predict_set_outcome (p
, outcome
);
1126 #if defined ENABLE_GIMPLE_CHECKING
1127 /* Complain of a gimple type mismatch and die. */
1130 gimple_check_failed (const_gimple gs
, const char *file
, int line
,
1131 const char *function
, enum gimple_code code
,
1132 enum tree_code subcode
)
1134 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1135 gimple_code_name
[code
],
1136 get_tree_code_name (subcode
),
1137 gimple_code_name
[gimple_code (gs
)],
1138 gs
->gsbase
.subcode
> 0
1139 ? get_tree_code_name ((enum tree_code
) gs
->gsbase
.subcode
)
1141 function
, trim_filename (file
), line
);
1143 #endif /* ENABLE_GIMPLE_CHECKING */
1146 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1147 *SEQ_P is NULL, a new sequence is allocated. */
1150 gimple_seq_add_stmt (gimple_seq
*seq_p
, gimple gs
)
1152 gimple_stmt_iterator si
;
1156 si
= gsi_last (*seq_p
);
1157 gsi_insert_after (&si
, gs
, GSI_NEW_STMT
);
1160 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1161 *SEQ_P is NULL, a new sequence is allocated. This function is
1162 similar to gimple_seq_add_stmt, but does not scan the operands.
1163 During gimplification, we need to manipulate statement sequences
1164 before the def/use vectors have been constructed. */
1167 gimple_seq_add_stmt_without_update (gimple_seq
*seq_p
, gimple gs
)
1169 gimple_stmt_iterator si
;
1174 si
= gsi_last (*seq_p
);
1175 gsi_insert_after_without_update (&si
, gs
, GSI_NEW_STMT
);
1178 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1179 NULL, a new sequence is allocated. */
1182 gimple_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
1184 gimple_stmt_iterator si
;
1188 si
= gsi_last (*dst_p
);
1189 gsi_insert_seq_after (&si
, src
, GSI_NEW_STMT
);
1192 /* Determine whether to assign a location to the statement GS. */
1195 should_carry_location_p (gimple gs
)
1197 /* Don't emit a line note for a label. We particularly don't want to
1198 emit one for the break label, since it doesn't actually correspond
1199 to the beginning of the loop/switch. */
1200 if (gimple_code (gs
) == GIMPLE_LABEL
)
1206 /* Set the location for gimple statement GS to LOCATION. */
1209 annotate_one_with_location (gimple gs
, location_t location
)
1211 if (!gimple_has_location (gs
)
1212 && !gimple_do_not_emit_location_p (gs
)
1213 && should_carry_location_p (gs
))
1214 gimple_set_location (gs
, location
);
1217 /* Set LOCATION for all the statements after iterator GSI in sequence
1218 SEQ. If GSI is pointing to the end of the sequence, start with the
1219 first statement in SEQ. */
1222 annotate_all_with_location_after (gimple_seq seq
, gimple_stmt_iterator gsi
,
1223 location_t location
)
1225 if (gsi_end_p (gsi
))
1226 gsi
= gsi_start (seq
);
1230 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
1231 annotate_one_with_location (gsi_stmt (gsi
), location
);
1234 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1237 annotate_all_with_location (gimple_seq stmt_p
, location_t location
)
1239 gimple_stmt_iterator i
;
1241 if (gimple_seq_empty_p (stmt_p
))
1244 for (i
= gsi_start (stmt_p
); !gsi_end_p (i
); gsi_next (&i
))
1246 gimple gs
= gsi_stmt (i
);
1247 annotate_one_with_location (gs
, location
);
1251 /* Helper function of empty_body_p. Return true if STMT is an empty
1255 empty_stmt_p (gimple stmt
)
1257 if (gimple_code (stmt
) == GIMPLE_NOP
)
1259 if (gimple_code (stmt
) == GIMPLE_BIND
)
1260 return empty_body_p (gimple_bind_body (stmt
));
1265 /* Return true if BODY contains nothing but empty statements. */
1268 empty_body_p (gimple_seq body
)
1270 gimple_stmt_iterator i
;
1272 if (gimple_seq_empty_p (body
))
1274 for (i
= gsi_start (body
); !gsi_end_p (i
); gsi_next (&i
))
1275 if (!empty_stmt_p (gsi_stmt (i
))
1276 && !is_gimple_debug (gsi_stmt (i
)))
1283 /* Perform a deep copy of sequence SRC and return the result. */
1286 gimple_seq_copy (gimple_seq src
)
1288 gimple_stmt_iterator gsi
;
1289 gimple_seq new_seq
= NULL
;
1292 for (gsi
= gsi_start (src
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1294 stmt
= gimple_copy (gsi_stmt (gsi
));
1295 gimple_seq_add_stmt (&new_seq
, stmt
);
1303 /* Return true if calls C1 and C2 are known to go to the same function. */
1306 gimple_call_same_target_p (const_gimple c1
, const_gimple c2
)
1308 if (gimple_call_internal_p (c1
))
1309 return (gimple_call_internal_p (c2
)
1310 && gimple_call_internal_fn (c1
) == gimple_call_internal_fn (c2
));
1312 return (gimple_call_fn (c1
) == gimple_call_fn (c2
)
1313 || (gimple_call_fndecl (c1
)
1314 && gimple_call_fndecl (c1
) == gimple_call_fndecl (c2
)));
1317 /* Detect flags from a GIMPLE_CALL. This is just like
1318 call_expr_flags, but for gimple tuples. */
1321 gimple_call_flags (const_gimple stmt
)
1324 tree decl
= gimple_call_fndecl (stmt
);
1327 flags
= flags_from_decl_or_type (decl
);
1328 else if (gimple_call_internal_p (stmt
))
1329 flags
= internal_fn_flags (gimple_call_internal_fn (stmt
));
1331 flags
= flags_from_decl_or_type (gimple_call_fntype (stmt
));
1333 if (stmt
->gsbase
.subcode
& GF_CALL_NOTHROW
)
1334 flags
|= ECF_NOTHROW
;
1339 /* Return the "fn spec" string for call STMT. */
1342 gimple_call_fnspec (const_gimple stmt
)
1346 type
= gimple_call_fntype (stmt
);
1350 attr
= lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type
));
1354 return TREE_VALUE (TREE_VALUE (attr
));
1357 /* Detects argument flags for argument number ARG on call STMT. */
1360 gimple_call_arg_flags (const_gimple stmt
, unsigned arg
)
1362 tree attr
= gimple_call_fnspec (stmt
);
1364 if (!attr
|| 1 + arg
>= (unsigned) TREE_STRING_LENGTH (attr
))
1367 switch (TREE_STRING_POINTER (attr
)[1 + arg
])
1374 return EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
;
1377 return EAF_NOCLOBBER
| EAF_NOESCAPE
;
1380 return EAF_DIRECT
| EAF_NOESCAPE
;
1383 return EAF_NOESCAPE
;
1391 /* Detects return flags for the call STMT. */
1394 gimple_call_return_flags (const_gimple stmt
)
1398 if (gimple_call_flags (stmt
) & ECF_MALLOC
)
1401 attr
= gimple_call_fnspec (stmt
);
1402 if (!attr
|| TREE_STRING_LENGTH (attr
) < 1)
1405 switch (TREE_STRING_POINTER (attr
)[0])
1411 return ERF_RETURNS_ARG
| (TREE_STRING_POINTER (attr
)[0] - '1');
1423 /* Return true if GS is a copy assignment. */
1426 gimple_assign_copy_p (gimple gs
)
1428 return (gimple_assign_single_p (gs
)
1429 && is_gimple_val (gimple_op (gs
, 1)));
1433 /* Return true if GS is a SSA_NAME copy assignment. */
1436 gimple_assign_ssa_name_copy_p (gimple gs
)
1438 return (gimple_assign_single_p (gs
)
1439 && TREE_CODE (gimple_assign_lhs (gs
)) == SSA_NAME
1440 && TREE_CODE (gimple_assign_rhs1 (gs
)) == SSA_NAME
);
1444 /* Return true if GS is an assignment with a unary RHS, but the
1445 operator has no effect on the assigned value. The logic is adapted
1446 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1447 instances in which STRIP_NOPS was previously applied to the RHS of
1450 NOTE: In the use cases that led to the creation of this function
1451 and of gimple_assign_single_p, it is typical to test for either
1452 condition and to proceed in the same manner. In each case, the
1453 assigned value is represented by the single RHS operand of the
1454 assignment. I suspect there may be cases where gimple_assign_copy_p,
1455 gimple_assign_single_p, or equivalent logic is used where a similar
1456 treatment of unary NOPs is appropriate. */
1459 gimple_assign_unary_nop_p (gimple gs
)
1461 return (is_gimple_assign (gs
)
1462 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
))
1463 || gimple_assign_rhs_code (gs
) == NON_LVALUE_EXPR
)
1464 && gimple_assign_rhs1 (gs
) != error_mark_node
1465 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))
1466 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs
)))));
1469 /* Set BB to be the basic block holding G. */
1472 gimple_set_bb (gimple stmt
, basic_block bb
)
1474 stmt
->gsbase
.bb
= bb
;
1476 /* If the statement is a label, add the label to block-to-labels map
1477 so that we can speed up edge creation for GIMPLE_GOTOs. */
1478 if (cfun
->cfg
&& gimple_code (stmt
) == GIMPLE_LABEL
)
1483 t
= gimple_label_label (stmt
);
1484 uid
= LABEL_DECL_UID (t
);
1487 unsigned old_len
= vec_safe_length (label_to_block_map
);
1488 LABEL_DECL_UID (t
) = uid
= cfun
->cfg
->last_label_uid
++;
1489 if (old_len
<= (unsigned) uid
)
1491 unsigned new_len
= 3 * uid
/ 2 + 1;
1493 vec_safe_grow_cleared (label_to_block_map
, new_len
);
1497 (*label_to_block_map
)[uid
] = bb
;
1502 /* Modify the RHS of the assignment pointed-to by GSI using the
1503 operands in the expression tree EXPR.
1505 NOTE: The statement pointed-to by GSI may be reallocated if it
1506 did not have enough operand slots.
1508 This function is useful to convert an existing tree expression into
1509 the flat representation used for the RHS of a GIMPLE assignment.
1510 It will reallocate memory as needed to expand or shrink the number
1511 of operand slots needed to represent EXPR.
1513 NOTE: If you find yourself building a tree and then calling this
1514 function, you are most certainly doing it the slow way. It is much
1515 better to build a new assignment or to use the function
1516 gimple_assign_set_rhs_with_ops, which does not require an
1517 expression tree to be built. */
1520 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator
*gsi
, tree expr
)
1522 enum tree_code subcode
;
1525 extract_ops_from_tree_1 (expr
, &subcode
, &op1
, &op2
, &op3
);
1526 gimple_assign_set_rhs_with_ops_1 (gsi
, subcode
, op1
, op2
, op3
);
1530 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1531 operands OP1, OP2 and OP3.
1533 NOTE: The statement pointed-to by GSI may be reallocated if it
1534 did not have enough operand slots. */
1537 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
1538 tree op1
, tree op2
, tree op3
)
1540 unsigned new_rhs_ops
= get_gimple_rhs_num_ops (code
);
1541 gimple stmt
= gsi_stmt (*gsi
);
1543 /* If the new CODE needs more operands, allocate a new statement. */
1544 if (gimple_num_ops (stmt
) < new_rhs_ops
+ 1)
1546 tree lhs
= gimple_assign_lhs (stmt
);
1547 gimple new_stmt
= gimple_alloc (gimple_code (stmt
), new_rhs_ops
+ 1);
1548 memcpy (new_stmt
, stmt
, gimple_size (gimple_code (stmt
)));
1549 gimple_init_singleton (new_stmt
);
1550 gsi_replace (gsi
, new_stmt
, true);
1553 /* The LHS needs to be reset as this also changes the SSA name
1555 gimple_assign_set_lhs (stmt
, lhs
);
1558 gimple_set_num_ops (stmt
, new_rhs_ops
+ 1);
1559 gimple_set_subcode (stmt
, code
);
1560 gimple_assign_set_rhs1 (stmt
, op1
);
1561 if (new_rhs_ops
> 1)
1562 gimple_assign_set_rhs2 (stmt
, op2
);
1563 if (new_rhs_ops
> 2)
1564 gimple_assign_set_rhs3 (stmt
, op3
);
1568 /* Return the LHS of a statement that performs an assignment,
1569 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1570 for a call to a function that returns no value, or for a
1571 statement other than an assignment or a call. */
1574 gimple_get_lhs (const_gimple stmt
)
1576 enum gimple_code code
= gimple_code (stmt
);
1578 if (code
== GIMPLE_ASSIGN
)
1579 return gimple_assign_lhs (stmt
);
1580 else if (code
== GIMPLE_CALL
)
1581 return gimple_call_lhs (stmt
);
1587 /* Set the LHS of a statement that performs an assignment,
1588 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1591 gimple_set_lhs (gimple stmt
, tree lhs
)
1593 enum gimple_code code
= gimple_code (stmt
);
1595 if (code
== GIMPLE_ASSIGN
)
1596 gimple_assign_set_lhs (stmt
, lhs
);
1597 else if (code
== GIMPLE_CALL
)
1598 gimple_call_set_lhs (stmt
, lhs
);
1604 /* Return a deep copy of statement STMT. All the operands from STMT
1605 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1606 and VUSE operand arrays are set to empty in the new copy. The new
1607 copy isn't part of any sequence. */
1610 gimple_copy (gimple stmt
)
1612 enum gimple_code code
= gimple_code (stmt
);
1613 unsigned num_ops
= gimple_num_ops (stmt
);
1614 gimple copy
= gimple_alloc (code
, num_ops
);
1617 /* Shallow copy all the fields from STMT. */
1618 memcpy (copy
, stmt
, gimple_size (code
));
1619 gimple_init_singleton (copy
);
1621 /* If STMT has sub-statements, deep-copy them as well. */
1622 if (gimple_has_substatements (stmt
))
1627 switch (gimple_code (stmt
))
1630 new_seq
= gimple_seq_copy (gimple_bind_body (stmt
));
1631 gimple_bind_set_body (copy
, new_seq
);
1632 gimple_bind_set_vars (copy
, unshare_expr (gimple_bind_vars (stmt
)));
1633 gimple_bind_set_block (copy
, gimple_bind_block (stmt
));
1637 new_seq
= gimple_seq_copy (gimple_catch_handler (stmt
));
1638 gimple_catch_set_handler (copy
, new_seq
);
1639 t
= unshare_expr (gimple_catch_types (stmt
));
1640 gimple_catch_set_types (copy
, t
);
1643 case GIMPLE_EH_FILTER
:
1644 new_seq
= gimple_seq_copy (gimple_eh_filter_failure (stmt
));
1645 gimple_eh_filter_set_failure (copy
, new_seq
);
1646 t
= unshare_expr (gimple_eh_filter_types (stmt
));
1647 gimple_eh_filter_set_types (copy
, t
);
1650 case GIMPLE_EH_ELSE
:
1651 new_seq
= gimple_seq_copy (gimple_eh_else_n_body (stmt
));
1652 gimple_eh_else_set_n_body (copy
, new_seq
);
1653 new_seq
= gimple_seq_copy (gimple_eh_else_e_body (stmt
));
1654 gimple_eh_else_set_e_body (copy
, new_seq
);
1658 new_seq
= gimple_seq_copy (gimple_try_eval (stmt
));
1659 gimple_try_set_eval (copy
, new_seq
);
1660 new_seq
= gimple_seq_copy (gimple_try_cleanup (stmt
));
1661 gimple_try_set_cleanup (copy
, new_seq
);
1664 case GIMPLE_OMP_FOR
:
1665 new_seq
= gimple_seq_copy (gimple_omp_for_pre_body (stmt
));
1666 gimple_omp_for_set_pre_body (copy
, new_seq
);
1667 t
= unshare_expr (gimple_omp_for_clauses (stmt
));
1668 gimple_omp_for_set_clauses (copy
, t
);
1669 copy
->gimple_omp_for
.iter
1670 = ggc_alloc_vec_gimple_omp_for_iter
1671 (gimple_omp_for_collapse (stmt
));
1672 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1674 gimple_omp_for_set_cond (copy
, i
,
1675 gimple_omp_for_cond (stmt
, i
));
1676 gimple_omp_for_set_index (copy
, i
,
1677 gimple_omp_for_index (stmt
, i
));
1678 t
= unshare_expr (gimple_omp_for_initial (stmt
, i
));
1679 gimple_omp_for_set_initial (copy
, i
, t
);
1680 t
= unshare_expr (gimple_omp_for_final (stmt
, i
));
1681 gimple_omp_for_set_final (copy
, i
, t
);
1682 t
= unshare_expr (gimple_omp_for_incr (stmt
, i
));
1683 gimple_omp_for_set_incr (copy
, i
, t
);
1687 case GIMPLE_OMP_PARALLEL
:
1688 t
= unshare_expr (gimple_omp_parallel_clauses (stmt
));
1689 gimple_omp_parallel_set_clauses (copy
, t
);
1690 t
= unshare_expr (gimple_omp_parallel_child_fn (stmt
));
1691 gimple_omp_parallel_set_child_fn (copy
, t
);
1692 t
= unshare_expr (gimple_omp_parallel_data_arg (stmt
));
1693 gimple_omp_parallel_set_data_arg (copy
, t
);
1696 case GIMPLE_OMP_TASK
:
1697 t
= unshare_expr (gimple_omp_task_clauses (stmt
));
1698 gimple_omp_task_set_clauses (copy
, t
);
1699 t
= unshare_expr (gimple_omp_task_child_fn (stmt
));
1700 gimple_omp_task_set_child_fn (copy
, t
);
1701 t
= unshare_expr (gimple_omp_task_data_arg (stmt
));
1702 gimple_omp_task_set_data_arg (copy
, t
);
1703 t
= unshare_expr (gimple_omp_task_copy_fn (stmt
));
1704 gimple_omp_task_set_copy_fn (copy
, t
);
1705 t
= unshare_expr (gimple_omp_task_arg_size (stmt
));
1706 gimple_omp_task_set_arg_size (copy
, t
);
1707 t
= unshare_expr (gimple_omp_task_arg_align (stmt
));
1708 gimple_omp_task_set_arg_align (copy
, t
);
1711 case GIMPLE_OMP_CRITICAL
:
1712 t
= unshare_expr (gimple_omp_critical_name (stmt
));
1713 gimple_omp_critical_set_name (copy
, t
);
1716 case GIMPLE_OMP_SECTIONS
:
1717 t
= unshare_expr (gimple_omp_sections_clauses (stmt
));
1718 gimple_omp_sections_set_clauses (copy
, t
);
1719 t
= unshare_expr (gimple_omp_sections_control (stmt
));
1720 gimple_omp_sections_set_control (copy
, t
);
1723 case GIMPLE_OMP_SINGLE
:
1724 case GIMPLE_OMP_TARGET
:
1725 case GIMPLE_OMP_TEAMS
:
1726 case GIMPLE_OMP_SECTION
:
1727 case GIMPLE_OMP_MASTER
:
1728 case GIMPLE_OMP_TASKGROUP
:
1729 case GIMPLE_OMP_ORDERED
:
1731 new_seq
= gimple_seq_copy (gimple_omp_body (stmt
));
1732 gimple_omp_set_body (copy
, new_seq
);
1735 case GIMPLE_TRANSACTION
:
1736 new_seq
= gimple_seq_copy (gimple_transaction_body (stmt
));
1737 gimple_transaction_set_body (copy
, new_seq
);
1740 case GIMPLE_WITH_CLEANUP_EXPR
:
1741 new_seq
= gimple_seq_copy (gimple_wce_cleanup (stmt
));
1742 gimple_wce_set_cleanup (copy
, new_seq
);
1750 /* Make copy of operands. */
1751 for (i
= 0; i
< num_ops
; i
++)
1752 gimple_set_op (copy
, i
, unshare_expr (gimple_op (stmt
, i
)));
1754 if (gimple_has_mem_ops (stmt
))
1756 gimple_set_vdef (copy
, gimple_vdef (stmt
));
1757 gimple_set_vuse (copy
, gimple_vuse (stmt
));
1760 /* Clear out SSA operand vectors on COPY. */
1761 if (gimple_has_ops (stmt
))
1763 gimple_set_use_ops (copy
, NULL
);
1765 /* SSA operands need to be updated. */
1766 gimple_set_modified (copy
, true);
1773 /* Return true if statement S has side-effects. We consider a
1774 statement to have side effects if:
1776 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1777 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1780 gimple_has_side_effects (const_gimple s
)
1782 if (is_gimple_debug (s
))
1785 /* We don't have to scan the arguments to check for
1786 volatile arguments, though, at present, we still
1787 do a scan to check for TREE_SIDE_EFFECTS. */
1788 if (gimple_has_volatile_ops (s
))
1791 if (gimple_code (s
) == GIMPLE_ASM
1792 && gimple_asm_volatile_p (s
))
1795 if (is_gimple_call (s
))
1797 int flags
= gimple_call_flags (s
);
1799 /* An infinite loop is considered a side effect. */
1800 if (!(flags
& (ECF_CONST
| ECF_PURE
))
1801 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
1810 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1811 Return true if S can trap. When INCLUDE_MEM is true, check whether
1812 the memory operations could trap. When INCLUDE_STORES is true and
1813 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
1816 gimple_could_trap_p_1 (gimple s
, bool include_mem
, bool include_stores
)
1818 tree t
, div
= NULL_TREE
;
1823 unsigned i
, start
= (is_gimple_assign (s
) && !include_stores
) ? 1 : 0;
1825 for (i
= start
; i
< gimple_num_ops (s
); i
++)
1826 if (tree_could_trap_p (gimple_op (s
, i
)))
1830 switch (gimple_code (s
))
1833 return gimple_asm_volatile_p (s
);
1836 t
= gimple_call_fndecl (s
);
1837 /* Assume that calls to weak functions may trap. */
1838 if (!t
|| !DECL_P (t
) || DECL_WEAK (t
))
1843 t
= gimple_expr_type (s
);
1844 op
= gimple_assign_rhs_code (s
);
1845 if (get_gimple_rhs_class (op
) == GIMPLE_BINARY_RHS
)
1846 div
= gimple_assign_rhs2 (s
);
1847 return (operation_could_trap_p (op
, FLOAT_TYPE_P (t
),
1848 (INTEGRAL_TYPE_P (t
)
1849 && TYPE_OVERFLOW_TRAPS (t
)),
1859 /* Return true if statement S can trap. */
1862 gimple_could_trap_p (gimple s
)
1864 return gimple_could_trap_p_1 (s
, true, true);
1867 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1870 gimple_assign_rhs_could_trap_p (gimple s
)
1872 gcc_assert (is_gimple_assign (s
));
1873 return gimple_could_trap_p_1 (s
, true, false);
1877 /* Print debugging information for gimple stmts generated. */
1880 dump_gimple_statistics (void)
1882 int i
, total_tuples
= 0, total_bytes
= 0;
1884 if (! GATHER_STATISTICS
)
1886 fprintf (stderr
, "No gimple statistics\n");
1890 fprintf (stderr
, "\nGIMPLE statements\n");
1891 fprintf (stderr
, "Kind Stmts Bytes\n");
1892 fprintf (stderr
, "---------------------------------------\n");
1893 for (i
= 0; i
< (int) gimple_alloc_kind_all
; ++i
)
1895 fprintf (stderr
, "%-20s %7d %10d\n", gimple_alloc_kind_names
[i
],
1896 gimple_alloc_counts
[i
], gimple_alloc_sizes
[i
]);
1897 total_tuples
+= gimple_alloc_counts
[i
];
1898 total_bytes
+= gimple_alloc_sizes
[i
];
1900 fprintf (stderr
, "---------------------------------------\n");
1901 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_tuples
, total_bytes
);
1902 fprintf (stderr
, "---------------------------------------\n");
1906 /* Return the number of operands needed on the RHS of a GIMPLE
1907 assignment for an expression with tree code CODE. */
1910 get_gimple_rhs_num_ops (enum tree_code code
)
1912 enum gimple_rhs_class rhs_class
= get_gimple_rhs_class (code
);
1914 if (rhs_class
== GIMPLE_UNARY_RHS
|| rhs_class
== GIMPLE_SINGLE_RHS
)
1916 else if (rhs_class
== GIMPLE_BINARY_RHS
)
1918 else if (rhs_class
== GIMPLE_TERNARY_RHS
)
1924 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
1926 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
1927 : ((TYPE) == tcc_binary \
1928 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
1929 : ((TYPE) == tcc_constant \
1930 || (TYPE) == tcc_declaration \
1931 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
1932 : ((SYM) == TRUTH_AND_EXPR \
1933 || (SYM) == TRUTH_OR_EXPR \
1934 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
1935 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
1936 : ((SYM) == COND_EXPR \
1937 || (SYM) == WIDEN_MULT_PLUS_EXPR \
1938 || (SYM) == WIDEN_MULT_MINUS_EXPR \
1939 || (SYM) == DOT_PROD_EXPR \
1940 || (SYM) == REALIGN_LOAD_EXPR \
1941 || (SYM) == VEC_COND_EXPR \
1942 || (SYM) == VEC_PERM_EXPR \
1943 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
1944 : ((SYM) == CONSTRUCTOR \
1945 || (SYM) == OBJ_TYPE_REF \
1946 || (SYM) == ASSERT_EXPR \
1947 || (SYM) == ADDR_EXPR \
1948 || (SYM) == WITH_SIZE_EXPR \
1949 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
1950 : GIMPLE_INVALID_RHS),
1951 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
1953 const unsigned char gimple_rhs_class_table
[] = {
1954 #include "all-tree.def"
1958 #undef END_OF_BASE_TREE_CODES
1960 /* Given a memory reference expression T, return its base address.
1961 The base address of a memory reference expression is the main
1962 object being referenced. For instance, the base address for
1963 'array[i].fld[j]' is 'array'. You can think of this as stripping
1964 away the offset part from a memory address.
1966 This function calls handled_component_p to strip away all the inner
1967 parts of the memory reference until it reaches the base object. */
1970 get_base_address (tree t
)
1972 while (handled_component_p (t
))
1973 t
= TREE_OPERAND (t
, 0);
1975 if ((TREE_CODE (t
) == MEM_REF
1976 || TREE_CODE (t
) == TARGET_MEM_REF
)
1977 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
1978 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
1980 /* ??? Either the alias oracle or all callers need to properly deal
1981 with WITH_SIZE_EXPRs before we can look through those. */
1982 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
1989 recalculate_side_effects (tree t
)
1991 enum tree_code code
= TREE_CODE (t
);
1992 int len
= TREE_OPERAND_LENGTH (t
);
1995 switch (TREE_CODE_CLASS (code
))
1997 case tcc_expression
:
2003 case PREDECREMENT_EXPR
:
2004 case PREINCREMENT_EXPR
:
2005 case POSTDECREMENT_EXPR
:
2006 case POSTINCREMENT_EXPR
:
2007 /* All of these have side-effects, no matter what their
2016 case tcc_comparison
: /* a comparison expression */
2017 case tcc_unary
: /* a unary arithmetic expression */
2018 case tcc_binary
: /* a binary arithmetic expression */
2019 case tcc_reference
: /* a reference */
2020 case tcc_vl_exp
: /* a function call */
2021 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2022 for (i
= 0; i
< len
; ++i
)
2024 tree op
= TREE_OPERAND (t
, i
);
2025 if (op
&& TREE_SIDE_EFFECTS (op
))
2026 TREE_SIDE_EFFECTS (t
) = 1;
2031 /* No side-effects. */
2039 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2040 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2041 we failed to create one. */
2044 canonicalize_cond_expr_cond (tree t
)
2046 /* Strip conversions around boolean operations. */
2047 if (CONVERT_EXPR_P (t
)
2048 && (truth_value_p (TREE_CODE (TREE_OPERAND (t
, 0)))
2049 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0)))
2051 t
= TREE_OPERAND (t
, 0);
2053 /* For !x use x == 0. */
2054 if (TREE_CODE (t
) == TRUTH_NOT_EXPR
)
2056 tree top0
= TREE_OPERAND (t
, 0);
2057 t
= build2 (EQ_EXPR
, TREE_TYPE (t
),
2058 top0
, build_int_cst (TREE_TYPE (top0
), 0));
2060 /* For cmp ? 1 : 0 use cmp. */
2061 else if (TREE_CODE (t
) == COND_EXPR
2062 && COMPARISON_CLASS_P (TREE_OPERAND (t
, 0))
2063 && integer_onep (TREE_OPERAND (t
, 1))
2064 && integer_zerop (TREE_OPERAND (t
, 2)))
2066 tree top0
= TREE_OPERAND (t
, 0);
2067 t
= build2 (TREE_CODE (top0
), TREE_TYPE (t
),
2068 TREE_OPERAND (top0
, 0), TREE_OPERAND (top0
, 1));
2070 /* For x ^ y use x != y. */
2071 else if (TREE_CODE (t
) == BIT_XOR_EXPR
)
2072 t
= build2 (NE_EXPR
, TREE_TYPE (t
),
2073 TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1));
2075 if (is_gimple_condexpr (t
))
2081 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2082 the positions marked by the set ARGS_TO_SKIP. */
2085 gimple_call_copy_skip_args (gimple stmt
, bitmap args_to_skip
)
2088 int nargs
= gimple_call_num_args (stmt
);
2090 vargs
.create (nargs
);
2093 for (i
= 0; i
< nargs
; i
++)
2094 if (!bitmap_bit_p (args_to_skip
, i
))
2095 vargs
.quick_push (gimple_call_arg (stmt
, i
));
2097 if (gimple_call_internal_p (stmt
))
2098 new_stmt
= gimple_build_call_internal_vec (gimple_call_internal_fn (stmt
),
2101 new_stmt
= gimple_build_call_vec (gimple_call_fn (stmt
), vargs
);
2103 if (gimple_call_lhs (stmt
))
2104 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
2106 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
2107 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
2109 if (gimple_has_location (stmt
))
2110 gimple_set_location (new_stmt
, gimple_location (stmt
));
2111 gimple_call_copy_flags (new_stmt
, stmt
);
2112 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
2114 gimple_set_modified (new_stmt
, true);
2121 /* Return true if the field decls F1 and F2 are at the same offset.
2123 This is intended to be used on GIMPLE types only. */
2126 gimple_compare_field_offset (tree f1
, tree f2
)
2128 if (DECL_OFFSET_ALIGN (f1
) == DECL_OFFSET_ALIGN (f2
))
2130 tree offset1
= DECL_FIELD_OFFSET (f1
);
2131 tree offset2
= DECL_FIELD_OFFSET (f2
);
2132 return ((offset1
== offset2
2133 /* Once gimplification is done, self-referential offsets are
2134 instantiated as operand #2 of the COMPONENT_REF built for
2135 each access and reset. Therefore, they are not relevant
2136 anymore and fields are interchangeable provided that they
2137 represent the same access. */
2138 || (TREE_CODE (offset1
) == PLACEHOLDER_EXPR
2139 && TREE_CODE (offset2
) == PLACEHOLDER_EXPR
2140 && (DECL_SIZE (f1
) == DECL_SIZE (f2
)
2141 || (TREE_CODE (DECL_SIZE (f1
)) == PLACEHOLDER_EXPR
2142 && TREE_CODE (DECL_SIZE (f2
)) == PLACEHOLDER_EXPR
)
2143 || operand_equal_p (DECL_SIZE (f1
), DECL_SIZE (f2
), 0))
2144 && DECL_ALIGN (f1
) == DECL_ALIGN (f2
))
2145 || operand_equal_p (offset1
, offset2
, 0))
2146 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1
),
2147 DECL_FIELD_BIT_OFFSET (f2
)));
2150 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
2151 should be, so handle differing ones specially by decomposing
2152 the offset into a byte and bit offset manually. */
2153 if (host_integerp (DECL_FIELD_OFFSET (f1
), 0)
2154 && host_integerp (DECL_FIELD_OFFSET (f2
), 0))
2156 unsigned HOST_WIDE_INT byte_offset1
, byte_offset2
;
2157 unsigned HOST_WIDE_INT bit_offset1
, bit_offset2
;
2158 bit_offset1
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1
));
2159 byte_offset1
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1
))
2160 + bit_offset1
/ BITS_PER_UNIT
);
2161 bit_offset2
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2
));
2162 byte_offset2
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2
))
2163 + bit_offset2
/ BITS_PER_UNIT
);
2164 if (byte_offset1
!= byte_offset2
)
2166 return bit_offset1
% BITS_PER_UNIT
== bit_offset2
% BITS_PER_UNIT
;
2173 /* Return a type the same as TYPE except unsigned or
2174 signed according to UNSIGNEDP. */
2177 gimple_signed_or_unsigned_type (bool unsignedp
, tree type
)
2181 type1
= TYPE_MAIN_VARIANT (type
);
2182 if (type1
== signed_char_type_node
2183 || type1
== char_type_node
2184 || type1
== unsigned_char_type_node
)
2185 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
2186 if (type1
== integer_type_node
|| type1
== unsigned_type_node
)
2187 return unsignedp
? unsigned_type_node
: integer_type_node
;
2188 if (type1
== short_integer_type_node
|| type1
== short_unsigned_type_node
)
2189 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
2190 if (type1
== long_integer_type_node
|| type1
== long_unsigned_type_node
)
2191 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
2192 if (type1
== long_long_integer_type_node
2193 || type1
== long_long_unsigned_type_node
)
2195 ? long_long_unsigned_type_node
2196 : long_long_integer_type_node
;
2197 if (int128_integer_type_node
&& (type1
== int128_integer_type_node
|| type1
== int128_unsigned_type_node
))
2199 ? int128_unsigned_type_node
2200 : int128_integer_type_node
;
2201 #if HOST_BITS_PER_WIDE_INT >= 64
2202 if (type1
== intTI_type_node
|| type1
== unsigned_intTI_type_node
)
2203 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
2205 if (type1
== intDI_type_node
|| type1
== unsigned_intDI_type_node
)
2206 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
2207 if (type1
== intSI_type_node
|| type1
== unsigned_intSI_type_node
)
2208 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
2209 if (type1
== intHI_type_node
|| type1
== unsigned_intHI_type_node
)
2210 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
2211 if (type1
== intQI_type_node
|| type1
== unsigned_intQI_type_node
)
2212 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
2214 #define GIMPLE_FIXED_TYPES(NAME) \
2215 if (type1 == short_ ## NAME ## _type_node \
2216 || type1 == unsigned_short_ ## NAME ## _type_node) \
2217 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2218 : short_ ## NAME ## _type_node; \
2219 if (type1 == NAME ## _type_node \
2220 || type1 == unsigned_ ## NAME ## _type_node) \
2221 return unsignedp ? unsigned_ ## NAME ## _type_node \
2222 : NAME ## _type_node; \
2223 if (type1 == long_ ## NAME ## _type_node \
2224 || type1 == unsigned_long_ ## NAME ## _type_node) \
2225 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2226 : long_ ## NAME ## _type_node; \
2227 if (type1 == long_long_ ## NAME ## _type_node \
2228 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2229 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2230 : long_long_ ## NAME ## _type_node;
2232 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
2233 if (type1 == NAME ## _type_node \
2234 || type1 == u ## NAME ## _type_node) \
2235 return unsignedp ? u ## NAME ## _type_node \
2236 : NAME ## _type_node;
2238 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
2239 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2240 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2241 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2242 : sat_ ## short_ ## NAME ## _type_node; \
2243 if (type1 == sat_ ## NAME ## _type_node \
2244 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2245 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2246 : sat_ ## NAME ## _type_node; \
2247 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2248 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2249 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2250 : sat_ ## long_ ## NAME ## _type_node; \
2251 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2252 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2253 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2254 : sat_ ## long_long_ ## NAME ## _type_node;
2256 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2257 if (type1 == sat_ ## NAME ## _type_node \
2258 || type1 == sat_ ## u ## NAME ## _type_node) \
2259 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2260 : sat_ ## NAME ## _type_node;
2262 GIMPLE_FIXED_TYPES (fract
);
2263 GIMPLE_FIXED_TYPES_SAT (fract
);
2264 GIMPLE_FIXED_TYPES (accum
);
2265 GIMPLE_FIXED_TYPES_SAT (accum
);
2267 GIMPLE_FIXED_MODE_TYPES (qq
);
2268 GIMPLE_FIXED_MODE_TYPES (hq
);
2269 GIMPLE_FIXED_MODE_TYPES (sq
);
2270 GIMPLE_FIXED_MODE_TYPES (dq
);
2271 GIMPLE_FIXED_MODE_TYPES (tq
);
2272 GIMPLE_FIXED_MODE_TYPES_SAT (qq
);
2273 GIMPLE_FIXED_MODE_TYPES_SAT (hq
);
2274 GIMPLE_FIXED_MODE_TYPES_SAT (sq
);
2275 GIMPLE_FIXED_MODE_TYPES_SAT (dq
);
2276 GIMPLE_FIXED_MODE_TYPES_SAT (tq
);
2277 GIMPLE_FIXED_MODE_TYPES (ha
);
2278 GIMPLE_FIXED_MODE_TYPES (sa
);
2279 GIMPLE_FIXED_MODE_TYPES (da
);
2280 GIMPLE_FIXED_MODE_TYPES (ta
);
2281 GIMPLE_FIXED_MODE_TYPES_SAT (ha
);
2282 GIMPLE_FIXED_MODE_TYPES_SAT (sa
);
2283 GIMPLE_FIXED_MODE_TYPES_SAT (da
);
2284 GIMPLE_FIXED_MODE_TYPES_SAT (ta
);
2286 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2287 the precision; they have precision set to match their range, but
2288 may use a wider mode to match an ABI. If we change modes, we may
2289 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2290 the precision as well, so as to yield correct results for
2291 bit-field types. C++ does not have these separate bit-field
2292 types, and producing a signed or unsigned variant of an
2293 ENUMERAL_TYPE may cause other problems as well. */
2294 if (!INTEGRAL_TYPE_P (type
)
2295 || TYPE_UNSIGNED (type
) == unsignedp
)
2298 #define TYPE_OK(node) \
2299 (TYPE_MODE (type) == TYPE_MODE (node) \
2300 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2301 if (TYPE_OK (signed_char_type_node
))
2302 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
2303 if (TYPE_OK (integer_type_node
))
2304 return unsignedp
? unsigned_type_node
: integer_type_node
;
2305 if (TYPE_OK (short_integer_type_node
))
2306 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
2307 if (TYPE_OK (long_integer_type_node
))
2308 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
2309 if (TYPE_OK (long_long_integer_type_node
))
2311 ? long_long_unsigned_type_node
2312 : long_long_integer_type_node
);
2313 if (int128_integer_type_node
&& TYPE_OK (int128_integer_type_node
))
2315 ? int128_unsigned_type_node
2316 : int128_integer_type_node
);
2318 #if HOST_BITS_PER_WIDE_INT >= 64
2319 if (TYPE_OK (intTI_type_node
))
2320 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
2322 if (TYPE_OK (intDI_type_node
))
2323 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
2324 if (TYPE_OK (intSI_type_node
))
2325 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
2326 if (TYPE_OK (intHI_type_node
))
2327 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
2328 if (TYPE_OK (intQI_type_node
))
2329 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
2331 #undef GIMPLE_FIXED_TYPES
2332 #undef GIMPLE_FIXED_MODE_TYPES
2333 #undef GIMPLE_FIXED_TYPES_SAT
2334 #undef GIMPLE_FIXED_MODE_TYPES_SAT
2337 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
2341 /* Return an unsigned type the same as TYPE in other respects. */
2344 gimple_unsigned_type (tree type
)
2346 return gimple_signed_or_unsigned_type (true, type
);
2350 /* Return a signed type the same as TYPE in other respects. */
2353 gimple_signed_type (tree type
)
2355 return gimple_signed_or_unsigned_type (false, type
);
2359 /* Return the typed-based alias set for T, which may be an expression
2360 or a type. Return -1 if we don't do anything special. */
2363 gimple_get_alias_set (tree t
)
2367 /* Permit type-punning when accessing a union, provided the access
2368 is directly through the union. For example, this code does not
2369 permit taking the address of a union member and then storing
2370 through it. Even the type-punning allowed here is a GCC
2371 extension, albeit a common and useful one; the C standard says
2372 that such accesses have implementation-defined behavior. */
2374 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
2375 u
= TREE_OPERAND (u
, 0))
2376 if (TREE_CODE (u
) == COMPONENT_REF
2377 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
2380 /* That's all the expressions we handle specially. */
2384 /* For convenience, follow the C standard when dealing with
2385 character types. Any object may be accessed via an lvalue that
2386 has character type. */
2387 if (t
== char_type_node
2388 || t
== signed_char_type_node
2389 || t
== unsigned_char_type_node
)
2392 /* Allow aliasing between signed and unsigned variants of the same
2393 type. We treat the signed variant as canonical. */
2394 if (TREE_CODE (t
) == INTEGER_TYPE
&& TYPE_UNSIGNED (t
))
2396 tree t1
= gimple_signed_type (t
);
2398 /* t1 == t can happen for boolean nodes which are always unsigned. */
2400 return get_alias_set (t1
);
2407 /* Helper for gimple_ior_addresses_taken_1. */
2410 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED
,
2411 tree addr
, void *data
)
2413 bitmap addresses_taken
= (bitmap
)data
;
2414 addr
= get_base_address (addr
);
2418 bitmap_set_bit (addresses_taken
, DECL_UID (addr
));
2424 /* Set the bit for the uid of all decls that have their address taken
2425 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2426 were any in this stmt. */
2429 gimple_ior_addresses_taken (bitmap addresses_taken
, gimple stmt
)
2431 return walk_stmt_load_store_addr_ops (stmt
, addresses_taken
, NULL
, NULL
,
2432 gimple_ior_addresses_taken_1
);
2436 /* Return TRUE iff stmt is a call to a built-in function. */
2439 is_gimple_builtin_call (gimple stmt
)
2443 if (is_gimple_call (stmt
)
2444 && (callee
= gimple_call_fndecl (stmt
))
2445 && is_builtin_fn (callee
)
2446 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
2452 /* Return true when STMTs arguments match those of FNDECL. */
2455 validate_call (gimple stmt
, tree fndecl
)
2457 tree targs
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
2458 unsigned nargs
= gimple_call_num_args (stmt
);
2459 for (unsigned i
= 0; i
< nargs
; ++i
)
2461 /* Variadic args follow. */
2464 tree arg
= gimple_call_arg (stmt
, i
);
2465 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
2466 && INTEGRAL_TYPE_P (TREE_VALUE (targs
)))
2468 else if (POINTER_TYPE_P (TREE_TYPE (arg
))
2469 && POINTER_TYPE_P (TREE_VALUE (targs
)))
2471 else if (TREE_CODE (TREE_TYPE (arg
))
2472 != TREE_CODE (TREE_VALUE (targs
)))
2474 targs
= TREE_CHAIN (targs
);
2476 if (targs
&& !VOID_TYPE_P (TREE_VALUE (targs
)))
2481 /* Return true when STMT is builtins call to CLASS. */
2484 gimple_call_builtin_p (gimple stmt
, enum built_in_class klass
)
2487 if (is_gimple_call (stmt
)
2488 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
2489 && DECL_BUILT_IN_CLASS (fndecl
) == klass
)
2490 return validate_call (stmt
, fndecl
);
2494 /* Return true when STMT is builtins call to CODE of CLASS. */
2497 gimple_call_builtin_p (gimple stmt
, enum built_in_function code
)
2500 if (is_gimple_call (stmt
)
2501 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
2502 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2503 && DECL_FUNCTION_CODE (fndecl
) == code
)
2504 return validate_call (stmt
, fndecl
);
2508 /* Return true if STMT clobbers memory. STMT is required to be a
2512 gimple_asm_clobbers_memory_p (const_gimple stmt
)
2516 for (i
= 0; i
< gimple_asm_nclobbers (stmt
); i
++)
2518 tree op
= gimple_asm_clobber_op (stmt
, i
);
2519 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op
)), "memory") == 0)
2526 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2529 dump_decl_set (FILE *file
, bitmap set
)
2536 fprintf (file
, "{ ");
2538 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
2540 fprintf (file
, "D.%u", i
);
2541 fprintf (file
, " ");
2544 fprintf (file
, "}");
2547 fprintf (file
, "NIL");
2550 /* Return true when CALL is a call stmt that definitely doesn't
2551 free any memory or makes it unavailable otherwise. */
2553 nonfreeing_call_p (gimple call
)
2555 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
2556 && gimple_call_flags (call
) & ECF_LEAF
)
2557 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call
)))
2559 /* Just in case these become ECF_LEAF in the future. */
2561 case BUILT_IN_TM_FREE
:
2562 case BUILT_IN_REALLOC
:
2563 case BUILT_IN_STACK_RESTORE
:
2572 /* Callback for walk_stmt_load_store_ops.
2574 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2577 This routine only makes a superficial check for a dereference. Thus
2578 it must only be used if it is safe to return a false negative. */
2580 check_loadstore (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
2582 if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
2583 && operand_equal_p (TREE_OPERAND (op
, 0), (tree
)data
, 0))
2588 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2591 infer_nonnull_range (gimple stmt
, tree op
)
2593 /* We can only assume that a pointer dereference will yield
2594 non-NULL if -fdelete-null-pointer-checks is enabled. */
2595 if (!flag_delete_null_pointer_checks
2596 || !POINTER_TYPE_P (TREE_TYPE (op
))
2597 || gimple_code (stmt
) == GIMPLE_ASM
)
2600 if (walk_stmt_load_store_ops (stmt
, (void *)op
,
2601 check_loadstore
, check_loadstore
))
2604 if (is_gimple_call (stmt
) && !gimple_call_internal_p (stmt
))
2606 tree fntype
= gimple_call_fntype (stmt
);
2607 tree attrs
= TYPE_ATTRIBUTES (fntype
);
2608 for (; attrs
; attrs
= TREE_CHAIN (attrs
))
2610 attrs
= lookup_attribute ("nonnull", attrs
);
2612 /* If "nonnull" wasn't specified, we know nothing about
2614 if (attrs
== NULL_TREE
)
2617 /* If "nonnull" applies to all the arguments, then ARG
2618 is non-null if it's in the argument list. */
2619 if (TREE_VALUE (attrs
) == NULL_TREE
)
2621 for (unsigned int i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2623 if (operand_equal_p (op
, gimple_call_arg (stmt
, i
), 0)
2624 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt
, i
))))
2630 /* Now see if op appears in the nonnull list. */
2631 for (tree t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
2633 int idx
= TREE_INT_CST_LOW (TREE_VALUE (t
)) - 1;
2634 tree arg
= gimple_call_arg (stmt
, idx
);
2635 if (operand_equal_p (op
, arg
, 0))
2641 /* If this function is marked as returning non-null, then we can
2642 infer OP is non-null if it is used in the return statement. */
2643 if (gimple_code (stmt
) == GIMPLE_RETURN
2644 && gimple_return_retval (stmt
)
2645 && operand_equal_p (gimple_return_retval (stmt
), op
, 0)
2646 && lookup_attribute ("returns_nonnull",
2647 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
2653 /* Compare two case labels. Because the front end should already have
2654 made sure that case ranges do not overlap, it is enough to only compare
2655 the CASE_LOW values of each case label. */
2658 compare_case_labels (const void *p1
, const void *p2
)
2660 const_tree
const case1
= *(const_tree
const*)p1
;
2661 const_tree
const case2
= *(const_tree
const*)p2
;
2663 /* The 'default' case label always goes first. */
2664 if (!CASE_LOW (case1
))
2666 else if (!CASE_LOW (case2
))
2669 return tree_int_cst_compare (CASE_LOW (case1
), CASE_LOW (case2
));
2672 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2675 sort_case_labels (vec
<tree
> label_vec
)
2677 label_vec
.qsort (compare_case_labels
);
2680 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2682 LABELS is a vector that contains all case labels to look at.
2684 INDEX_TYPE is the type of the switch index expression. Case labels
2685 in LABELS are discarded if their values are not in the value range
2686 covered by INDEX_TYPE. The remaining case label values are folded
2689 If a default case exists in LABELS, it is removed from LABELS and
2690 returned in DEFAULT_CASEP. If no default case exists, but the
2691 case labels already cover the whole range of INDEX_TYPE, a default
2692 case is returned pointing to one of the existing case labels.
2693 Otherwise DEFAULT_CASEP is set to NULL_TREE.
2695 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2696 apply and no action is taken regardless of whether a default case is
2700 preprocess_case_label_vec_for_gimple (vec
<tree
> labels
,
2702 tree
*default_casep
)
2704 tree min_value
, max_value
;
2705 tree default_case
= NULL_TREE
;
2709 min_value
= TYPE_MIN_VALUE (index_type
);
2710 max_value
= TYPE_MAX_VALUE (index_type
);
2711 while (i
< labels
.length ())
2713 tree elt
= labels
[i
];
2714 tree low
= CASE_LOW (elt
);
2715 tree high
= CASE_HIGH (elt
);
2716 bool remove_element
= FALSE
;
2720 gcc_checking_assert (TREE_CODE (low
) == INTEGER_CST
);
2721 gcc_checking_assert (!high
|| TREE_CODE (high
) == INTEGER_CST
);
2723 /* This is a non-default case label, i.e. it has a value.
2725 See if the case label is reachable within the range of
2726 the index type. Remove out-of-range case values. Turn
2727 case ranges into a canonical form (high > low strictly)
2728 and convert the case label values to the index type.
2730 NB: The type of gimple_switch_index() may be the promoted
2731 type, but the case labels retain the original type. */
2735 /* This is a case range. Discard empty ranges.
2736 If the bounds or the range are equal, turn this
2737 into a simple (one-value) case. */
2738 int cmp
= tree_int_cst_compare (high
, low
);
2740 remove_element
= TRUE
;
2747 /* If the simple case value is unreachable, ignore it. */
2748 if ((TREE_CODE (min_value
) == INTEGER_CST
2749 && tree_int_cst_compare (low
, min_value
) < 0)
2750 || (TREE_CODE (max_value
) == INTEGER_CST
2751 && tree_int_cst_compare (low
, max_value
) > 0))
2752 remove_element
= TRUE
;
2754 low
= fold_convert (index_type
, low
);
2758 /* If the entire case range is unreachable, ignore it. */
2759 if ((TREE_CODE (min_value
) == INTEGER_CST
2760 && tree_int_cst_compare (high
, min_value
) < 0)
2761 || (TREE_CODE (max_value
) == INTEGER_CST
2762 && tree_int_cst_compare (low
, max_value
) > 0))
2763 remove_element
= TRUE
;
2766 /* If the lower bound is less than the index type's
2767 minimum value, truncate the range bounds. */
2768 if (TREE_CODE (min_value
) == INTEGER_CST
2769 && tree_int_cst_compare (low
, min_value
) < 0)
2771 low
= fold_convert (index_type
, low
);
2773 /* If the upper bound is greater than the index type's
2774 maximum value, truncate the range bounds. */
2775 if (TREE_CODE (max_value
) == INTEGER_CST
2776 && tree_int_cst_compare (high
, max_value
) > 0)
2778 high
= fold_convert (index_type
, high
);
2780 /* We may have folded a case range to a one-value case. */
2781 if (tree_int_cst_equal (low
, high
))
2786 CASE_LOW (elt
) = low
;
2787 CASE_HIGH (elt
) = high
;
2791 gcc_assert (!default_case
);
2793 /* The default case must be passed separately to the
2794 gimple_build_switch routine. But if DEFAULT_CASEP
2795 is NULL, we do not remove the default case (it would
2796 be completely lost). */
2798 remove_element
= TRUE
;
2802 labels
.ordered_remove (i
);
2808 if (!labels
.is_empty ())
2809 sort_case_labels (labels
);
2811 if (default_casep
&& !default_case
)
2813 /* If the switch has no default label, add one, so that we jump
2814 around the switch body. If the labels already cover the whole
2815 range of the switch index_type, add the default label pointing
2816 to one of the existing labels. */
2818 && TYPE_MIN_VALUE (index_type
)
2819 && TYPE_MAX_VALUE (index_type
)
2820 && tree_int_cst_equal (CASE_LOW (labels
[0]),
2821 TYPE_MIN_VALUE (index_type
)))
2823 tree low
, high
= CASE_HIGH (labels
[len
- 1]);
2825 high
= CASE_LOW (labels
[len
- 1]);
2826 if (tree_int_cst_equal (high
, TYPE_MAX_VALUE (index_type
)))
2828 for (i
= 1; i
< len
; i
++)
2830 high
= CASE_LOW (labels
[i
]);
2831 low
= CASE_HIGH (labels
[i
- 1]);
2833 low
= CASE_LOW (labels
[i
- 1]);
2834 if ((TREE_INT_CST_LOW (low
) + 1
2835 != TREE_INT_CST_LOW (high
))
2836 || (TREE_INT_CST_HIGH (low
)
2837 + (TREE_INT_CST_LOW (high
) == 0)
2838 != TREE_INT_CST_HIGH (high
)))
2843 tree label
= CASE_LABEL (labels
[0]);
2844 default_case
= build_case_label (NULL_TREE
, NULL_TREE
,
2852 *default_casep
= default_case
;
2855 /* Set the location of all statements in SEQ to LOC. */
2858 gimple_seq_set_location (gimple_seq seq
, location_t loc
)
2860 for (gimple_stmt_iterator i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
2861 gimple_set_location (gsi_stmt (i
), loc
);